hexsha stringlengths 40 40 | size int64 5 1.05M | ext stringclasses 98
values | lang stringclasses 21
values | max_stars_repo_path stringlengths 3 945 | max_stars_repo_name stringlengths 4 118 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 945 | max_issues_repo_name stringlengths 4 118 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 134k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 945 | max_forks_repo_name stringlengths 4 135 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 1.05M | avg_line_length float64 1 1.03M | max_line_length int64 2 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
75e6b2d3a63be721797d626fc4c7a09ad2f3a274 | 26,681 | sql | SQL | ishop.sql | yialexlee/ishop | 10912f3ac49bfedaf0a9e96f4d486ca48b5dc03b | [
"MIT"
] | null | null | null | ishop.sql | yialexlee/ishop | 10912f3ac49bfedaf0a9e96f4d486ca48b5dc03b | [
"MIT"
] | null | null | null | ishop.sql | yialexlee/ishop | 10912f3ac49bfedaf0a9e96f4d486ca48b5dc03b | [
"MIT"
] | null | null | null | -- phpMyAdmin SQL Dump
-- version 4.9.0.1
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation time: 2021-02-17 11:36:06
-- Server version: 10.4.6-MariaDB
-- PHP version: 7.3.9

-- Session setup for a deterministic restore:
-- explicit id values of 0 are inserted literally instead of triggering
-- AUTO_INCREMENT, and the whole dump loads inside one transaction
-- (matching COMMIT is at the end of the file).
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";

-- Save the client charset/collation so the footer can restore them,
-- then force utf8mb4 for the duration of the import.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `ishop`
--
-- --------------------------------------------------------
--
-- Table structure for table `admin`
--
-- Back-office administrator accounts.
-- NOTE(review): the `password` values below are 32 hex chars and look like
-- unsalted MD5 digests -- a weak scheme; confirm and migrate to bcrypt/argon2.
-- `status` holds free-text state names ('Active'/'Blocked') in a varchar(120).
CREATE TABLE `admin` (
`id` int(10) UNSIGNED NOT NULL,
`name` varchar(64) NOT NULL,
`email` varchar(64) NOT NULL,
`password` varchar(64) NOT NULL,
`status` varchar(120) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `admin`
--
INSERT INTO `admin` (`id`, `name`, `email`, `password`, `status`) VALUES
(1, 'Superadmin', 'superadmin@gmail.com', '17c4520f6cfd1ab53d8745e84681eb49', 'Active'),
(2, 'admin01', 'admin01@gmail.com', 'ef9ec39df300ab9cef8657af3cabe2db', 'Active'),
(3, 'Admin02', 'admin02@gmail.com', '9d74933b86ddb3b3f83d5285bb2133f9', 'Blocked'),
(4, 'admin05', 'dmin05@gmail.com', 'c0d107095acb294a1c97db68959ff218', 'Active');
-- --------------------------------------------------------
--
-- Table structure for table `brand`
--
-- Maps a product code to its brand name (one row per product).
-- NOTE(review): `Product_code` has no UNIQUE constraint and repeats
-- (e.g. '#M106' on ids 35, 44, 45, 47); rows 19-21 and 34 carry a literal
-- trailing '\r\n' in the code, which will break equality joins against the
-- `category`/`product` tables that store the same codes without it.
CREATE TABLE `brand` (
`Brand_ID` int(11) NOT NULL,
`Brand_NAME` varchar(100) NOT NULL,
`Product_code` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `brand`
--
INSERT INTO `brand` (`Brand_ID`, `Brand_NAME`, `Product_code`) VALUES
(1, 'Logitech', '#M101'),
(2, 'Logitech', '#M102'),
(3, 'Logitech', '#M103'),
(4, 'Logitech', '#M104'),
(5, 'Logitech', '#M105'),
(6, 'Logitech', '#K201'),
(7, 'Logitech', '#K203'),
(8, 'Logitech', '#K204'),
(9, 'Logitech', '#K205'),
(10, 'Logitech', '#K202'),
(11, 'Logitech', '#K206'),
(12, 'Logitech', '#H306'),
(13, 'Sony', '#H301'),
(14, 'Sony', '#H305'),
(15, 'Sony', '#S501'),
(16, 'Sony', '#S502'),
(17, 'Sony', '#S504'),
(18, 'Sony', '#S505'),
(19, 'Philips', '#H304\r\n'),
(20, 'Philips', '#S503\r\n'),
(21, 'Philips', '#MC601\r\n'),
(22, 'Kingstom', '#F401'),
(23, 'Kingstom', '#F402'),
(24, 'Kingstom', '#F403'),
(25, 'Kingstom', '#F404'),
(26, 'Kingstom', '#F405'),
(27, 'Beats', '#H303'),
(28, 'Beats', '#H302'),
(29, 'Tonor', '#MC602'),
(30, 'Tonor', '#MC603'),
(31, 'Microsoft', '#W701'),
(32, 'Microsoft', '#W702'),
(33, 'Microsoft', '#W603'),
(34, 'Philips', '#H307\r\n'),
(35, 'Philips', '#M106'),
(36, 'Beats', '#H308'),
(37, 'Tonor', '#MC604'),
(38, 'Tonor', '#MC605'),
(39, 'Tonor', '#MC606'),
(40, 'Microsoft', '#D801'),
(41, 'Microsoft', '#D802'),
(42, 'Microsoft', '#D803'),
(43, 'Microsoft', '#W703'),
(44, 'Philips', '#M106'),
(45, 'Sony', '#M106'),
(46, 'Logitech', '4444444444444'),
(47, 'Logitech', '#M106'),
(48, 'Logitech', '#M1065'),
(49, 'Logitech', '#M106555555');
-- --------------------------------------------------------
--
-- Table structure for table `cart`
--
-- One row per line item in a member's shopping cart.
-- NOTE(review): no FOREIGN KEY constraints are declared anywhere in this
-- dump (the ALTER section only adds primary keys), so `Member_ID` and
-- `Product_ID` integrity must be enforced by the application.
-- int(64) is only a display width in MariaDB; storage is still 32-bit INT.
CREATE TABLE `cart` (
`Cart_ID` int(64) NOT NULL,
`Member_ID` int(64) NOT NULL,
`Product_ID` int(64) NOT NULL,
`Product_QTY` int(64) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `cart`
--
INSERT INTO `cart` (`Cart_ID`, `Member_ID`, `Product_ID`, `Product_QTY`) VALUES
(24, 1, 1, 2);
-- --------------------------------------------------------
--
-- Table structure for table `category`
--
-- Maps a product code to its category name (one row per product),
-- parallel in shape to `brand`.
-- NOTE(review): `Product_code` repeats here too ('#M106' on ids 20, 43,
-- 44, 46) and, unlike `brand`, these rows have no trailing '\r\n' --
-- so joins between the two tables on this column will silently miss rows.
CREATE TABLE `category` (
`Category_ID` int(11) NOT NULL,
`Category_NAME` varchar(100) NOT NULL,
`Product_code` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `category`
--
INSERT INTO `category` (`Category_ID`, `Category_NAME`, `Product_code`) VALUES
(1, 'Mouse', '#M101'),
(2, 'Mouse', '#M102'),
(3, 'Mouse', '#M103'),
(4, 'Mouse', '#M104'),
(5, 'Mouse', '#M105'),
(6, 'Keyboard', '#K201'),
(7, 'Keyboard', '#K202'),
(8, 'Keyboard', '#K203'),
(9, 'Keyboard', '#K204'),
(10, 'Keyboard', '#K205'),
(11, 'Keyboard', '#K206'),
(12, 'Headphone', '#H301'),
(13, 'Headphone', '#H302'),
(14, 'Headphone', '#H303'),
(15, 'Headphone', '#H304'),
(16, 'Headphone', '#H305'),
(17, 'Headphone', '#H306'),
(18, 'Headphone', '#H307'),
(19, 'Headphone', '#H308'),
(20, 'Keyboard', '#M106'),
(21, 'Flash Drive', '#F401'),
(22, 'Flash Drive', '#F402'),
(23, 'Flash Drive', '#F403'),
(24, 'Flash Drive', '#F404'),
(25, 'Flash Drive', '#F405'),
(26, 'Speaker', '#S501'),
(27, 'Speaker', '#S502'),
(28, 'Speaker', '#S503'),
(29, 'Speaker', '#S504'),
(30, 'Speaker', '#S505'),
(31, 'Microphone', '#MC601'),
(32, 'Microphone', '#MC602'),
(33, 'Microphone', '#MC603'),
(34, 'Microphone', '#MC604'),
(35, 'Microphone', '#MC605'),
(36, 'Microphone', '#MC606'),
(37, 'Webcam', '#W701'),
(38, 'Webcam', '#W702'),
(39, 'Webcam', '#W703'),
(40, 'Display Adapter', '#D801'),
(41, 'Display Adapter', '#D802'),
(42, 'Display Adapter', '#D803'),
(43, 'Keyboard', '#M106'),
(44, 'Headphone', '#M106'),
(45, 'Mouse', '4444444444444'),
(46, 'Mouse', '#M106'),
(47, 'Mouse', '#M1065'),
(48, 'Mouse', '#M106555555');
-- --------------------------------------------------------
--
-- Table structure for table `contact`
--
-- Customer contact-form submissions; `status` tracks the workflow state
-- ('contact' = new, 'replied' = answered, per the rows below).
-- NOTE(review): `phone` is int(11), so leading zeros are lost and numbers
-- over 2,147,483,647 overflow -- the stored values (184675055, 125582218)
-- presumably already dropped a leading '0'; phone should be a varchar.
CREATE TABLE `contact` (
`id` int(100) NOT NULL,
`name` varchar(100) NOT NULL,
`email` varchar(50) NOT NULL,
`phone` int(11) NOT NULL,
`subject` varchar(30) NOT NULL,
`message` varchar(100) NOT NULL,
`status` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `contact`
--
INSERT INTO `contact` (`id`, `name`, `email`, `phone`, `subject`, `message`, `status`) VALUES
(2, 'LEE JIA YI', 'lee52934870@gmail.com', 184675055, 'Shipping', 'Do ishop have free shipping service?', 'contact'),
(3, 'Ramli', 'ramlimuddin225@yahoo.com', 125582218, 'Return', 'Do ishop return product?', 'replied');
-- --------------------------------------------------------
--
-- Table structure for table `member`
--
-- Storefront customer accounts with default shipping address.
-- NOTE(review): `password` values are 32 hex chars and look like unsalted
-- MD5 digests -- weak; migrate to a salted adaptive hash.
-- `phone` is correctly varchar here (unlike `contact`), but `postcode`
-- is an int, which would drop any leading zero.
CREATE TABLE `member` (
`id` int(10) UNSIGNED NOT NULL,
`name` varchar(64) NOT NULL,
`email` varchar(64) NOT NULL,
`password` varchar(64) NOT NULL,
`phone` varchar(64) NOT NULL,
`address` varchar(64) NOT NULL,
`state` varchar(64) NOT NULL,
`city` varchar(64) NOT NULL,
`postcode` int(64) NOT NULL,
`status` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `member`
--
INSERT INTO `member` (`id`, `name`, `email`, `password`, `phone`, `address`, `state`, `city`, `postcode`, `status`) VALUES
(1, 'Admin', 'admin@gmail.com', '21232f297a57a5a743894a0e4a801fc3', '018955625894', '56,jalan tembikai,21', 'johor', 'johor bahru', 81100, 'Active'),
(2, 'lee', 'lee52934870@gmail.com', '71ee711610c4343c86cccc6f38cde8f4', '0195565996', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'Active'),
(3, 'admin000', 'admin000@gmail.com', 'bd6fddd42278812354823774c428b159', '0155804841', '31,jalan ganjar 20', 'johor', 'johor bahru', 81200, 'Blocked');
-- --------------------------------------------------------
--
-- Table structure for table `orderdetail`
--
-- One row per order (single product line per order in this schema).
-- SECURITY(review): this table stores the full payment card number,
-- cardholder name, expiry and CVV in plaintext -- storing CVV at all is
-- prohibited by PCI DSS; this must move to a tokenized payment provider.
-- NOTE(review): several `Order_CARDNUM` values are '2147483647' (the
-- 32-bit INT maximum), evidence the numbers were once stored in an int
-- column and clamped on insert; the data is already corrupted.
-- int(240)/varchar(11) widths are display hints only, not storage sizes.
CREATE TABLE `orderdetail` (
`Order_ID` int(240) NOT NULL,
`Member_ID` int(11) NOT NULL,
`Product_ID` int(11) NOT NULL,
`Product_QTY` int(11) NOT NULL,
`Order_AMOUNT` int(11) NOT NULL,
`Order_STATUS` varchar(11) NOT NULL,
`Order_RECIPIENT` varchar(100) NOT NULL,
`Order_PHONE` varchar(100) NOT NULL,
`Order_ADDRESS` varchar(100) NOT NULL,
`Order_STATE` varchar(100) NOT NULL,
`Order_CITY` varchar(100) NOT NULL,
`Order_POSTCODE` int(11) NOT NULL,
`Order_CARDBANK` varchar(100) NOT NULL,
`Order_CARDNUM` varchar(100) NOT NULL,
`Order_CARDNAME` varchar(100) NOT NULL,
`Order_CARDMM` varchar(100) NOT NULL,
`Order_CARDYYYY` varchar(100) NOT NULL,
`Order_CARDCVV` int(11) NOT NULL,
`Order_DATE` datetime(6) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `orderdetail`
--
INSERT INTO `orderdetail` (`Order_ID`, `Member_ID`, `Product_ID`, `Product_QTY`, `Order_AMOUNT`, `Order_STATUS`, `Order_RECIPIENT`, `Order_PHONE`, `Order_ADDRESS`, `Order_STATE`, `Order_CITY`, `Order_POSTCODE`, `Order_CARDBANK`, `Order_CARDNUM`, `Order_CARDNAME`, `Order_CARDMM`, `Order_CARDYYYY`, `Order_CARDCVV`, `Order_DATE`) VALUES
(1, 2, 1, 1, 178, 'Confirmed', 'lee', '01587496894', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'RHB Bank', '2147483647', 'kkkkkkkkkkkkk', '10', '36', 444, '2021-01-29 12:15:21.000000'),
(2, 2, 1, 1, 178, 'Paid', 'lee', '01587496894', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'RHB Bank', '2147483647', 'k', '08', '36', 454, '2021-01-29 13:04:56.000000'),
(3, 2, 1, 1, 178, 'Paid', 'lee', '01587496894', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'Public Bank', '2147483647', 'kk', '10', '35', 425, '2021-01-29 13:14:55.000000'),
(4, 2, 1, 1, 178, 'Cancelled', 'lee', '01587496894', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'Maybank', '455', 'Lee Jia Yi', '09', '35', 452, '2021-01-29 13:16:25.000000'),
(5, 2, 1, 1, 178, 'Shipped', 'lee', '0185626330', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'Hong Leong Bank', '5196203594826595', 'Lee Jia Yi', '09', '37', 454, '2021-01-29 13:17:39.000000'),
(6, 1, 4, 1, 1030, 'Paid', 'Admin', '018955625894', '56,jalan tembikai,21', 'johor', 'johor bahru', 81100, 'RHB Bank', '539800639400536', 'Lee Jia Yi', '03', '23', 553, '2021-02-01 14:24:05.000000'),
(7, 2, 8, 1, 120, 'Paid', 'lee', '01587496894', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'Maybank', '5196445655234452', 'Lee Jia Yi', '10', '24', 562, '2021-02-01 17:11:21.000000'),
(9, 2, 1, 2, 356, 'Shipped', 'lee', '', '24,jalan durian 63', 'melaka', 'melaka', 81500, 'RHB Bank', '2560864804684840065', 'Lee Jia Yi', '01', '21', 555, '2021-02-17 15:09:46.000000');
-- --------------------------------------------------------
--
-- Table structure for table `password_resets`
--
-- Password-reset tokens issued per email address.
-- NOTE(review): tokens are stored in plaintext with no expiry timestamp
-- and old tokens are never purged (nine live tokens for one address
-- below) -- tokens should be hashed, time-limited and single-use.
CREATE TABLE `password_resets` (
`id` int(10) NOT NULL,
`email` varchar(255) NOT NULL,
`token` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `password_resets`
--
INSERT INTO `password_resets` (`id`, `email`, `token`) VALUES
(1, 'lee52934870@gmail.com', '3bc886cc00de6e95a3937e643446f6bca6c6c99ff8000b9a7f2c51f20e746c66c7567cb780709e1a9683b0c8de31dbd6439c'),
(2, 'lee52934870@gmail.com', 'e0584926bb4cbd1b1b316af386b2388536769c717d8b9cb7402e03982953d22d68b7cdf9bff03ead66b23ced217d6af22ee5'),
(3, 'lee52934870@gmail.com', '2328e6f37c0ff53db003cef367ecde1b0b878a5703c68b1af93383c2196f13bd333ea1fa333fac00c6d445bd2390c8ef4edf'),
(4, 'lee52934870@gmail.com', '54454288fa5fa17ed2a1f2d20b6007be7bce28c80986111acba0f1713a656a933b6a4ebbace260f933c4e4e03aa386d4f5ff'),
(5, 'lee52934870@gmail.com', '5192581de9decb9faff8413800493cc7a5ddf125accb54235c6fbe197ceeaad5cb98882d9e5f487f8e984bdb743ff920b3df'),
(6, 'lee52934870@gmail.com', 'aa9e93000eace3d3b277348f293630c56ff8b4b2d7575a933afe073136ac442765138c4cdc87963d631b2a6dc3e7deda0eb7'),
(7, 'lee52934870@gmail.com', '72c9e46a614b68819bcdc1662068b7fea11f0eb14b8ab77a60cf68bd0d36103f64bc295e5b00cc54abccff6178d0722468c0'),
(8, 'lee52934870@gmail.com', '7e548e9c6174cb2b4398954fd3e247e202d67bfbed284fde0f294aa49924eff9d5148bc4de840584f91d8bd6ef3e6ea60d19'),
(9, 'lee52934870@gmail.com', 'e95707d34c471ac597a03310de7a0d7741270a6be04e1d72f04ac699ccd359af93842c63dfedcad52ab0bfa2b96c12e3692c');
-- --------------------------------------------------------
--
-- Table structure for table `product`
--
-- Product catalog. `Product_CATEGORY` and `Product_BRAND` are denormalized
-- name strings that duplicate the `category`/`brand` tables.
-- NOTE(review): `Product_PIC` is declared blob but the hex literals below
-- decode to plain ASCII image file names (e.g. 0x47...672e6a7067 ends in
-- '.jpg') -- it stores a filename, not image bytes; varchar would be honest.
-- NOTE(review): casing of category/connectivity values is inconsistent
-- ('KEYBOARD'/'Keyboard', 'USB(Type-C)'/'USB(TYPE-C)'), and row 42's
-- description contains a mojibake replacement character.
CREATE TABLE `product` (
`Product_ID` int(11) NOT NULL,
`Product_NAME` varchar(250) NOT NULL,
`Product_DESCRIPTION` varchar(250) NOT NULL,
`Product_PIC` blob NOT NULL,
`Product_PRICE` int(11) NOT NULL,
`Product_STOCK` int(11) NOT NULL,
`Product_Code` varchar(100) DEFAULT NULL,
`Product_Status` varchar(100) DEFAULT NULL,
`Product_Connectivity` varchar(100) DEFAULT NULL,
`Product_CATEGORY` varchar(100) NOT NULL,
`Product_BRAND` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `product`
--
INSERT INTO `product` (`Product_ID`, `Product_NAME`, `Product_DESCRIPTION`, `Product_PIC`, `Product_PRICE`, `Product_STOCK`, `Product_Code`, `Product_Status`, `Product_Connectivity`, `Product_CATEGORY`, `Product_BRAND`) VALUES
(1, 'GPRO WIRELESS GAMING MOUSE', 'PRO Wireless was designed to be the ultimate gaming mouse for esports professionals. ', 0x472050524f20576972656c6573732047616d696e67204d6f7573652e6a7067, 178, 17, '#M101', 'Available', 'Bluetooth', 'Mouse', 'Logitech'),
(2, 'G502 LIGHTSPEED WIRELESS OPTICAL GAMING MOUSE', 'G502 is an icon, topping the charts through every generation, and the mouse of choice for serious gamers.', 0x47203530324d2e6a7067, 178, 25, '#M102', 'Available', 'Bluetooth', 'Mouse', 'Logitech'),
(3, 'M185 COMPACT WIRELESS MOUSE', 'The M185 COMPACT WIRELESS MOUSE works with Window®, Mac, Chrome OS™ or Linux®-based computers. So small and unobtrusive, you can plug it into a USB port and forget about it. Plus, you can say good-bye to worries about lost receivers.', 0x4d31383520436f6d7061637420576972656c657373204d6f7573652c2044757261626c6520262044657369676e20666f72204c6170746f702e706e67, 30, 35, '#M103', 'Available', 'Bluetooth', 'Mouse', 'Logitech'),
(4, 'M557 BLUETOOTH MOUSE FOR WINDOWS & MAC', 'The 1000 dpi, high-definition optical sensor delivers smooth, responsive cursor control, so you can work with precision and confidence on a wide variety of surfaces.', 0x4d35353720426c7565746f6f7468204d6f75736520666f722057696e646f77732026204d61632e706e67, 27, 29, '#M104', 'Available', 'Bluetooth ', 'Mouse', 'Logitech'),
(5, 'G102 PRODIGY USB BLACK GAMING MOUSE', 'A true gaming mouse that allows you to enjoy high quality RGB lighting featuring 16.5 million colours.', 0x473130324d2e6a7067, 25, 30, '#M105', 'Available', 'Bluetooth & USB Unifying', 'Mouse', 'Logitech'),
(6, 'G613 WIRELESS MECHANICAL GAMING KEYBOARD', 'Six dedicated G-keys can be programmed with custom macros and in-app commands. Execute complex actions, simply and quickly, with a keystroke. G-keys can be assigned per game or app profile for customizability and convenience.', 0x4736313320576972656c657373204d656368616e6963616c2047616d696e67204b6579626f6172642e706e67, 150, 20, '#K201', 'Available', 'USB Unifying', 'KEYBOARD', 'Logitech'),
(7, 'CORDED KEYBOARD K280E', 'Solid construction ensures extended durability and 3-year limited hardware warranty provides for long-term use.', 0x436f72646564204b6579626f617264204b323830652e706e67, 90, 30, '#K202', 'Available', 'Bluetooth & USB Unifying', 'Keyboard', 'Logitech'),
(8, 'G213 PRODIGY GAMING KEYBOARD', 'The G213 gaming keyboard features Logitech G Mech-Dome keys that are specially tuned to deliver a superior tactile response and overall performance profile similar to a mechanical keyboard.', 0x473231332050726f646967792047616d696e67204b6579626f617264207769746820524742204c69676874696e67202620416e74692d47686f7374696e672e706e67, 120, 29, '#K203', 'Available', 'Bluetooth & USB Unifying', 'Keyboard', 'Logitech'),
(9, 'K360 COMPUTER WIRELESS KEYBOARD WITH HOT KEYS', 'Perfect for tight workspaces, or smaller frames, so you can place your mouse closer for less reaching and better body posture.', 0x4b3336302e706e67, 100, 35, '#K204', 'Available', 'Bluetooth & USB Unifying', 'Keyboard', 'Logitech'),
(10, 'K740 ILLUMINATED USB KEYBOARD WITH BUILT-IN PLAM REST', 'Perfect for tight workspaces, or smaller frames, so you can place your mouse closer for less reaching and better body posture.', 0x4b37343020696c6c756d696e6174656420555342204b6579626f6174642077697468204275696c742d696e2050616c6d20526573742e706e67, 100, 30, '#K205', 'Available', 'Bluetooth & USB Unifying', 'Keyboard', 'Logitech'),
(11, 'H340 USB COMPUTER HEADSET', 'Works with Common calling applications across almost all platforms and operating systems\r\nWindows® or macOS or Chrome OS™ and USB port (Type A port or adapter).', 0x483334302055534220434f4d505554455220484541445345542e706e67, 50, 30, '#H301', 'Available', 'USB', 'Headphone', 'Sony'),
(12, 'MX Keys for Mac- Wireless illuminated Keyboard', 'MX Keys for Mac – designed to work seamlessly on your Mac and iPad. ', 0x4d58204b65797320666f72204d61632d20576972656c65737320696c6c756d696e61746564204b6579626f6172642e706e67, 100, 35, '#K206', 'Available', 'USB Unifying', 'Keyboard', 'Logitech'),
(13, 'BEATS SOLO 3 WIRELESS', 'Enjoy award-winning Beats sound with Class 1 Bluetooth wireless listening freedom.', 0x426561747320536f6c6f204d2e6a7067, 175, 30, '#H302', 'Available', 'Bluetooth ', 'Headphone', 'Beats'),
(14, 'BEATS SOLO PRO WIRELESS', 'Beats Solo Pro wireless on-ear headphones have Active Noise Cancelling, powerful sound, Class 1 bluetooth, and up to 40 hours of battery life.', 0x536f6c6f5f48442e706e67, 180, 20, '#H303', 'Available', 'Bluetooth', 'Headphone', 'Beats'),
(15, 'PC HEADSET SHM7110U/97', 'This sleek PC solution lets you tailor your headset with customizable earshells.', 0x504320486561647365742053484d37313130552d39372e6a7067, 50, 40, '#H304', 'Available', 'Bluetooth', 'Headphone', 'Philips'),
(16, 'H540 USB COMPUTER HEADSET', 'This plug-and-play headset can be used with any PC or Mac computer with a USB-A port or adapter without the need to install software.', 0x483534302055534220434f4d505554455220484541445345542e706e67, 45, 30, '#H305', 'Available', 'USB', 'Headphone', 'Sony'),
(17, 'LOGITECH ZONE WIRELESS', 'Logitech Zone Wireless is designed for office use all day, every day.', 0x4c4f474954454348205a4f4e452057495245442e706e67, 70, 25, '#H306', 'Available', 'Bluetooth', 'Headphone', 'Logitech'),
(18, 'DATA TRAVELER 80 USB FLASH DRIVE', 'Kingston’s Data Traveler 80 is a high-performance USB flash drive that supports Type-C laptops, desktops, smartphones and tablets without the need for an adapter.', 0x4461746154726176656c65722038302055534220466c6173682044726976652e6a7067, 25, 50, '#F401', 'Available', 'USB(Type-C)', 'Flash Drive', 'Kingstom'),
(19, 'DATA TRAVELER 100 G3 USB FLASH DRIVE', 'Kingston’s Data Traveler 100 G3 (DT100G3) USB Flash drive is compliant with USB 3.0 specifications1 to take advantage of technology in newer notebooks, desktop PCs and digital devices.', 0x4461746154726176656c6572203130302047332055534220466c6173682044726976652e6a7067, 20, 50, '#F402', 'Available', 'USB', 'Flash Drive', 'Kingstom'),
(20, 'DATA TRAVELER MICRODUO 3C USB FLASH DRIVE', 'Data Traveler microDuo 3C has a dual interface that works with both standard USB and USB Type-C®1 ports. ', 0x4461746154726176656c6572204d6963726f44756f2033432055534220466c6173682044726976652e6a7067, 30, 50, '#F403', 'Available', 'USB(TYPE-C)', 'Flash Drive', 'Kingstom'),
(21, 'DATA TRAVELER SE9 USB FLASH DRIVE', 'Kingston’s Data Traveler SE9 USB Flash drive has a stylish metal casing with a large ring so it will attach easily. ', 0x4461746154726176656c6572205345392055534220466c6173682044726976652e6a7067, 20, 50, '#F404', 'Available', 'USB ', 'Flash Drive', 'Kingstom'),
(22, 'DT VAULT PRIVACY ENCYPTED USB FLASH DRIVE', 'Kingston’s Data Traveler Vault Privacy 3.0 USB Flash drive provides affordable business-grade security with 256-bit AES hardware-based encryption in XTS mode. ', 0x4454205661756c74205072697661637920456e637279707465642055534220466c6173682044726976652e6a7067, 25, 50, '#F405', 'Available', 'USB', 'Flash Drive', 'Kingstom'),
(23, 'PORTABLE WIRELESS BULETOOTH-WIFI SPEAKER', 'A smooth surface and clean lines ensure this sleek beauty blends into the background, yet still draws attention for its minimalist refinement.', 0x506f727461626c6520576972656c65737320424c5545544f4f54482d57692d466920537065616b65722e6a7067, 180, 30, '#S501', 'Available', 'Bluetooth & WIFI', 'Speaker', 'Sony'),
(24, 'STEREO BOOKSHELF SPEAKER', 'These speakers include new super tweeters with wide directionality.', 0x53746572656f20426f6f6b7368656c6620537065616b6572732e6a7067, 120, 25, '#S502', 'Available', 'Bluetooth & WIFI', 'Speaker', 'Sony'),
(25, 'USB NOTEBOOK SPEAKERS SPA200/00', ' A pair of speakers for notebook computers that are powered and audio-connected to the computer via USB.', 0x555342204e6f7465626f6f6b20737065616b6572732053504132302d30302e6a7067, 100, 25, '#S503', 'Available', 'USB', 'Speaker', 'Philips'),
(26, 'XB23 EXTRA BASS PORTABLE BLUETOOTH SPEAKER', 'Compact, lightweight and easy to carry, the SRS-XB23 combines punchy bass sound with a tough, durable, go-anywhere design.', 0x58423233204558545241204241535320506f727461626c6520424c5545544f4f544820537065616b65722e6a7067, 150, 30, '#S504', 'Available', 'Bluetooth ', 'Speaker', 'Sony'),
(27, 'XB41 EXTRA BASS PORTABLE BLUETOOTH SPEAKER', 'Get the ultimate three-dimensional music festival experience, wherever you go with the SRS-XB41.', 0x58423431204558545241204241535320506f727461626c6520424c5545544f4f544820537065616b65722e6a7067, 160, 40, '#S505', 'Available', 'Bluetooth', 'Speaker', 'Sony'),
(28, 'PC MICROPHONE SHM1000/97', 'This sensitive microphone is ideal for heavy online users.', 0x50436d6963726f70686f6e6553484d313030302d39372e6a7067, 145, 25, '#MC601', 'Available', 'USB', 'Microphone', 'Philips'),
(29, 'Tonor TC30 USB Microphone', 'TC30 is compatible with Windows, macOS and Linux. Ideal for gaming, podcasting, zoom meeting, streaming, Skype chatting, online conference. ', 0x544f4e4f522d54432d3737372e706e67, 50, 25, '#MC602', 'Available', 'USB', 'Microphone', 'Tonor'),
(30, 'TONOR TC-777 USB Computer Condenser PC Podcast Gaming Microphone', 'Condenser microphones are well-known for their excellent audio quality and sensitivity. ', 0x546333302e6a7067, 100, 40, '#MC603', 'Available', 'USB', 'Microphone', 'Tonor'),
(31, 'LIFECAM CINEMA', 'Don’t miss a thing—enjoy high-quality 720p HD widescreen video together with crystal clear audio, with the LifeCam Cinema. ', 0x4c69666543616d2043696e656d612e6a7067, 60, 35, '#W701', 'Available', 'USB', 'Webcam', 'Microsoft'),
(32, 'LIFECAM STUDIO', 'LifeCam Studio gets you the closest to being there Shoot with great accuracy in wide angle with the LifeCam precision glass element lens. ', 0x4c69666543616d2053747564696f2e6a7067, 50, 40, '#W702', 'Available', 'USB', 'Webcam', 'Microsoft'),
(33, 'LIFECAM HD-3000', 'TrueColor technology provides superior color and brightness in almost all lighting conditions, including low light and backlight. ', 0x4c6966652063616d2e6a7067, 80, 35, '#W703', 'Available', 'USB', 'Webcam', 'Microsoft'),
(34, 'PC Headset SHM1900/00', 'Philips SHM1900 are lightweight over-ear PC headsets that provide the ultimate in comfort.', 0x5068696c69707320486561647365742e6a7067, 150, 50, '#H307', 'Available', 'USB', 'Headphone', 'Philips'),
(35, 'Wired gaming mouse with Ambiglow', 'This momentum gaming mouse has 7 programmable buttons and adjustable DPI up to 16,400 for ultimate performance.', 0x47616d696e67204d6f75736520416d6269646c6f772e6a7067, 100, 35, '#M106', 'Available', 'USB', 'Mouse', 'Philips'),
(36, 'Beats Studio 3', 'Beats Studio 3 Wireless over-ear headphones delivers premium sound while blocking external noise with Active Noise Cancelling', 0x53747564696f20332e706e67, 190, 25, '#H308', 'Available', 'Bluetooth & Micro USB port', 'Headphone', 'Beats'),
(37, 'TONOR Q9 USB Computer Condenser Microphone Streaming Podcast PC Gaming Mic', 'Q9 microphone is easy to connect with both Mac and Windows computer, no need any extra driver software or sound card. ', 0x51392e6a7067, 70, 30, '#MC604', 'Available', 'USB', 'Microphone', 'Tonor'),
(38, 'TONOR TC-2030 USB Microphone Kit', 'Gold-plated 14MM large-diaphragm makes the microphone sound range of low and high frequency wider and the sound quality more exquisite.', 0x54432d323033302e6a7067, 80, 34, '#MC605', 'Available', 'USB', 'Microphone', 'Tonor'),
(39, 'TONOR TC20 XLR Microphone Kit', 'The microphone adopts a cardioid polar pattern with excellent off-axis sound suppression capabilities, it also has a low-noise FET preamplifier.', 0x5443323020584c522e6a7067, 68, 28, '#MC606', 'Available', 'USB', 'Microphone', 'Tonor'),
(40, 'Microsoft 4K Wireless Display Adapter', 'Discover the strong, reliable way to project your content, photos and videos on the big screen. HDMI (powered through USB), compatible with HDCP 2.2 and HDCP 1.4', 0x344b20776972656c6573732044412e706e67, 200, 36, '#D801', 'Available', 'HDMI', 'Display Adapter', 'Microsoft'),
(41, 'Microsoft USB-C Travel Hub', 'Turn your laptop into a productivity companion with this elegant, multi-port adapter.', 0x5553422d432054726176656c204875622e6a7067, 289, 20, '#D802', 'Available', 'USB(Type-C)', 'Display Adapter', 'Microsoft'),
(42, 'P3Q-00010 Wireless Display Adapter', 'Stream movies, view personal photos, or display a presentation on a big screen � all wirelessly.', 0x576972656c65737320446973706c617920416461707465722e6a7067, 200, 2, '#D803', 'Available', 'USB and HDMI', 'Display Adapter', 'Microsoft');
--
-- Indexes for dumped tables
--
-- Only primary keys are declared; no foreign keys or secondary indexes
-- exist, so cross-table lookups (e.g. cart -> product) rely on full scans.
--
-- Indexes for table `admin`
--
ALTER TABLE `admin`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `brand`
--
ALTER TABLE `brand`
ADD PRIMARY KEY (`Brand_ID`);
--
-- Indexes for table `cart`
--
ALTER TABLE `cart`
ADD PRIMARY KEY (`Cart_ID`);
--
-- Indexes for table `category`
--
ALTER TABLE `category`
ADD PRIMARY KEY (`Category_ID`);
--
-- Indexes for table `contact`
--
ALTER TABLE `contact`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `member`
--
ALTER TABLE `member`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `orderdetail`
--
ALTER TABLE `orderdetail`
ADD PRIMARY KEY (`Order_ID`);
--
-- Indexes for table `password_resets`
--
ALTER TABLE `password_resets`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `product`
--
ALTER TABLE `product`
ADD PRIMARY KEY (`Product_ID`);
--
-- AUTO_INCREMENT for dumped tables
--
-- Each MODIFY re-declares the primary-key column as AUTO_INCREMENT and
-- seeds the counter just past the highest dumped id (gaps reflect rows
-- deleted before the dump was taken).
--
-- AUTO_INCREMENT for table `admin`
--
ALTER TABLE `admin`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `brand`
--
ALTER TABLE `brand`
MODIFY `Brand_ID` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=50;
--
-- AUTO_INCREMENT for table `cart`
--
ALTER TABLE `cart`
MODIFY `Cart_ID` int(64) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=27;
--
-- AUTO_INCREMENT for table `category`
--
ALTER TABLE `category`
MODIFY `Category_ID` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=49;
--
-- AUTO_INCREMENT for table `contact`
--
ALTER TABLE `contact`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT for table `member`
--
ALTER TABLE `member`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=15;
--
-- AUTO_INCREMENT for table `orderdetail`
--
ALTER TABLE `orderdetail`
MODIFY `Order_ID` int(240) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `password_resets`
--
ALTER TABLE `password_resets`
MODIFY `id` int(10) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `product`
--
ALTER TABLE `product`
MODIFY `Product_ID` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=49;
-- Finalize the transaction opened in the dump header and restore the
-- client's original charset/collation settings.
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 52.213307 | 461 | 0.704246 |
96fac2cfa1fb32ecff15676e97ea80e230f9bd6b | 19,171 | ps1 | PowerShell | OktaPosh/public/User.ps1 | Seekatar/OktaPosh | e773e52f2f1f90316362a01e2e4fdab0ab8efd0d | [
"MIT"
] | 2 | 2021-02-05T17:35:12.000Z | 2021-05-13T18:55:01.000Z | OktaPosh/public/User.ps1 | Seekatar/OktaPosh | e773e52f2f1f90316362a01e2e4fdab0ab8efd0d | [
"MIT"
] | 15 | 2020-11-01T21:25:51.000Z | 2021-12-31T18:58:09.000Z | OktaPosh/public/User.ps1 | Seekatar/OktaPosh | e773e52f2f1f90316362a01e2e4fdab0ab8efd0d | [
"MIT"
] | null | null | null | # https://developer.okta.com/docs/reference/api/users/
# Internal helper: POSTs a prepared user payload to the Okta /users endpoint.
# $body       - hashtable request payload (profile/credentials already built by caller)
# $GroupIds   - up to 20 group ids to assign at creation time (added to the payload)
# $Activate   - maps to the ?activate= query flag
# $NextLogin  - when set, appends &nextLogin=changePassword
# $Provider   - extra query-string fragment (e.g. '&provider=true'), already '&'-prefixed
function addUser
{
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory)]
        [HashTable] $body,
        [ValidateCount(0,20)]
        [string[]] $GroupIds,
        [switch] $Activate,
        [switch] $NextLogin,
        [string] $Provider = ""
    )
    Set-StrictMode -Version Latest

    # Fold the group assignments into the payload when any were supplied.
    if ($GroupIds) {
        $body['groupIds'] = @($GroupIds)
    }

    # Assemble the query string from named pieces rather than inline subexpressions.
    $activateValue = ternary $Activate 'true' 'false'
    $nextLoginPart = ternary $NextLogin '&nextLogin=changePassword' ''
    $relativeUri   = "users?activate=$activateValue$Provider$nextLoginPart"

    Invoke-OktaApi -RelativeUri $relativeUri -Body $body -Method POST
}
# not sure what this does
# function Convert-OktaUserToFederated {
# [CmdletBinding(SupportsShouldProcess)]
# param (
# [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
# [string] $UserId
# )
# process {
# Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/reset_password?provider=FEDERATION&sendEmail=false" -Method POST
# }
# }
# Creates an Okta user whose credentials come from an external authentication
# provider (AD, LDAP, federation, social, ...) instead of a local password.
# Login defaults to the email address when not supplied. Pipeline-friendly:
# profile fields bind by property name (given_name/family_name aliases).
function New-OktaAuthProviderUser
{
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("given_name")]
        [string] $FirstName,
        [Parameter(Mandatory,ValueFromPipelineByPropertyName,Position=1)]
        [Alias("family_name")]
        [string] $LastName,
        [Parameter(Mandatory,ValueFromPipelineByPropertyName,Position=2)]
        [string] $Email,
        [Parameter(ValueFromPipelineByPropertyName,Position=3)]
        [string] $Login,
        [Parameter(Mandatory,Position=4)]
        [ValidateSet('OKTA', 'ACTIVE_DIRECTORY', 'LDAP', 'FEDERATION', 'SOCIAL', 'IMPORT')]
        [string] $ProviderType,
        [string] $ProviderName,
        [ValidateCount(0,20)]
        [string[]] $GroupIds,
        [switch] $Activate
    )
    process {
        Set-StrictMode -Version Latest

        # Default the login name to the email address when not given explicitly.
        if (!$Login) {
            $Login = $Email
        }

        # Build the provider descriptor first; the name key is optional.
        $provider = @{
            type = $ProviderType
        }
        if ($ProviderName) {
            $provider['name'] = $ProviderName
        }

        $body = @{
            profile     = @{
                firstName = $FirstName
                lastName  = $LastName
                email     = $Email
                login     = $Login
            }
            credentials = @{
                provider = $provider
            }
        }

        # '&provider=true' tells Okta the credentials come from the provider.
        addUser -Body $body -GroupIds $GroupIds -Activate:$Activate -NextLogin:$false -Provider "&provider=true"
    }
}
function New-OktaUser
{
    <#
    .SYNOPSIS
        Creates a new Okta user, optionally with a password (or an imported
        password hash), group memberships, and a recovery question/answer pair.
    .NOTES
        Pw and PasswordHash are mutually exclusive. NextLogin requires
        Activate (enforced in the begin block). Login defaults to Email.
    #>
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("given_name")]
        [string] $FirstName,
        [Parameter(Mandatory,ValueFromPipelineByPropertyName)]
        [Alias("family_name")]
        [string] $LastName,
        [Parameter(Mandatory,ValueFromPipelineByPropertyName)]
        [string] $Email,
        [Parameter(ValueFromPipelineByPropertyName)]
        [string] $Login,
        [Parameter(ValueFromPipelineByPropertyName)]
        [string] $MobilePhone,
        [Parameter(ValueFromPipelineByPropertyName)]
        [switch] $Activate,
        [Parameter(ValueFromPipelineByPropertyName)]
        [ValidateLength(1,72)]
        [string] $Pw,
        [ValidateCount(0,20)]
        [string[]] $GroupIds,
        [switch] $NextLogin,
        [Parameter(ValueFromPipelineByPropertyName)]
        [string] $RecoveryQuestion,
        [Parameter(ValueFromPipelineByPropertyName)]
        [ValidateLength(4,100)]
        [string] $RecoveryAnswer,
        [HashTable] $PasswordHash
    )
    begin {
        # nextLogin=changePassword is only honored on an activated user.
        if ($NextLogin -and !$Activate) {
            throw "Must set Activate to use NextLogin"
        }
    }
    process {
        Set-StrictMode -Version Latest
        if ($Pw -and $PasswordHash) {
            throw "Can't supply both Pw and PasswordHash parameters"
        }
        if (!$Login) {
            $Login = $Email
        }
        $body = @{
            profile = @{
                firstName   = $FirstName
                lastName    = $LastName
                email       = $Email
                login       = $Login
                mobilePhone = $MobilePhone
            }
        }
        if ($Pw) {
            $body["credentials"] = @{
                password = @{
                    value = $Pw
                }
            }
        } elseif ($PasswordHash) {
            # Import a pre-hashed password object as-is.
            $body["credentials"] = @{
                password = $PasswordHash
            }
        }
        # BUG FIX: the old code compared $RecoveryQuestion against itself
        # (never $RecoveryAnswer), so the "Must supply question and answer"
        # guard could never fire and a question without an answer was sent.
        if ($RecoveryQuestion -or $RecoveryAnswer) {
            if (!($RecoveryQuestion -and $RecoveryAnswer)) {
                throw "Must supply question and answer."
            }
            if (!$body["credentials"]) {
                $body["credentials"] = @{}
            }
            $body["credentials"]["recovery_question"] = @{
                question = $RecoveryQuestion
                answer   = $RecoveryAnswer
            }
        }
        addUser -Body $body -GroupIds $GroupIds -Activate:$Activate -NextLogin:$NextLogin
        # Quirk! if don't pass in Login on a subsequent pipeline object
        # Login is set to previous value!
        $Login = $null
    }
}
function Disable-OktaUser
{
    <#
    .SYNOPSIS
        Deactivates an Okta user (lifecycle/deactivate), optionally sending
        the deactivation email and/or asking Okta to process asynchronously.
    #>
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess, ConfirmImpact="High")]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $SendEmail,
        [switch] $Async
    )
    process {
        # Prefer: respond-async asks Okta to queue the deactivation.
        $headers = ternary $Async @{Prefer='respond-async'} $null
        if (-not $PSCmdlet.ShouldProcess($UserId, "Disable User")) {
            return
        }
        $emailFlag = ternary $SendEmail 'true' 'false'
        $uri = "users/$UserId/lifecycle/deactivate?sendEmail=$emailFlag"
        Invoke-OktaApi -RelativeUri $uri -Method POST -NotFoundOk -AdditionalHeaders $headers
    }
}
<#
for federated, users created ACTIVE
new -> STAGED
STAGED -> Enable -> PROVISIONED
PROVISIONED -> user activates -> ACTIVE
STAGED|ACTIVE -> Disable -> DEPROVISIONED
Suspend -> SUSPENDED
Resume -> PROVISIONED
Can only delete if DEPROVISIONED
#>
function Enable-OktaUser
{
    <#
    .SYNOPSIS
        Activates (STAGED/DEPROVISIONED) or reactivates (PROVISIONED) a user;
        any other status only produces a warning.
    #>
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $SendEmail
    )
    process {
        $user = Get-OktaUser -UserId $UserId
        if (-not $user) {
            Write-Warning "UserId: '$UserId' not found"
            return
        }
        $emailFlag = ternary $SendEmail 'true' 'false'
        # The endpoint depends on where the user sits in the lifecycle.
        switch ($user.Status) {
            { $_ -in 'STAGED', 'DEPROVISIONED' } {
                Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/activate?sendEmail=$emailFlag" -Method POST
            }
            'PROVISIONED' {
                Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/reactivate?sendEmail=$emailFlag" -Method POST
            }
            default {
                Write-Warning "User status is '$($user.Status)'. Can't enable."
            }
        }
    }
}
function Suspend-OktaUser
{
    <#
    .SYNOPSIS
        Suspends a user (lifecycle/suspend). With -CheckCurrentStatus the user
        is first fetched and the call is skipped (with a warning) unless the
        current status is ACTIVE.
    #>
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $CheckCurrentStatus
    )
    process {
        if ($CheckCurrentStatus) {
            # Guard clauses: only an ACTIVE user can be suspended.
            $user = Get-OktaUser -UserId $UserId
            if (-not $user) {
                Write-Warning "UserId: '$UserId' not found"
                return
            }
            if ($user.Status -ne 'ACTIVE') {
                Write-Warning "User status is '$($user.Status)'. Can't suspend."
                return
            }
        }
        Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/suspend" -Method POST
    }
}
function Resume-OktaUser
{
    <#
    .SYNOPSIS
        Unsuspends a user (lifecycle/unsuspend). With -CheckCurrentStatus the
        user is first fetched and the call is skipped (with a warning) unless
        the current status is SUSPENDED.
    #>
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $CheckCurrentStatus
    )
    process {
        if ($CheckCurrentStatus) {
            # Guard clauses: only a SUSPENDED user can be resumed.
            $user = Get-OktaUser -UserId $UserId
            if (-not $user) {
                Write-Warning "UserId: '$UserId' not found"
                return
            }
            if ($user.Status -ne 'SUSPENDED') {
                Write-Warning "User status is '$($user.Status)'. Can't resume."
                return
            }
        }
        Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/unsuspend" -Method POST
    }
}
# Gets a single user by id/login, or lists users by query/filter/search.
# If $Query itself looks like a user id (the '00u' prefix check done by
# testQueryForId), it is treated as an id lookup instead of a list.
function Get-OktaUser {
[CmdletBinding(DefaultParameterSetName="Query")]
param (
# user id or login (ById parameter set)
[Parameter(Mandatory,ParameterSetName="ById",ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
[Alias("Id")]
[Alias("Login")]
[string] $UserId,
# free-text query (firstName/lastName/email per the Okta list-users API)
[Parameter(ParameterSetName="Query",Position=0)]
[Parameter(ParameterSetName="Search")]
[string] $Query,
# Okta filter expression
[Parameter(ParameterSetName="Query")]
[Parameter(ParameterSetName="Search")]
[string] $Filter,
# page size
[Parameter(ParameterSetName="Query")]
[Parameter(ParameterSetName="Search")]
[uint32] $Limit,
# fetch the next page of a previous paged call
[Parameter(ParameterSetName="Next")]
[switch] $Next,
# Okta search expression (supports SortBy/SortOrder)
[Parameter(ParameterSetName="Search")]
[string] $Search,
[Parameter(ParameterSetName="Search")]
[string] $SortBy,
[Parameter(ParameterSetName="Search")]
[ValidateSet("asc","desc")]
[string] $SortOrder,
# return raw JSON instead of objects
[switch] $Json,
[Parameter(ParameterSetName="Next")]
[switch] $NoWarn
)
process {
# Promote $Query to $UserId when it already looks like an Okta user id.
$UserId = testQueryForId $UserId $Query '00u'
if ($UserId) {
Invoke-OktaApi -RelativeUri "users/$UserId" -Json:$Json
} else {
# List path: build the query string from whatever list parameters were set.
Invoke-OktaApi -RelativeUri "users$(Get-QueryParameters `
-Query $Query -Limit $Limit `
-Filter $Filter `
-Search $Search -SortBy $SortBy -SortOrder $SortOrder `
)" -Json:$Json -Next:$Next -NoWarn:$NoWarn
}
}
}
function Get-OktaUserApplication {
    <#
    .SYNOPSIS
        Lists the applications assigned to a user, expanding the per-user
        application data in the response.
    #>
    [CmdletBinding(DefaultParameterSetName="Other")]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [Alias("Login")]
        [string] $UserId,
        [Parameter(ParameterSetName="Limit")]
        [uint32] $Limit,
        [Parameter(ParameterSetName="Next")]
        [switch] $Next,
        [switch] $Json,
        [Parameter(ParameterSetName="Next")]
        [switch] $NoWarn
    )
    process {
        # Filter the app list down to this user; expand=user%2F<id> pulls the
        # user-specific assignment data into the same response.
        $filterExpr = "user.id eq `"$UserId`""
        $queryString = Get-QueryParameters -Filter $filterExpr -Limit $Limit
        Invoke-OktaApi -RelativeUri "apps$queryString&expand=user%2F$UserId" -Json:$Json -Next:$Next -NoWarn:$NoWarn
    }
}
function Get-OktaUserGroup {
    <#
    .SYNOPSIS
        Lists the groups a user belongs to.
    #>
    [CmdletBinding(DefaultParameterSetName="Other")]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [Alias("Login")]
        [string] $UserId,
        [Parameter(ParameterSetName="Limit")]
        [uint32] $Limit,
        [Parameter(ParameterSetName="Next")]
        [switch] $Next,
        [switch] $Json,
        [Parameter(ParameterSetName="Next")]
        [switch] $NoWarn
    )
    process {
        $queryString = Get-QueryParameters -Limit $Limit
        $uri = "users/$UserId/groups$queryString"
        Invoke-OktaApi -RelativeUri $uri -Json:$Json -Next:$Next -NoWarn:$NoWarn
    }
}
# Permanently deletes a user. Okta only allows DELETE on a DEPROVISIONED
# user, so a still-active user is deactivated first (without an extra
# confirmation prompt) and then deleted.
function Remove-OktaUser {
[CmdletBinding(SupportsShouldProcess, ConfirmImpact = "High")]
param(
[Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
[Alias("Id")]
[string] $UserId,
# ask Okta to process the delete asynchronously (Prefer: respond-async)
[switch] $Async
)
process {
Set-StrictMode -Version Latest
$additionalHeaders = ternary $Async @{Prefer='respond-async'} $null
$user = Get-OktaUser -UserId $UserId
if ($user) {
# Show email (and login, when different) in the confirmation prompt.
$prompt = $user.profile.email
if ($user.profile.email -ne $user.profile.login) {
$prompt = "$($user.profile.email)/$($user.profile.login)"
}
if ($PSCmdlet.ShouldProcess($prompt,"Remove User")) {
# DELETE requires DEPROVISIONED; deactivate first if needed.
if ($user.Status -ne 'DEPROVISIONED') {
$null = Disable-OktaUser -UserId $UserId -Confirm:$false
}
Invoke-OktaApi -RelativeUri "users/$UserId" -Method DELETE -AdditionalHeaders $additionalHeaders
}
} else {
Write-Warning "User with id '$UserId' not found"
}
}
}
function Remove-OktaUserSession {
    <#
    .SYNOPSIS
        Clears all of a user's Okta sessions, optionally revoking the user's
        OAuth tokens as well.
    #>
    [CmdletBinding(SupportsShouldProcess, ConfirmImpact = "High")]
    param(
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $RevokeOauthTokens
    )
    process {
        if (-not $PSCmdlet.ShouldProcess($UserId, "Remove User sessions")) {
            return
        }
        $revokeFlag = ternary $RevokeOauthTokens 'true' 'false'
        Invoke-OktaApi -RelativeUri "users/$UserId/sessions?oauthTokens=$revokeFlag" -Method DELETE
    }
}
function Reset-OktaUserMfa {
    <#
    .SYNOPSIS
        Resets all enrolled MFA factors for a user (lifecycle/reset_factors).
    #>
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId
    )
    process {
        Set-StrictMode -Version Latest
        $uri = "users/$UserId/lifecycle/reset_factors"
        Invoke-OktaApi -RelativeUri $uri -Method POST
    }
}
# Starts the password-reset flow for a user (lifecycle/reset_password),
# optionally having Okta send the reset email.
function Reset-OktaPassword {
[CmdletBinding(SupportsShouldProcess,DefaultParameterSetName="Reset")]
param(
[Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
[Alias("Id")]
[string] $UserId,
[switch] $SendEmail
)
process {
Set-StrictMode -Version Latest
# Seems can't use forgot_password getting this message, even though user is ACTIVE
# --- Forgot password is not allowed in the user's current status
# if ($UseRecoveryQuestion) {
# Invoke-OktaApi -RelativeUri "users/$UserId/credentials/forgot_password?sendEmail=$(ternary $SendEmail 'true' 'false')" -Method POST
# }
Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/reset_password?sendEmail=$(ternary $SendEmail 'true' 'false')" -Method POST
}
}
function Reset-OktaPasswordWithAnswer {
    <#
    .SYNOPSIS
        Sets a new password for a user by answering their recovery question
        (credentials/forgot_password).
    #>
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [Parameter(Mandatory,Position=1)]
        [ValidateLength(4,100)]
        [string] $Answer,
        [Parameter(Mandatory,Position=2)]
        [ValidateLength(1,72)]
        [string] $NewPw
    )
    process {
        Set-StrictMode -Version Latest
        $payload = @{
            password          = @{ value = $NewPw }
            recovery_question = @{ answer = $Answer }
        }
        Invoke-OktaApi -RelativeUri "users/$UserId/credentials/forgot_password" -Method POST -Body $payload
    }
}
function Revoke-OktaPassword {
    <#
    .SYNOPSIS
        Expires a user's current password (lifecycle/expire_password),
        optionally generating a temporary password.
    #>
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $TempPw
    )
    process {
        $tempFlag = ternary $TempPw 'true' 'false'
        $uri = "users/$UserId/lifecycle/expire_password?tempPassword=$tempFlag"
        Invoke-OktaApi -RelativeUri $uri -Method POST
    }
}
function Set-OktaPassword {
    <#
    .SYNOPSIS
        Changes a user's password (credentials/change_password), validating
        the old password; the strict flag is passed through to Okta.
    #>
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [Parameter(Mandatory,Position=1)]
        [string] $OldPw,
        [Parameter(Mandatory,Position=2)]
        [ValidateLength(1,72)]
        [string] $NewPw,
        [switch] $Strict
    )
    process {
        $strictFlag = ternary $Strict 'true' 'false'
        $payload = @{
            oldPassword = @{ value = $OldPw }
            newPassword = @{ value = $NewPw }
        }
        Invoke-OktaApi -RelativeUri "users/$UserId/credentials/change_password?strict=$strictFlag" -Method POST -Body $payload
    }
}
function Set-OktaUserRecoveryQuestion {
    <#
    .SYNOPSIS
        Sets a user's recovery question and answer
        (credentials/change_recovery_question); requires the current password.
    #>
    [CmdletBinding(SupportsShouldProcess)]
    param(
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [Parameter(Mandatory,Position=1)]
        [ValidateLength(1,72)]
        [string] $Pw,
        [Parameter(Mandatory,Position=2)]
        [ValidateLength(1,100)]
        [string] $Question,
        [Parameter(Mandatory,Position=3)]
        [ValidateLength(4,100)]
        [string] $Answer
    )
    process {
        Set-StrictMode -Version Latest
        $payload = @{
            password          = @{ value = $Pw }
            recovery_question = @{ question = $Question; answer = $Answer }
        }
        Invoke-OktaApi -RelativeUri "users/$UserId/credentials/change_recovery_question" -Method POST -Body $payload
    }
}
function Set-OktaUser {
    <#
    .SYNOPSIS
        Replaces a user via PUT with the supplied user object (as returned by
        Get-OktaUser, typically modified by the caller).
    #>
    [CmdletBinding(SupportsShouldProcess)]
    param (
        [Parameter(Mandatory,Position=0,ValueFromPipeline)]
        [PSCustomObject]$User
    )
    process {
        if (-not $PSCmdlet.ShouldProcess($User.id, "Update User")) {
            return
        }
        # Serialize deeply so nested profile/credentials survive the round trip.
        $json = ConvertTo-Json $User -Depth 10
        Invoke-OktaApi -RelativeUri "users/$($User.id)" -Method PUT -Body $json
    }
}
function Unlock-OktaUser
{
    <#
    .SYNOPSIS
        Unlocks a user (lifecycle/unlock). With -CheckCurrentStatus the user
        is first fetched and the call is skipped (with a warning) unless the
        current status is LOCKED_OUT.
    #>
    [Diagnostics.CodeAnalysis.SuppressMessageAttribute("PSShouldProcess", "")]
    [CmdletBinding(SupportsShouldProcess)]
    param (
        [Parameter(Mandatory,ValueFromPipeline,ValueFromPipelineByPropertyName,Position=0)]
        [Alias("Id")]
        [string] $UserId,
        [switch] $CheckCurrentStatus
    )
    process {
        if ($CheckCurrentStatus) {
            # Guard clauses: only a LOCKED_OUT user can be unlocked.
            $user = Get-OktaUser -UserId $UserId
            if (-not $user) {
                Write-Warning "UserId: '$UserId' not found"
                return
            }
            if ($user.Status -ne 'LOCKED_OUT') {
                Write-Warning "User status is '$($user.Status)'. Can't unlock."
                return
            }
        }
        Invoke-OktaApi -RelativeUri "users/$UserId/lifecycle/unlock" -Method POST
    }
}
| 32.603741 | 186 | 0.593135 |
d16657b58e1571fa249010d6e561ad87c9cfe661 | 1,879 | kt | Kotlin | core/src/main/kotlin/com/heroslender/hmf/core/ui/components/Progress.kt | heroslender/menu-framework | 603c227b4b80d0bd7f992167fee07e36678ccc25 | [
"MIT"
] | 51 | 2021-02-22T20:02:02.000Z | 2022-02-19T20:38:45.000Z | core/src/main/kotlin/com/heroslender/hmf/core/ui/components/Progress.kt | heroslender/menu-framework | 603c227b4b80d0bd7f992167fee07e36678ccc25 | [
"MIT"
] | 4 | 2021-05-12T19:41:18.000Z | 2021-06-14T18:53:53.000Z | core/src/main/kotlin/com/heroslender/hmf/core/ui/components/Progress.kt | heroslender/menu-framework | 603c227b4b80d0bd7f992167fee07e36678ccc25 | [
"MIT"
] | 4 | 2021-03-08T21:35:16.000Z | 2021-05-16T11:06:18.000Z | package com.heroslender.hmf.core.ui.components
import com.heroslender.hmf.core.Canvas
import com.heroslender.hmf.core.IColor
import com.heroslender.hmf.core.State
import com.heroslender.hmf.core.ui.Composable
import com.heroslender.hmf.core.ui.Placeable
import com.heroslender.hmf.core.ui.modifier.Modifier
import com.heroslender.hmf.core.ui.modifier.type.DrawerModifier
import com.heroslender.hmf.core.ui.withState
/**
 * Appends a progress bar component whose fill level follows [progressState]
 * (expected 0..100) and re-renders when the state changes.
 */
fun Composable.ProgressBar(
    progressState: State<Int>,
    filledColor: IColor,
    backgroundColor: IColor,
    modifier: Modifier = Modifier,
) {
    val drawer = StatedProgressBarDrawer(progressState, filledColor, backgroundColor)
    appendComponent(modifier.then(drawer)).withState(progressState)
}
/**
 * Appends a static progress bar component filled to [progress] percent
 * (expected 0..100).
 */
fun Composable.ProgressBar(
    progress: Int,
    filledColor: IColor,
    backgroundColor: IColor,
    modifier: Modifier = Modifier,
) {
    val drawer = ProgressBarDrawer(progress, filledColor, backgroundColor)
    appendComponent(modifier.then(drawer))
}
/**
 * [ProgressBarDrawer] variant whose fill percentage is read live from
 * [progressState] on every draw instead of being fixed at construction.
 */
class StatedProgressBarDrawer(
private val progressState: State<Int>,
filledColor: IColor,
backgroundColor: IColor,
) : ProgressBarDrawer(0, filledColor, backgroundColor) {
// The superclass constructor argument (0) is irrelevant: this getter
// overrides the stored percentage entirely.
override val percentage: Int
get() = progressState.value
}
/**
 * [DrawerModifier] that paints a horizontal progress bar: the leftmost
 * [percentage] percent of the area in [filledColor], the remainder in
 * [backgroundColor].
 */
open class ProgressBarDrawer(
    open val percentage: Int,
    private val filledColor: IColor,
    private val backgroundColor: IColor,
) : DrawerModifier {

    override fun Placeable.onDraw(canvas: Canvas) {
        // Column (exclusive) where the filled section ends, rounded down.
        val filledUntil = (width * (percentage / 100.0)).toInt()
        for (column in 0 until width) {
            val color = if (column < filledUntil) filledColor else backgroundColor
            for (row in 0 until height) {
                canvas.setPixel(column, row, color)
            }
        }
    }
}
| 28.469697 | 97 | 0.690793 |
cfbe112a2710ae1beb4a732e6980849cf337eb73 | 1,357 | lua | Lua | bin/omnia.lua | tongson/OmniaJIT | d386337ebe330b09c3f29c448500a2d101b48d95 | [
"MIT"
] | 5 | 2020-12-15T03:57:06.000Z | 2021-04-26T19:08:19.000Z | bin/omnia.lua | tongson/OmniaJIT | d386337ebe330b09c3f29c448500a2d101b48d95 | [
"MIT"
] | 1 | 2021-03-18T11:02:12.000Z | 2021-03-20T11:50:43.000Z | bin/omnia.lua | tongson/OmniaJIT | d386337ebe330b09c3f29c448500a2d101b48d95 | [
"MIT"
] | null | null | null | local script = arg[1]
local lib = require "lib"
local func = lib.func
-- Rebuild the global arg table so the hosted script sees itself as arg[0]
-- and receives the remaining command-line arguments shifted down by one.
local argv = {}
argv[0] = arg[1]
if #arg > 1 then
for i = 2, #arg do
argv[i-1] = arg[i]
end
end
rawset(_G, "arg", argv)
-- Environment for the hosted script: bundled modules plus read-through
-- access to the real globals via the __index metamethod.
local ENV = {
lib = lib,
argparse = require "argparse",
lfs = require "lfs",
}
local string = string
setmetatable(ENV, {__index = _G})
local fmt, util = lib.fmt, lib.util
local spath = util.split(script)
-- Let require() resolve modules relative to the script's directory first.
package.path = string.format("%s/?.lua;%s/?/init.lua;./?.lua;./?/init.lua", spath, spath)
func.try(fmt.panic)(lib.file.test(script), "error: problem reading script '%s'.\n", script)
do
-- Read the whole script into tbl so the failing line can be quoted in errors.
local tbl = {}
for ln in io.lines(script) do
tbl[#tbl + 1] = ln
end
local chunk, err = loadstring(table.concat(tbl, "\n"), script)
-- run() pretty-prints "file:line: message" errors, quoting the offending
-- source line; anything else is reported as an unhandled condition.
-- BUG FIX: the patterns used "[%d]" (a single digit), so with greedy ".+"
-- only the LAST digit of a multi-digit line number was captured and the
-- wrong source line was quoted; "[%d]+" captures the full number.
local run = func.try(function(re, rt)
local ln = string.match(re, "^.+:([%d]+):.*")
if not ln then
fmt.warn("bug: Unhandled condition or error string.\n")
fmt.warn("error:\n %s\n", re)
return fmt.panic("Exiting.\n")
end
local sp = string.rep(" ", string.len(ln))
re = string.match(re, "^.+:[%d]+:(.*)")
return fmt.panic("error: %s\n %s |\n %s | %s\n %s |\n", re, sp, ln, rt[tonumber(ln)], sp)
end)
run(chunk, err, tbl) -- report load (compile) errors
setfenv(chunk, ENV) -- run the chunk inside ENV (Lua 5.1 / LuaJIT API)
local pr, pe = pcall(chunk)
run(pr, pe, tbl) -- report runtime errors
end
| 30.155556 | 97 | 0.563744 |
8f79cce95b7ef1947f46959bb95ec1b1df7c3909 | 714 | swift | Swift | WooKeyDash/Modules/Wallet management/ViewModels/WalletManagementViewModel.swift | WooKeyWallet/dash-wallet-ios-app | 1b7a9954340b4f7fbe6ee92bb4245fe04f1f082e | [
"MIT"
] | 2 | 2019-07-03T04:02:45.000Z | 2020-02-13T18:00:32.000Z | WooKeyDash/Modules/Wallet management/ViewModels/WalletManagementViewModel.swift | WooKeyWallet/dash-wallet-ios-app | 1b7a9954340b4f7fbe6ee92bb4245fe04f1f082e | [
"MIT"
] | null | null | null | WooKeyDash/Modules/Wallet management/ViewModels/WalletManagementViewModel.swift | WooKeyWallet/dash-wallet-ios-app | 1b7a9954340b4f7fbe6ee92bb4245fe04f1f082e | [
"MIT"
] | null | null | null | //
// WalletManagementViewModel.swift
import UIKit
/// View model for the wallet management screen.
class WalletManagementViewModel: NSObject {
// MARK: - Properties (Lazy)
// MARK: - Life Cycles
override init() {
super.init()
}
// MARK: - Methods (Public)
/// Marks the given wallet as the active one, switches the tab bar to the
/// assets tab, and pops back to the root screen.
/// - Parameter walletId: identifier of the wallet to activate.
public func updateActive(_ walletId: String) {
WalletService.shared.updateActive(walletId)
// Bail out quietly if the controller hierarchy is not in the expected
// shape (root navigation controller hosting a TabBarController).
guard
let rootViewController = AppManager.default.rootViewController,
let tabBarController = rootViewController.rootViewController as? TabBarController
else {
return
}
tabBarController.tab = .assets
rootViewController.popViewController(animated: true)
}
}
| 21.636364 | 93 | 0.617647 |
f9b6c67114357ad15238ca4d2492cbeb3b4477a4 | 312 | go | Go | app/templates/_server_echo.go | yeoman-projects/generator-go-microservice | f86d8c906fdcf2a1b695e55612b91f883a06d991 | [
"MIT"
] | 12 | 2015-10-23T14:48:26.000Z | 2019-12-02T13:30:03.000Z | app/templates/_server_echo.go | yeoman-projects/generator-go-microservice | f86d8c906fdcf2a1b695e55612b91f883a06d991 | [
"MIT"
] | 1 | 2020-07-15T20:16:18.000Z | 2020-07-15T20:16:18.000Z | app/templates/_server_echo.go | yeoman-projects/generator-go-microservice | f86d8c906fdcf2a1b695e55612b91f883a06d991 | [
"MIT"
] | 3 | 2015-10-27T22:29:35.000Z | 2021-05-12T04:48:08.000Z | package main
import (
"<%= baseName %>/controllers"
"github.com/labstack/echo"
mw "github.com/labstack/echo/middleware"
)
// main wires up an Echo HTTP server with request logging and panic
// recovery middleware, registers the hello handler at "/", and then
// blocks serving on port 9001.
func main() {
	server := echo.New()

	// Global middleware.
	server.Use(mw.Logger())
	server.Use(mw.Recover())

	// Routes.
	server.Get("/", controllers.Hello)

	// Blocks until the server stops.
	server.Run(":9001")
}
| 14.181818 | 42 | 0.615385 |
a2f6758cac921e9d85cb41f4d180eb66e24d2a75 | 172 | asm | Assembly | src/firmware-tests/Platform/Main/PollStubToAssertTest.asm | pete-restall/Cluck2Sesame-Prototype | 99119b6748847a7b6aeadc4bee42cbed726f7fdc | [
"MIT"
] | 1 | 2019-12-12T09:07:08.000Z | 2019-12-12T09:07:08.000Z | src/firmware-tests/Platform/Main/PollStubToAssertTest.asm | pete-restall/Cluck2Sesame-Prototype | 99119b6748847a7b6aeadc4bee42cbed726f7fdc | [
"MIT"
] | null | null | null | src/firmware-tests/Platform/Main/PollStubToAssertTest.asm | pete-restall/Cluck2Sesame-Prototype | 99119b6748847a7b6aeadc4bee42cbed726f7fdc | [
"MIT"
] | null | null | null | #include "Platform.inc"
#include "TailCalls.inc"
; Numeric literals default to base 10.
radix decimal
; testAssert is provided by the test harness (external symbol).
extern testAssert
; Test stub: pollForWork simply forwards to testAssert, so the test
; observes whenever the platform's polling hook is invoked.
PollStubToAssertTest code
global pollForWork
pollForWork:
tcall testAssert ; tail-call into the assertion routine
end
| 11.466667 | 25 | 0.784884 |
d2cc7b643c2133f87e4a1c727de8236f2ddff547 | 798 | php | PHP | app/Http/Controllers/ClientMainController.php | nguyengiathanh99/Laravel_shop | 9c6c52831462f22877f18686f472b51d56868f52 | [
"MIT"
] | null | null | null | app/Http/Controllers/ClientMainController.php | nguyengiathanh99/Laravel_shop | 9c6c52831462f22877f18686f472b51d56868f52 | [
"MIT"
] | null | null | null | app/Http/Controllers/ClientMainController.php | nguyengiathanh99/Laravel_shop | 9c6c52831462f22877f18686f472b51d56868f52 | [
"MIT"
] | null | null | null | <?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Http\Services\Menu\MenuService;
use App\Http\Services\Slider\SliderService;
use App\Http\Services\Product\ProductService;
/**
 * Front controller for the shop's home page.
 */
class ClientMainController extends Controller
{
// Service-layer dependencies injected through the constructor.
protected $menu;
protected $slider;
protected $product;
/**
 * @param SliderService  $slider  slider/banner data provider
 * @param MenuService    $menu    navigation menu provider
 * @param ProductService $product product catalogue provider
 */
public function __construct(SliderService $slider, MenuService $menu, ProductService $product)
{
$this->menu = $menu;
$this->slider = $slider;
$this->product = $product;
}
/**
 * Renders the home view with the menu, sliders and the full product list.
 */
public function index() {
return view('home',[
'title' => 'Shop nước hoa',
'menus' => $this->menu->showMenu(),
'sliders' => $this->slider->getSlider(),
'products' => $this->product->getAll(),
]);
}
}
| 24.9375 | 98 | 0.621554 |
dddf14e9457cfef0484e366fbcc4216657f4bdde | 876 | php | PHP | src/Cocina/ComprasBundle/DataFixtures/ORM/Cambios_precio.php | viejalospelos/cocina | c32fdcb10c0c2917c604912c1755a75892945157 | [
"MIT"
] | null | null | null | src/Cocina/ComprasBundle/DataFixtures/ORM/Cambios_precio.php | viejalospelos/cocina | c32fdcb10c0c2917c604912c1755a75892945157 | [
"MIT"
] | null | null | null | src/Cocina/ComprasBundle/DataFixtures/ORM/Cambios_precio.php | viejalospelos/cocina | c32fdcb10c0c2917c604912c1755a75892945157 | [
"MIT"
] | null | null | null | <?php
namespace Cocina\ComprasBundle\DataFixtures\ORM;
use Doctrine\Common\DataFixtures\AbstractFixture;
use Doctrine\Common\DataFixtures\OrderedFixtureInterface;
use Doctrine\Common\Persistence\ObjectManager;
use Cocina\ComprasBundle\Entity\Cambios_precio;
class CambiosPrecio extends AbstractFixture implements OrderedFixtureInterface
{
// Fixture load order; fixtures with lower values run first (the product
// fixture presumably runs earlier, since load() reads existing products).
public function getOrder(){
return 80;
}
/**
 * Seeds one price-change row per existing product.
 * Note: the new price uses mt_rand(5,20), so the fixture output is
 * intentionally non-deterministic demo data.
 */
public function load(ObjectManager $manager)
{
$productos=$manager->getRepository('ComprasBundle:Productos')->findAll();
foreach ($productos as $producto){
$CambiosPrecio=new Cambios_precio();
$CambiosPrecio->setIdProducto($producto);
// Previous price starts at 0; new price is a random demo value.
$CambiosPrecio->setPrecioAnterior(0);
$CambiosPrecio->setPrecioNuevo(mt_rand(5,20));
$CambiosPrecio->setFechaCambioPrecio(new \DateTime('now'));
$manager->persist($CambiosPrecio);
}
$manager->flush();
}
} | 27.375 | 78 | 0.762557 |
9ed049e9a7a30c84e555ff793ba5e04219758201 | 881 | asm | Assembly | Example/Example_Easy.asm | arnaud-carre/LSPlayer | 7187ee0be4f9da9615245c9b33e4532560ccc58b | [
"MIT"
] | 45 | 2021-03-10T21:45:45.000Z | 2022-03-20T16:52:25.000Z | Example/Example_Easy.asm | arnaud-carre/LSPlayer | 7187ee0be4f9da9615245c9b33e4532560ccc58b | [
"MIT"
] | 5 | 2021-03-11T08:37:50.000Z | 2022-02-15T21:40:03.000Z | Example/Example_Easy.asm | arnaud-carre/LSPlayer | 7187ee0be4f9da9615245c9b33e4532560ccc58b | [
"MIT"
] | 2 | 2021-03-15T13:46:33.000Z | 2021-03-15T14:29:51.000Z | ;
; LightSpeedPlayer usage example
;
code
; --- Minimal display setup: relevant DMA off, blank background ---
move.w #(1<<5)|(1<<6)|(1<<7)|(1<<8),$dff096 ; DMACON (SET bit clear): disable sprite/blitter/copper/bitplane DMA
bsr clearSprites
move.w #$0,$dff1fc ; $dff1fc = FMODE, reset to OCS-compatible fetch mode -- NOTE(review): confirm
move.w #$200,$dff100 ; 0 bitplan
move.w #$04f,$dff180 ; COLOR00: background colour
; Init LSP and start replay using easy CIA toolbox
lea LSPMusic,a0 ; a0 = LSP music (sequence) data
lea LSPBank,a1 ; a1 = LSP sample bank
suba.l a2,a2 ; suppose VBR=0 ( A500 )
moveq #0,d0 ; suppose PAL machine
bsr LSP_MusicDriver_CIA_Start
move.w #$e000,$dff09a ; INTENA: enable master + EXTER interrupts so the CIA timer drives playback -- NOTE(review): confirm bits
mainLoop: bra.s mainLoop ; idle forever; the music plays from the interrupt
; Include simple CIA toolkit
include "..\LightSpeedPlayer_cia.asm"
; Include generic LSP player
include "..\LightSpeedPlayer.asm"
; clearSprites: writes zeros through the 8 sprite register blocks starting
; at $dff140 (8 bytes each), so no leftover sprite is displayed.
clearSprites:
lea $dff140,a0 ; first sprite register block
moveq #8-1,d0 ; 8 sprites to clear
moveq #0,d1
.clspr: move.l d1,(a0)+ ; clear 2 longwords (8 bytes) per sprite
move.l d1,(a0)+
dbf d0,.clspr
rts
data_c ; chip-RAM data section: the sample bank must be DMA-reachable
LSPBank: incbin "rink-a-dink.lsbank" ; LSP sample bank
even
data ; regular data section for the sequence data
LSPMusic: incbin "rink-a-dink.lsmusic" ; LSP music/pattern data
even
| 16.942308 | 52 | 0.635641 |
92c672e3d0503196ff6a179333886629c6f90700 | 5,998 | c | C | SuiteSparse/GraphBLAS/Source/GB_kron_kernel.c | Jenny19880324/suitesparse-metis-for-windows | e8d953dffb8a99aa8b65ff3ff03e12a3ed72f90c | [
"BSD-3-Clause"
] | 370 | 2015-01-30T01:04:37.000Z | 2022-03-26T18:48:39.000Z | SuiteSparse/GraphBLAS/Source/GB_kron_kernel.c | nTopology/suitesparse | f9a698f35d6903e88d6daea0ca811a302963215d | [
"BSD-3-Clause"
] | 85 | 2015-02-03T22:57:35.000Z | 2021-12-17T12:39:55.000Z | SuiteSparse/GraphBLAS/Source/GB_kron_kernel.c | nTopology/suitesparse | f9a698f35d6903e88d6daea0ca811a302963215d | [
"BSD-3-Clause"
] | 234 | 2015-01-14T15:09:09.000Z | 2022-03-26T18:48:41.000Z | //------------------------------------------------------------------------------
// GB_kron_kernel: Kronecker product, C = kron (A,B)
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2018, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
//------------------------------------------------------------------------------
// C = kron(A,B) where op determines the binary multiplier to use. The type of
// A and B are compatible with the x and y inputs of z=op(x,y), but can be
// different. The type of C is the type of z. C is hypersparse if either A
// or B are hypersparse.
#include "GB.h"
GrB_Info GB_kron_kernel // C = kron (A,B)
(
GrB_Matrix *Chandle, // output matrix
const bool C_is_csc, // desired format of C
const GrB_BinaryOp op, // multiply operator
const GrB_Matrix A, // input matrix
const GrB_Matrix B, // input matrix
GB_Context Context
)
{
//--------------------------------------------------------------------------
// check inputs
//--------------------------------------------------------------------------
ASSERT (Chandle != NULL) ;
ASSERT_OK (GB_check (A, "A for kron (A,B)", GB0)) ;
ASSERT_OK (GB_check (B, "B for kron (A,B)", GB0)) ;
ASSERT_OK (GB_check (op, "op for kron (A,B)", GB0)) ;
ASSERT (!GB_PENDING (A)) ; ASSERT (!GB_ZOMBIES (A)) ;
ASSERT (!GB_PENDING (B)) ; ASSERT (!GB_ZOMBIES (B)) ;
//--------------------------------------------------------------------------
// get inputs
//--------------------------------------------------------------------------
GrB_Info info ;
(*Chandle) = NULL ;
const int64_t *restrict Ai = A->i ;
const GB_void *restrict Ax = A->x ;
const int64_t asize = A->type->size ;
const int64_t *restrict Bi = B->i ;
const GB_void *restrict Bx = B->x ;
const int64_t bsize = B->type->size ;
const int64_t bvlen = B->vlen ;
const int64_t bvdim = B->vdim ;
//--------------------------------------------------------------------------
// allocate the output matrix C
//--------------------------------------------------------------------------
// C has the same type as z for the multiply operator, z=op(x,y)
// Overflow-checked products: C is (A->vlen*bvlen) x (A->vdim*bvdim) with
// exactly nnz(A)*nnz(B) entries.
GrB_Index cvlen, cvdim, cnzmax ;
bool ok = GB_Index_multiply (&cvlen, A->vlen, bvlen) ;
ok = ok & GB_Index_multiply (&cvdim, A->vdim, bvdim) ;
ok = ok & GB_Index_multiply (&cnzmax, GB_NNZ (A), GB_NNZ (B)) ;
ASSERT (ok) ;
// C is hypersparse if either A or B are hypersparse
bool C_is_hyper = (cvdim > 1) && (A->is_hyper || B->is_hyper) ;
GrB_Matrix C = NULL ; // allocate a new header for C
// NOTE: GB_CREATE reports its status through the local variable 'info'.
GB_CREATE (&C, op->ztype, (int64_t) cvlen, (int64_t) cvdim, GB_Ap_calloc,
C_is_csc, GB_SAME_HYPER_AS (C_is_hyper), B->hyper_ratio,
A->nvec_nonempty * B->nvec_nonempty, cnzmax, true) ;
if (info != GrB_SUCCESS)
{
// out of memory
return (info) ;
}
//--------------------------------------------------------------------------
// get C and workspace
//--------------------------------------------------------------------------
int64_t *restrict Ci = C->i ;
GB_void *restrict Cx = C->x ;
const int64_t csize = C->type->size ;
// Stack scratch (VLAs) for one typecasted scalar of A and of B.
char awork [asize] ;
char bwork [bsize] ;
GxB_binary_function fmult = op->function ;
GB_cast_function
cast_A = GB_cast_factory (op->xtype->code, A->type->code),
cast_B = GB_cast_factory (op->ytype->code, B->type->code) ;
// FUTURE: this could be done faster with built-in types and operators
//--------------------------------------------------------------------------
// C = kron (A,B)
//--------------------------------------------------------------------------
int64_t cnz, cnz_last, cj_last ;
GB_jstartup (C, &cj_last, &cnz, &cnz_last) ;
// For each vector aj of A and bj of B, C's vector cj = aj*bvdim + bj;
// row indices likewise expand as ci = ai*bvlen + bi.
GBI_iterator A_iter ;
for (GB_each_vector (A_iter, A))
{
int64_t GBI1_initj (A_iter, aj, pA_start, pA_end) ;
int64_t ajblock = aj * bvdim ;
GBI_iterator B_iter ;
for (GB_each_vector (B_iter, B))
{
int64_t GBI1_initj (B_iter, bj, pB_start, pB_end) ;
int64_t cj = ajblock + bj ;
for (int64_t pa = pA_start ; pa < pA_end ; pa++)
{
// awork = A(ai,aj), typecasted to op->xtype
int64_t ai = Ai [pa] ;
int64_t aiblock = ai * bvlen ;
cast_A (awork, Ax +(pa*asize), asize) ;
for (int64_t pb = pB_start ; pb < pB_end ; pb++)
{
// bwork = B(bi,bj), typecasted to op->ytype
int64_t bi = Bi [pb] ;
cast_B (bwork, Bx +(pb*bsize), bsize) ;
// C(ci,cj) = A(ai,aj) * B(bi,bj)
int64_t ci = aiblock + bi ;
Ci [cnz] = ci ;
fmult (Cx +(cnz*csize), awork, bwork) ;
cnz++ ;
}
}
// cannot fail since C->plen is the upper bound: the product of
// number of non empty vectors of A and B
GrB_Info info = GB_jappend (C, cj, &cj_last, cnz, &cnz_last,
Context) ;
ASSERT (info == GrB_SUCCESS) ;
#if 0
// if it could fail, do this:
if (info != GrB_SUCCESS) { GB_MATRIX_FREE (&C) ; return (info) ; }
#endif
}
}
GB_jwrapup (C, cj_last, cnz) ;
//--------------------------------------------------------------------------
// return result
//--------------------------------------------------------------------------
ASSERT (cnz == GB_NNZ (A) * GB_NNZ (B)) ;
ASSERT_OK (GB_check (C, "C=kron(A,B)", GB0)) ;
(*Chandle) = C ;
return (GrB_SUCCESS) ;
}
| 35.702381 | 80 | 0.437479 |
929d4683b9d81e81bc5214fb8fc3a2d5fc301a18 | 134 | sql | SQL | src/main/resources/db/migration/V1__inital_tables.sql | saiaku-gaming/notification-service-server | 395a0dc7a6a4dd4e998e6cdd42686c2d1364add7 | [
"MIT"
] | null | null | null | src/main/resources/db/migration/V1__inital_tables.sql | saiaku-gaming/notification-service-server | 395a0dc7a6a4dd4e998e6cdd42686c2d1364add7 | [
"MIT"
] | null | null | null | src/main/resources/db/migration/V1__inital_tables.sql | saiaku-gaming/notification-service-server | 395a0dc7a6a4dd4e998e6cdd42686c2d1364add7 | [
"MIT"
-- Servers known to the service, keyed by id and reachable at ip_address:port.
-- (Restores the CREATE TABLE statement whose first and last lines were
-- corrupted by interleaved non-SQL text, which broke the migration.)
CREATE TABLE registered_server (
    -- caller-supplied unique identifier for the server
    registered_server_id TEXT NOT NULL PRIMARY KEY,
    ip_address TEXT NOT NULL,
    port INTEGER NOT NULL
);
120c7ad31e3f578e3ff916a83527257c5f2c117d | 1,142 | swift | Swift | swiftui-ios-swift/SwiftUIApp/ContentView.swift | turlodales/here-ios-sdk-examples | 23d4086d6f9093d4eb2306ea405911614a0ccd67 | [
"MIT"
] | 63 | 2017-03-03T13:14:44.000Z | 2021-11-23T18:44:57.000Z | swiftui-ios-swift/SwiftUIApp/ContentView.swift | turlodales/here-ios-sdk-examples | 23d4086d6f9093d4eb2306ea405911614a0ccd67 | [
"MIT"
] | 122 | 2017-05-02T15:28:18.000Z | 2022-03-28T20:23:53.000Z | swiftui-ios-swift/SwiftUIApp/ContentView.swift | turlodales/here-ios-sdk-examples | 23d4086d6f9093d4eb2306ea405911614a0ccd67 | [
"MIT"
] | 60 | 2017-03-22T20:19:12.000Z | 2021-12-01T10:39:06.000Z | /*
* Copyright (c) 2011-2021 HERE Europe B.V.
* All rights reserved.
*/
import SwiftUI
import NMAKit
/// Root screen: zoom/tilt readouts above a wrapped HERE map, with a
/// scheme picker underneath.
struct ContentView: View {
@EnvironmentObject var state: GlobalMapStateModel // Map Settings state
@State var activeMapScheme = NMAMapSchemeNormalDay // Scheme selected by Picker
var body: some View {
VStack {
// Live zoom/tilt values published by the map via GlobalMapStateModel.
HStack {
MapSettinText(text: "Zoom", value: state.zoom)
MapSettinText(text: "Tilt", value: state.tilt)
}
// A wrapped NMAMapView for SwiftUI usage
HEREMapsView(stateModel: _state, mapScheme: $activeMapScheme)
// SwiftUI way of creating UISegmentedControl equivalent
SchemePicker(selectedMapScheme: $activeMapScheme).padding()
}
}
}
#if DEBUG
/// Xcode canvas preview, with a fresh map state model injected.
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView().environmentObject(GlobalMapStateModel())
}
}
#endif
/// Observable map state shared through the SwiftUI environment.
class GlobalMapStateModel : ObservableObject {
@Published var zoom: Float = 0 // current map zoom
@Published var tilt: Float = 0 // current map tilt
// Add another settings value for observation here
}
| 25.954545 | 83 | 0.652364 |
81be7b98c3c46b8a316ce0302f0aebba31019bdc | 406 | rs | Rust | src/state.rs | coco-sha/arcswap | 81a69f3933dc46a51c8e17fe09da1a7df654d2e2 | [
"Apache-2.0"
] | 3 | 2021-12-09T21:32:09.000Z | 2022-03-29T22:48:11.000Z | src/state.rs | coco-sha/arcswap | 81a69f3933dc46a51c8e17fe09da1a7df654d2e2 | [
"Apache-2.0"
] | null | null | null | src/state.rs | coco-sha/arcswap | 81a69f3933dc46a51c8e17fe09da1a7df654d2e2 | [
"Apache-2.0"
] | 2 | 2021-12-30T09:58:31.000Z | 2022-03-29T22:07:06.000Z | use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use cosmwasm_std::{Addr, Uint128};
use cw_storage_plus::Item;
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Token {
pub reserve: Uint128,
pub address: Option<Addr>,
pub denom: String,
}
pub const TOKEN1: Item<Token> = Item::new("token1");
pub const TOKEN2: Item<Token> = Item::new("token2");
| 25.375 | 70 | 0.711823 |
a5909bc29bf4f6a9bc1c44318f011d1c3c2ccd58 | 3,337 | swift | Swift | Example/BLPay/AppPay.swift | iosRober/BLPay | b76e86de81e43b8bd3ea419e76293cf520ba5ca1 | [
"MIT"
] | null | null | null | Example/BLPay/AppPay.swift | iosRober/BLPay | b76e86de81e43b8bd3ea419e76293cf520ba5ca1 | [
"MIT"
] | null | null | null | Example/BLPay/AppPay.swift | iosRober/BLPay | b76e86de81e43b8bd3ea419e76293cf520ba5ca1 | [
"MIT"
] | null | null | null | //
// AppPay.swift
// BLPay_Example
//
// Created by lin bo on 2019/8/7.
// Copyright © 2019 CocoaPods. All rights reserved.
//
import Foundation
/// 商品列表
enum ACG_PAY_ID: BL_APP_ID {
case pay50
case pay98
case pay148
case pay198
case pay248
case pay298
func value() -> String {
switch self {
case .pay50:
return "ACG_PAY_50"
case .pay98:
return "ACG_PAY_98"
case .pay148:
return "ACG_PAY_148"
case .pay198:
return "ACG_PAY_198"
case .pay248:
return "ACG_PAY_248"
case .pay298:
return "ACG_PAY_298"
}
}
func price() -> Int {
switch self {
case .pay50: return 50
case .pay98: return 98
case .pay148: return 148
case .pay198: return 198
case .pay248: return 248
case .pay298: return 298
}
}
}
class AppPay {
static let shared = AppPay()
func configIAP() {
// 设置商品列表
BLPay.shared.configApplePay(keychainGroup: "my app share key chain",
productIDs: [ACG_PAY_ID.pay50,
ACG_PAY_ID.pay98,
ACG_PAY_ID.pay148,
ACG_PAY_ID.pay198,
ACG_PAY_ID.pay248,
ACG_PAY_ID.pay298])
/// 注册支付后端校验
BLPay.shared.regiestPayCheckBlock { (payload, callback) in
self.requestCheckIAP(payload: payload) { (b) in
callback(b)
}
}
/// 在一次充值情况下 提示用户,进入支付业务场景就失效了
BLPay.shared.setProgressCallback { (result, id) in
switch result {
case .checking:
BLShowAlert("充值中")
case .checkedSuccess:
BLShowAlert("充值成功")
case .checkedButError:
BLShowAlert("服务器验证失败")
case .checkedFailed:
BLShowAlert("充值失败,请检测网络")
default:
break
}
}
}
/// App服务器接口请求
fileprivate func requestCheckIAP(payload: BLPayloadModel, callback:@escaping ((BLIAPResultCheck) -> ())) {
// 模拟成功业务
DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
callback(.checkedSuccess)
}
/* 业务代码
guard UserHelper.checklogin() else {
callback(.checkedFailed)
return
}
OrderServer.shared.requestCheckIAP(data) {(code, msg, result) in
switch code {
case CODE_SUCCESS:
callback(.checkedSuccess)
case 1023:
callback(.checkedButError)
default:
callback(.checkedFailed)
}
}
*/
}
/// 购买
@discardableResult
func pay(_ id: ACG_PAY_ID) -> Bool {
var item = BLPayItem()
item.iapPriductId = id
item.userID = "UserHelper...userID"
return BLPay.shared.pay(item: item)
}
}
| 24.902985 | 110 | 0.462691 |
7a394d4b887b528825107991d91df962a38b2106 | 1,330 | ps1 | PowerShell | PS/_Tests/Team.Get-TfsTeam.Tests.ps1 | igoravl/tfscmdlets | 16b4a4f3d9f22d32c9cf9846971336c053c1542c | [
"MIT"
] | 59 | 2015-03-11T10:50:06.000Z | 2019-06-20T09:15:09.000Z | PS/_Tests/Team.Get-TfsTeam.Tests.ps1 | igoravl/tfscmdlets | 16b4a4f3d9f22d32c9cf9846971336c053c1542c | [
"MIT"
] | 51 | 2015-09-09T15:24:59.000Z | 2019-05-23T21:09:18.000Z | PS/_Tests/Team.Get-TfsTeam.Tests.ps1 | igoravl/tfscmdlets | 16b4a4f3d9f22d32c9cf9846971336c053c1542c | [
"MIT"
] | 26 | 2015-01-06T08:48:50.000Z | 2019-06-20T09:53:06.000Z | . $PSScriptRoot/_TestSetup.ps1
Describe (($MyInvocation.MyCommand.Name -split '\.')[-3]) {
Context 'Integration Tests' {
It 'Should throw on parameterless invocation' {
{ Get-TfsTeam } | Should -Throw
}
It 'Should get all teams' {
Get-TfsTeam -Project $tfsProject | Select-Object -ExpandProperty Name | Sort-Object | Should -Be @('PUL', 'PUL-DB', 'TestProject Team')
}
It 'Should get some teams' {
Get-TfsTeam 'PUL*' -Project $tfsProject | Select-Object -ExpandProperty Name | Sort-Object | Should -Be @('PUL', 'PUL-DB')
}
It 'Should get default team' {
Get-TfsTeam -Default -Project $tfsProject | Select-Object -ExpandProperty Name | Should -Be 'TestProject Team'
}
It 'Should get settings with -IncludeSettings' {
(Get-TfsTeam -Default -Project $tfsProject).Settings | Should -BeNullOrEmpty
(Get-TfsTeam -Default -Project $tfsProject -IncludeSettings).Settings | Should -Not -BeNullOrEmpty
}
It 'Should get members with -QueryMembership' {
(Get-TfsTeam -Default -Project $tfsProject).TeamMembers.Length | Should -Be 0
(Get-TfsTeam -Default -Project $tfsProject -QueryMembership).TeamMembers.Length | Should -Be 1
}
}
}
| 38 | 147 | 0.622556 |
01ea9754db84feaef2b8ba940923b8943ff7bb8e | 8,309 | rs | Rust | crates/dts/main.rs | martinohmann/dts | 3b027f4ac4f1cdf3d4b96f8c249954b85474be4b | [
"MIT"
] | 10 | 2021-12-17T16:37:36.000Z | 2022-03-16T18:58:47.000Z | crates/dts/main.rs | martinohmann/dts | 3b027f4ac4f1cdf3d4b96f8c249954b85474be4b | [
"MIT"
] | 21 | 2021-12-17T15:22:46.000Z | 2022-03-27T01:22:47.000Z | crates/dts/main.rs | martinohmann/dts | 3b027f4ac4f1cdf3d4b96f8c249954b85474be4b | [
"MIT"
] | null | null | null | #![doc = include_str!("../../README.md")]
#![deny(missing_docs)]
mod args;
#[cfg(feature = "color")]
mod highlighting;
mod output;
mod paging;
mod utils;
#[cfg(feature = "color")]
use crate::highlighting::{print_themes, ColoredStdoutWriter, HighlightingConfig};
use crate::{
args::{InputOptions, Options, OutputOptions, TransformOptions},
output::StdoutWriter,
paging::PagingConfig,
};
use anyhow::{anyhow, Context, Result};
use clap::{App, IntoApp, Parser};
use clap_generate::{generate, Shell};
use dts_core::{de::Deserializer, jq::Jq, ser::Serializer, Encoding, Error, Sink, Source};
use rayon::prelude::*;
use serde_json::Value;
use std::fs::{self, File};
use std::io::{self, BufWriter};
fn deserialize(source: &Source, opts: &InputOptions) -> Result<Value> {
let reader = source
.to_reader()
.with_context(|| format!("failed to create reader for source `{}`", source))?;
let encoding = opts
.input_encoding
.or_else(|| reader.encoding())
.context("unable to detect input encoding, please provide it explicitly via -i")?;
let mut de = Deserializer::with_options(reader, opts.into());
de.deserialize(encoding)
.with_context(|| format!("failed to deserialize `{}` from `{}`", encoding, source))
}
fn deserialize_many(sources: &[Source], opts: &InputOptions) -> Result<Value> {
let results = if opts.continue_on_error {
sources
.par_iter()
.filter_map(|src| match deserialize(src, opts) {
Ok(val) => Some((src, val)),
Err(_) => {
eprintln!("Warning: Source `{}` skipped due to errors", src);
None
}
})
.collect::<Vec<_>>()
} else {
sources
.par_iter()
.map(|src| deserialize(src, opts).map(|val| (src, val)))
.collect::<Result<Vec<_>>>()?
};
if opts.file_paths {
Ok(Value::Object(
results
.into_iter()
.map(|res| (res.0.to_string(), res.1))
.collect(),
))
} else {
Ok(Value::Array(results.into_iter().map(|res| res.1).collect()))
}
}
fn transform(value: Value, opts: &TransformOptions) -> Result<Value> {
match &opts.jq_expression {
Some(expr) => {
let jq = std::env::var("DTS_JQ")
.ok()
.map(Jq::with_executable)
.unwrap_or_else(Jq::new)
.context(
"install `jq` or provide the `jq` executable path \
in the `DTS_JQ` environment variable",
)?;
let expr = match expr.strip_prefix('@') {
Some(path) => fs::read_to_string(path)?,
None => expr.to_owned(),
};
jq.process(&expr, &value)
.context("failed to transform value")
}
None => Ok(value),
}
}
fn serialize(sink: &Sink, value: Value, opts: &OutputOptions) -> Result<()> {
let encoding = opts
.output_encoding
.or_else(|| sink.encoding())
.unwrap_or(Encoding::Json);
let paging_config = PagingConfig::new(opts.paging, opts.pager.as_deref());
#[cfg(feature = "color")]
let assets = highlighting::load_assets();
let writer: Box<dyn io::Write> = match sink {
#[cfg(feature = "color")]
Sink::Stdout => {
if opts.color.should_colorize() {
let config = HighlightingConfig::new(&assets, paging_config, opts.theme.as_deref());
Box::new(ColoredStdoutWriter::new(encoding, config))
} else {
Box::new(StdoutWriter::new(paging_config))
}
}
#[cfg(not(feature = "color"))]
Sink::Stdout => Box::new(StdoutWriter::new(paging_config)),
Sink::Path(path) => Box::new(
File::create(path)
.with_context(|| format!("failed to create writer for sink `{}`", sink))?,
),
};
let mut ser = Serializer::with_options(BufWriter::new(writer), opts.into());
match ser.serialize(encoding, value) {
Ok(()) => Ok(()),
Err(Error::Io(err)) if err.kind() == io::ErrorKind::BrokenPipe => Ok(()),
Err(err) => Err(err),
}
.with_context(|| format!("failed to serialize `{}` to `{}`", encoding, sink))
}
fn serialize_many(sinks: &[Sink], value: Value, opts: &OutputOptions) -> Result<()> {
let values = match value {
Value::Array(mut values) => {
if sinks.len() < values.len() {
// There are more values than files. The last file takes an array of the left
// over values.
let rest = values.split_off(sinks.len() - 1);
values.push(Value::Array(rest));
}
values
}
_ => {
return Err(anyhow!(
"when using multiple output files, the data must be an array"
))
}
};
if sinks.len() > values.len() {
eprintln!(
"Warning: skipping {} output files due to lack of data",
sinks.len() - values.len()
);
}
sinks
.iter()
.zip(values.into_iter())
.try_for_each(|(file, value)| serialize(file, value, opts))
}
fn print_completions(app: &mut App, shell: Shell) {
generate(shell, app, app.get_name().to_string(), &mut io::stdout());
}
fn main() -> Result<()> {
let opts = Options::parse();
if let Some(shell) = opts.generate_completion {
let mut app = Options::into_app();
print_completions(&mut app, shell);
std::process::exit(0);
}
#[cfg(feature = "color")]
if opts.output.list_themes {
print_themes(opts.output.color)?;
std::process::exit(0);
}
let mut sources = Vec::with_capacity(opts.sources.len());
// If sources contains directories, force deserialization into a collection (array or object
// with sources as keys depending on the input options) even if all directory globs only
// produce a zero or one sources. This will ensure that deserializing the files that resulted
// from directory globs always produces a consistent structure of the data.
let dir_sources = opts.sources.iter().any(|s| s.is_dir());
for source in opts.sources {
match source.as_path() {
Some(path) => {
if path.is_dir() {
let pattern = opts
.input
.glob
.as_ref()
.context("--glob is required if sources contain directories")?;
let mut matches = source.glob_files(pattern)?;
sources.append(&mut matches);
} else {
sources.push(path.into());
}
}
None => sources.push(source),
}
}
if sources.is_empty() && !atty::is(atty::Stream::Stdin) {
// Input is piped on stdin.
sources.push(Source::Stdin);
}
let sinks = opts.sinks;
// Validate sinks to prevent accidentally overwriting existing files.
for sink in &sinks {
if let Sink::Path(path) = sink {
if !path.exists() {
continue;
}
if !path.is_file() {
return Err(anyhow!(
"output file `{}` exists but is not a file",
path.display()
));
} else if !opts.output.overwrite {
return Err(anyhow!(
"output file `{}` exists, pass --overwrite to overwrite it",
path.display()
));
}
}
}
let value = match (sources.len(), dir_sources) {
(0, false) => return Err(anyhow!("input file or data on stdin expected")),
(1, false) => deserialize(&sources[0], &opts.input)?,
(_, _) => deserialize_many(&sources, &opts.input)?,
};
let value = transform(value, &opts.transform)?;
if sinks.len() <= 1 {
serialize(sinks.get(0).unwrap_or(&Sink::Stdout), value, &opts.output)
} else {
serialize_many(&sinks, value, &opts.output)
}
}
| 32.081081 | 100 | 0.534842 |
57a1f0768e50e35f3250bbc2418283915a50a126 | 5,397 | h | C | src/hardware_in_the_loop/HIL_sensors/HILSensor.h | skyward-er/on-board-software | b0739e27f345d6bd07f21948ee06c99757662381 | [
"MIT"
] | 4 | 2021-11-10T09:43:13.000Z | 2022-02-28T07:32:37.000Z | src/hardware_in_the_loop/HIL_sensors/HILSensor.h | skyward-er/on-board-software | b0739e27f345d6bd07f21948ee06c99757662381 | [
"MIT"
] | null | null | null | src/hardware_in_the_loop/HIL_sensors/HILSensor.h | skyward-er/on-board-software | b0739e27f345d6bd07f21948ee06c99757662381 | [
"MIT"
] | null | null | null | /* Copyright (c) 2020 Skyward Experimental Rocketry
* Author: Emilio Corigliano
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include <typeinfo>
#include "HILTimestampManagement.h"
#include "TimestampTimer.h"
#include "hardware_in_the_loop/HILConfig.h"
#include "hardware_in_the_loop/simulator_communication/HILTransceiver.h"
#include "math/Vec3.h"
#include "sensors/Sensor.h"
#include "sensors/SensorData.h"
#include "HILSensorsData.h"
/**
* @brief Fake sensor base used for the simulation. Every sensor for the
* simulation should extend this class.
*
* This class is used to simulate as near as possible the situation of the
* OBSW during the flight, using fake sensors classes instead of the real
* ones, taking their data from the data received from a simulator.
*/
template <typename HILSensorData>
class HILSensor : public virtual HILTimestampManagement,
public virtual Sensor<HILSensorData>
{
public:
/**
* @brief constructor of the fake sensor used for the simulation.
*
* @param matlab reference of the MatlabTransceiver object that deals with
* the simulator
* @param n_data_sensor number of samples in every period of simulation
*/
HILSensor(HILTransceiver *matlab, int n_data_sensor)
{
this->sensorData = matlab->getSensorData();
this->n_data_sensor = n_data_sensor;
/* Registers the sensor on the MatlabTransceiver to be notified when a
* new packet of simulated data arrives */
matlab->addResetSampleCounter(this);
}
/**
* @brief sets the sample counter to 0.
*
* Updates the reference timestamp, resets the sampleCounter and clears the
* last_error variable. Called by the HILTransceiver when receives a new
* simulated period.
*/
void resetSampleCounter() override
{
this->last_error = SensorErrors::NO_ERRORS;
sampleCounter = 0;
}
/**
* @brief Initializes the fake sensor.
*/
bool init() override
{
if (initialized)
{
this->last_error = SensorErrors::ALREADY_INIT;
TRACE("ALREADY INITIALIZED!");
}
else
{
initialized = true;
}
return initialized;
}
bool selfTest() override { return true; }
protected:
/**
* @brief Updates the internal structure of the fake sensor from the
* structure received from the simulator.
*
* Takes the next unread sample available, continues sending the last sample
* with the old timestamp if we already read all the samples.
*/
HILSensorData sampleImpl() override
{
if (initialized)
{
/* updates the last_sensor only if there is still data to be read */
if (sampleCounter >= n_data_sensor)
{
this->last_error = SensorErrors::NO_NEW_DATA;
/*TRACE("[%s] NO NEW DATA! Simulation error\n",
typeid(this).name());*/
}
else if (this->last_error != SensorErrors::NO_NEW_DATA)
{
return updateData();
}
}
else
{
this->last_error = SensorErrors::NOT_INIT;
TRACE(
"[HILSensor] sampleImpl() : not initialized, unable to "
"sample data \n");
}
return this->last_sample;
}
/**
* @brief updates the timestamp and increments the sampleCounter.
* WARNING: You should call this method after all the values has been
* updated, it modifies the sampleCounter!
* @return the timestamp of the sample
*/
uint64_t updateTimestamp()
{
sampleCounter++;
return TimestampTimer::getTimestamp();
}
/**
* @brief Function that updates the sensor structure with new data.
*
* Sensor struct updated from MatlabTransceiver::sensorData.
* WARNING: This method should call **AT THE END** the updateTimestamp
* method.
*/
virtual HILSensorData updateData() = 0;
bool initialized = false;
int sampleCounter = 0; /**< counter of the next sample to take */
int n_data_sensor; /**< number of samples in every period */
SimulatorData *sensorData; /**< reference to the SensorData structure */
}; | 33.943396 | 80 | 0.65592 |
20463f65c8cb403b7a6ccd4db9ca1ff478f942e9 | 989 | sql | SQL | coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/log/Release_2_0_logs/KCAWD-271.sql | mrudulpolus/kc | 55f529e5ff0985f3bf5247e2a1e63c5dec07f560 | [
"ECL-2.0"
] | null | null | null | coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/log/Release_2_0_logs/KCAWD-271.sql | mrudulpolus/kc | 55f529e5ff0985f3bf5247e2a1e63c5dec07f560 | [
"ECL-2.0"
] | null | null | null | coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/log/Release_2_0_logs/KCAWD-271.sql | mrudulpolus/kc | 55f529e5ff0985f3bf5247e2a1e63c5dec07f560 | [
"ECL-2.0"
] | null | null | null | -- Table Script
CREATE TABLE AWARD_HIERARCHY (
ROOT_AWARD_NUMBER VARCHAR2(12) NOT NULL,
AWARD_NUMBER VARCHAR2(12) NOT NULL,
PARENT_AWARD_NUMBER VARCHAR2(12) NOT NULL,
UPDATE_TIMESTAMP DATE NOT NULL,
UPDATE_USER VARCHAR2(60) NOT NULL,
VER_NBR NUMBER(8,0) DEFAULT 1 NOT NULL,
OBJ_ID VARCHAR2(36) DEFAULT SYS_GUID() NOT NULL);
-- Primary Key Constraint
ALTER TABLE AWARD_HIERARCHY
ADD CONSTRAINT PK_AWARD_HIERARCHY
PRIMARY KEY (AWARD_NUMBER);
-- this won't be possible because award_number is not a unique or primary key in Award table.
--ALTER TABLE AWARD_HIERARCHY
--ADD CONSTRAINT FK_AWARD_HIERARCHY
--FOREIGN KEY (AWARD_NUMBER)
--REFERENCES AWARD (AWARD_NUMBER);
-- View for Coeus compatibility
CREATE OR REPLACE VIEW OSP$AWARD_HIERARCHY AS SELECT
ROOT_AWARD_NUMBER ROOT_MIT_AWARD_NUMBER,
AWARD_NUMBER MIT_AWARD_NUMBER,
PARENT_AWARD_NUMBER PARENT_MIT_AWARD_NUMBER,
UPDATE_TIMESTAMP,
UPDATE_USER
FROM AWARD_HIERARCHY; | 32.966667 | 93 | 0.767442 |
c7ba3eaba21d83b4b33669dd232e40c9720d5f55 | 4,589 | py | Python | luft/tasks/generic_embulk_task.py | profesia/luft | de3423fdad561483b1a66aa665167661adbde4e5 | [
"MIT"
] | 1 | 2020-03-20T17:51:50.000Z | 2020-03-20T17:51:50.000Z | luft/tasks/generic_embulk_task.py | profesia/luft | de3423fdad561483b1a66aa665167661adbde4e5 | [
"MIT"
] | 248 | 2020-09-15T06:00:41.000Z | 2021-08-03T05:42:14.000Z | luft/tasks/generic_embulk_task.py | Lacoz/luft_old | 3fcd92a9b9b17335e4b91ac58ef2d9ec79a5a879 | [
"MIT"
] | 2 | 2019-08-20T12:50:01.000Z | 2019-09-02T12:19:03.000Z | # -*- coding: utf-8 -*-
"""Generic Embulk Task."""
from pathlib import Path
from typing import Dict, Optional
from luft.common.config import (AWS_ACCESS_KEY_ID, AWS_BUCKET, AWS_ENDPOINT, AWS_SECRET_ACCESS_KEY,
BLOB_STORAGE, EMBULK_DEFAULT_TEMPLATE, GCS_APP_NAME,
GCS_AUTH_METHOD, GCS_BUCKET, GCS_EMAIL, GCS_JSON_KEYFILE,
GCS_P12_KEYFILE)
from luft.common.utils import NoneStr
from luft.tasks.generic_task import GenericTask
import pkg_resources
class GenericEmbulkTask(GenericTask):
"""Generic Embulk JDBC Task."""
def __init__(self, name: str, task_type: str, source_system: str, source_subsystem: str,
embulk_template: NoneStr = None, yaml_file: NoneStr = None,
env: NoneStr = None, thread_name: NoneStr = None, color: NoneStr = None):
"""Initialize Embulk JDBC Task.
Parameters:
name (str): name of task.
task_type (str): type of task. E.g. embulk-jdbc-load, mongo-load, etc.
source_system (str): name of source system. Usually name of database.
Used for better organization especially on blob storage. E.g. jobs, prace, pzr.
source_subsystem (str): name of source subsystem. Usually name of schema.
Used for better organization especially on blob storage. E.g. public, b2b.
env (str): environment - PROD, DEV.
thread_name(str): name of thread for Airflow parallelization.
color (str): hex code of color. Airflow operator will have this color.
"""
super().__init__(name=name, task_type=task_type,
source_system=source_system,
source_subsystem=source_subsystem,
yaml_file=yaml_file,
env=env, thread_name=thread_name, color=color)
def _set_embulk_template(self, embulk_template: Optional[str]):
"""Set Embulk template if specified.
Else set template from default Embulk templates.
"""
if embulk_template:
if Path(embulk_template).exists():
self.embulk_template = embulk_template
else:
raise FileNotFoundError(
'File `%s` does not exists.' % embulk_template)
else:
tmp_embulk_template = pkg_resources.resource_filename(
'luft', EMBULK_DEFAULT_TEMPLATE[self.task_type])
self.embulk_template = tmp_embulk_template.format(
blob_storage=BLOB_STORAGE)
def _get_embulk_template(self) -> str:
"""Return Embulk template."""
return self.embulk_template
def _get_blob_storage_params(self) -> Dict[str, str]:
"""Get blob storage enviromental variables."""
blob_storage = BLOB_STORAGE.lower()
if blob_storage == 'aws':
return self._get_aws_blob_storage_params()
elif blob_storage == 'gcs':
return self._get_gcs_blob_storage_params()
else:
raise KeyError(
'Blob storage %s you specified is not supported!' % blob_storage)
@staticmethod
def _get_aws_blob_storage_params() -> Dict[str, str]:
"""Get AWS S3 blob storage parameters."""
params = {
'AWS_BUCKET': AWS_BUCKET,
'AWS_ENDPOINT': AWS_ENDPOINT,
'AWS_ACCESS_KEY_ID': AWS_ACCESS_KEY_ID,
'AWS_SECRET_ACCESS_KEY': AWS_SECRET_ACCESS_KEY
}
GenericEmbulkTask.check_mandatory(params)
return params
def _get_gcs_blob_storage_params(self) -> Dict[str, Optional[str]]:
"""Get GCS blob storage parameters."""
mandatory_params = {
'GCS_BUCKET': GCS_BUCKET,
'GCS_AUTH_METHOD': GCS_AUTH_METHOD
}
params = {
'GCS_APP_NAME': self.get_null_param('application_name', GCS_APP_NAME),
'GCS_SERVICE_ACCOUNT_EMAIL': self.get_null_param('service_account_email', GCS_EMAIL),
'GCS_P12_KEYFILE': self.get_null_param('p12_keyfile', GCS_P12_KEYFILE),
'GCS_JSON_KEYFILE': self.get_null_param('json_keyfile', GCS_JSON_KEYFILE)
}
GenericEmbulkTask.check_mandatory(mandatory_params)
params.update(mandatory_params)
return params
@staticmethod
def get_null_param(param: str, value: str) -> Optional[str]:
"""Return value prefixed with param for nullable params in Embulk template."""
return f'{param}: {value}' if value else None
| 42.88785 | 99 | 0.628459 |
381dc5d01190f8c12cb3aebc9baa15cfd865f69f | 135 | ps1 | PowerShell | Chapter23/1.2.6.ArrayComparisonNotMatch.ps1 | wagnerhsu/packt-Mastering-Windows-PowerShell-Scripting-Fourth-Edition | be9f5cad2bf28de7c0a250590c65b72994800aeb | [
"MIT"
] | 27 | 2020-04-21T13:28:29.000Z | 2022-03-09T12:19:24.000Z | Chapter23/1.2.6.ArrayComparisonNotMatch.ps1 | wagnerhsu/packt-Mastering-Windows-PowerShell-Scripting-Fourth-Edition | be9f5cad2bf28de7c0a250590c65b72994800aeb | [
"MIT"
] | null | null | null | Chapter23/1.2.6.ArrayComparisonNotMatch.ps1 | wagnerhsu/packt-Mastering-Windows-PowerShell-Scripting-Fourth-Edition | be9f5cad2bf28de7c0a250590c65b72994800aeb | [
"MIT"
] | 15 | 2020-05-03T01:24:33.000Z | 2022-01-26T04:57:23.000Z | $array = @(
'Anna'
'Ben'
'Chris'
'David'
)
if ($array -notmatch '^[ab]') {
Write-Host "No names starting A or B"
}
| 13.5 | 41 | 0.496296 |
fb8be4f9f63ecc022f78675d2db109c9fd69bdcd | 11,052 | java | Java | api/src/main/java/com/gpudb/protocol/AlterTableResponse.java | pbo-cirus/kinetica-api-java | c838c1591336b3ee4fac489c1e25ff4bc2d54fcb | [
"MIT"
] | null | null | null | api/src/main/java/com/gpudb/protocol/AlterTableResponse.java | pbo-cirus/kinetica-api-java | c838c1591336b3ee4fac489c1e25ff4bc2d54fcb | [
"MIT"
] | null | null | null | api/src/main/java/com/gpudb/protocol/AlterTableResponse.java | pbo-cirus/kinetica-api-java | c838c1591336b3ee4fac489c1e25ff4bc2d54fcb | [
"MIT"
] | null | null | null | /*
* This file was autogenerated by the GPUdb schema processor.
*
* DO NOT EDIT DIRECTLY.
*/
package com.gpudb.protocol;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A set of results returned by {@link
* com.gpudb.GPUdb#alterTable(AlterTableRequest)}.
*/
public class AlterTableResponse implements IndexedRecord {
private static final Schema schema$ = SchemaBuilder
.record("AlterTableResponse")
.namespace("com.gpudb")
.fields()
.name("tableName").type().stringType().noDefault()
.name("action").type().stringType().noDefault()
.name("value").type().stringType().noDefault()
.name("typeId").type().stringType().noDefault()
.name("typeDefinition").type().stringType().noDefault()
.name("properties").type().map().values().array().items().stringType().noDefault()
.name("label").type().stringType().noDefault()
.name("info").type().map().values().stringType().noDefault()
.endRecord();
private String tableName;
private String action;
private String value;
private String typeId;
private String typeDefinition;
private Map<String, List<String>> properties;
private String label;
private Map<String, String> info;
/**
* Constructs an AlterTableResponse object with default parameters.
*/
public AlterTableResponse() {
}
/**
* This method supports the Avro framework and is not intended to be called
* directly by the user.
*
* @return the schema for the class.
*/
public static Schema getClassSchema() {
return schema$;
}
/**
* @return Table on which the operation was performed.
*/
public String getTableName() {
return tableName;
}
/**
* @param tableName Table on which the operation was performed.
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setTableName(String tableName) {
this.tableName = (tableName == null) ? "" : tableName;
return this;
}
/**
* @return Modification operation that was performed.
*/
public String getAction() {
return action;
}
/**
* @param action Modification operation that was performed.
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setAction(String action) {
this.action = (action == null) ? "" : action;
return this;
}
/**
* @return The value of the modification that was performed.
*/
public String getValue() {
return value;
}
/**
* @param value The value of the modification that was performed.
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setValue(String value) {
this.value = (value == null) ? "" : value;
return this;
}
/**
* @return return the type_id (when changing a table, a new type may be
* created)
*/
public String getTypeId() {
return typeId;
}
/**
* @param typeId return the type_id (when changing a table, a new type may
* be created)
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setTypeId(String typeId) {
this.typeId = (typeId == null) ? "" : typeId;
return this;
}
/**
* @return return the type_definition (when changing a table, a new type
* may be created)
*/
public String getTypeDefinition() {
return typeDefinition;
}
/**
* @param typeDefinition return the type_definition (when changing a
* table, a new type may be created)
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setTypeDefinition(String typeDefinition) {
this.typeDefinition = (typeDefinition == null) ? "" : typeDefinition;
return this;
}
/**
* @return return the type properties (when changing a table, a new type
* may be created)
*/
public Map<String, List<String>> getProperties() {
return properties;
}
/**
* @param properties return the type properties (when changing a table, a
* new type may be created)
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setProperties(Map<String, List<String>> properties) {
this.properties = (properties == null) ? new LinkedHashMap<String, List<String>>() : properties;
return this;
}
/**
* @return return the type label (when changing a table, a new type may be
* created)
*/
public String getLabel() {
return label;
}
/**
* @param label return the type label (when changing a table, a new type
* may be created)
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setLabel(String label) {
this.label = (label == null) ? "" : label;
return this;
}
/**
* @return Additional information.
*/
public Map<String, String> getInfo() {
return info;
}
/**
* @param info Additional information.
* @return {@code this} to mimic the builder pattern.
*/
public AlterTableResponse setInfo(Map<String, String> info) {
this.info = (info == null) ? new LinkedHashMap<String, String>() : info;
return this;
}
/**
* This method supports the Avro framework and is not intended to be called
* directly by the user.
*
* @return the schema object describing this class.
*/
@Override
public Schema getSchema() {
return schema$;
}
/**
* This method supports the Avro framework and is not intended to be called
* directly by the user.
*
* @param index the position of the field to get
* @return value of the field with the given index.
* @throws IndexOutOfBoundsException
*/
@Override
public Object get(int index) {
switch (index) {
case 0:
return this.tableName;
case 1:
return this.action;
case 2:
return this.value;
case 3:
return this.typeId;
case 4:
return this.typeDefinition;
case 5:
return this.properties;
case 6:
return this.label;
case 7:
return this.info;
default:
throw new IndexOutOfBoundsException("Invalid index specified.");
}
}
/**
* This method supports the Avro framework and is not intended to be called
* directly by the user.
*
* @param index the position of the field to set
* @param value the value to set
* @throws IndexOutOfBoundsException
*/
@Override
@SuppressWarnings("unchecked")
public void put(int index, Object value) {
switch (index) {
case 0:
this.tableName = (String) value;
break;
case 1:
this.action = (String) value;
break;
case 2:
this.value = (String) value;
break;
case 3:
this.typeId = (String) value;
break;
case 4:
this.typeDefinition = (String) value;
break;
case 5:
this.properties = (Map<String, List<String>>) value;
break;
case 6:
this.label = (String) value;
break;
case 7:
this.info = (Map<String, String>) value;
break;
default:
throw new IndexOutOfBoundsException("Invalid index specified.");
}
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if ((obj == null) || (obj.getClass() != this.getClass())) {
return false;
}
AlterTableResponse that = (AlterTableResponse) obj;
return (this.tableName.equals(that.tableName)
&& this.action.equals(that.action)
&& this.value.equals(that.value)
&& this.typeId.equals(that.typeId)
&& this.typeDefinition.equals(that.typeDefinition)
&& this.properties.equals(that.properties)
&& this.label.equals(that.label)
&& this.info.equals(that.info));
}
/**
 * Renders the record as "{name: value, ...}" using Avro's GenericData
 * string conversion for both field names and field values.
 */
@Override
public String toString() {
    GenericData gd = GenericData.get();
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    sb.append(gd.toString("tableName")).append(": ").append(gd.toString(this.tableName)).append(", ");
    sb.append(gd.toString("action")).append(": ").append(gd.toString(this.action)).append(", ");
    sb.append(gd.toString("value")).append(": ").append(gd.toString(this.value)).append(", ");
    sb.append(gd.toString("typeId")).append(": ").append(gd.toString(this.typeId)).append(", ");
    sb.append(gd.toString("typeDefinition")).append(": ").append(gd.toString(this.typeDefinition)).append(", ");
    sb.append(gd.toString("properties")).append(": ").append(gd.toString(this.properties)).append(", ");
    sb.append(gd.toString("label")).append(": ").append(gd.toString(this.label)).append(", ");
    sb.append(gd.toString("info")).append(": ").append(gd.toString(this.info));
    sb.append("}");
    return sb.toString();
}
/**
 * 31-based hash accumulated over all eight fields in schema order;
 * consistent with equals(Object).
 */
@Override
public int hashCode() {
    int h = 1;
    h = h * 31 + this.tableName.hashCode();
    h = h * 31 + this.action.hashCode();
    h = h * 31 + this.value.hashCode();
    h = h * 31 + this.typeId.hashCode();
    h = h * 31 + this.typeDefinition.hashCode();
    h = h * 31 + this.properties.hashCode();
    h = h * 31 + this.label.hashCode();
    h = h * 31 + this.info.hashCode();
    return h;
}
}
| 29.550802 | 104 | 0.568494 |
86fec89b6f1efc97c79f9bf4eb58b774740fe261 | 2,053 | go | Go | skynet_go/example/client2/client2.go | yefy/skp-go | c1c4e5d74bece5d81898f16ce5d33b76989176bd | [
"MIT"
] | null | null | null | skynet_go/example/client2/client2.go | yefy/skp-go | c1c4e5d74bece5d81898f16ce5d33b76989176bd | [
"MIT"
] | null | null | null | skynet_go/example/client2/client2.go | yefy/skp-go | c1c4e5d74bece5d81898f16ce5d33b76989176bd | [
"MIT"
] | null | null | null | package main
import (
"fmt"
"runtime"
"skp-go/skynet_go/defaultConf"
log "skp-go/skynet_go/logger"
"skp-go/skynet_go/mq"
"skp-go/skynet_go/mq/client"
"skp-go/skynet_go/rpc/rpcE"
"time"
)
//taskkill /im server.test.exe /f
//taskkill /im client.test.exe /f
//go test server_test.go server.go mqMsg.pb.go vector.go
// Client2Test is the RPC server object registered with each MQ client.
// It embeds rpcE.ServerB to pick up the framework's base server behaviour.
type Client2Test struct {
	rpcE.ServerB
}
// RPC_Describe returns the name the RPC framework uses to describe this server.
func (c *Client2Test) RPC_Describe() string {
	return "Client2Test"
}
// Client2TestHello logs the incoming request and echoes its Harbor value back
// in the reply.
// NOTE(review): log.Fatal is the project's custom logger and is used throughout
// this file as an ordinary log call — confirm it does not terminate the process.
func (c *Client2Test) Client2TestHello(in *mq.RegisteRequest, out *mq.RegisterReply) error {
	log.Fatal("Client2Test in = %+v", in)
	out.Harbor = in.Harbor
	return nil
}
// NewClient builds and starts an MQ client connected to :5673 whose key/topic
// is "Client2Test_<index>", subscribes it to all tags on that topic, and
// registers the Client2Test RPC server on it.
func NewClient(index int) *client.Client {
	name := fmt.Sprintf("Client2Test_%d", index)
	c := client.NewClient(name, ":5673")
	c.Subscribe(name, "*")
	c.RegisterServer(&Client2Test{})
	c.Start()
	return c
}
// RunClient drives one MQ client in an endless request/response loop against
// the "MqTest" service, verifying that every reply echoes the harbor value
// that was sent and counting round trips slower than one second.
func RunClient(index int) {
	// Start the counter high so harbor values are unmistakable in logs.
	var harbor int32 = 1000000000
	mqClient := NewClient(index)
	// Give the client time to connect/subscribe before issuing calls.
	time.Sleep(time.Second * 5)
	var timeout int = 0
	for {
		request := mq.RegisteRequest{}
		request.Instance = "Instance"
		harbor++
		request.Harbor = harbor
		request.Topic = "Topic"
		request.Tag = "Tag"
		reply := mq.RegisterReply{}
		msg := client.Msg{Topic: "MqTest", Tag: "0"}
		// Round-trip timing in milliseconds.
		time1 := time.Now().UnixNano() / 1e6
		if err := mqClient.Call(&msg, "MqTest.OnMqTestHello", &request, &reply); err != nil {
			log.Fatal("error")
		}
		time2 := time.Now().UnixNano() / 1e6
		diff := time2 - time1
		//log.Fatal("diff = %d, time1 = %d, time2 = %d", diff, time1, time2)
		// Count (and log) calls that took more than a second.
		if diff > 1000 {
			log.Fatal("diff = %d > 1", diff)
			timeout++
		}
		log.Fatal("reply.Harbor = %d, timeout = %d", reply.Harbor, timeout)
		// The reply must echo the harbor value we sent; anything else is fatal.
		if harbor != reply.Harbor {
			panic(reply.Harbor)
		}
	}
	// NOTE(review): unreachable — the loop above never breaks, so Close() is
	// never called.
	mqClient.Close()
}
// main boots the logger, launches 100 concurrent MQ test clients, then parks
// the main goroutine forever on a channel that is never written to.
func main() {
	runtime.GOMAXPROCS(runtime.NumCPU())
	defaultConf.SetDebug()
	log.NewGlobalLogger("./global.log", "", log.LstdFlags, log.Ltrace, log.Lscreen)
	log.Fatal("client1 start NumCPU = %d", runtime.NumCPU())

	// go-statement arguments are evaluated at spawn time, so passing i
	// directly is safe — each goroutine receives its own index.
	for i := 0; i < 100; i++ {
		go RunClient(i)
	}

	// Block indefinitely; the clients run until the process is killed.
	forever := make(chan bool)
	<-forever
}
| 22.56044 | 92 | 0.665855 |
ef05e15bf8e0fe949b9652ff9869887dd8e7736d | 5,562 | swift | Swift | Contents/UserModules/Nodes.playgroundmodule/Sources/Game.swift | benjaminhtr/WWDC21 | 589d5c889bf6f0cc88f55abe8d9a6c3de169d38e | [
"MIT"
] | 4 | 2021-04-20T14:49:17.000Z | 2021-06-01T16:34:33.000Z | Contents/UserModules/Nodes.playgroundmodule/Sources/Game.swift | benjaminhtr/WWDC21 | 589d5c889bf6f0cc88f55abe8d9a6c3de169d38e | [
"MIT"
] | null | null | null | Contents/UserModules/Nodes.playgroundmodule/Sources/Game.swift | benjaminhtr/WWDC21 | 589d5c889bf6f0cc88f55abe8d9a6c3de169d38e | [
"MIT"
] | null | null | null | import SpriteKit
import Models
import Protocols
import Extensions
/// A quiz scene node: shows a traffic-sign image and asks the player to pick
/// its meaning from a set of answer possibilities, tracking progress and
/// achievements across a fixed number of rounds.
public class Game: SKNode {
    // Game stats — single source of truth for all games.
    private let gameStats: GameStats

    // Correct traffic sign for the current round; the question image node is
    // kept in sync whenever it changes.
    private var correctTrafficSign: TrafficSign {
        didSet {
            correctTrafficSignImageNode.texture = SKTexture(imageNamed: correctTrafficSign.imageName)
        }
    }

    // Current traffic signs for the current round; whenever the set changes a
    // new correct sign is drawn from it at random.
    private var currentTrafficSigns: [TrafficSign] {
        didSet {
            guard let randomCurrentTrafficSign = currentTrafficSigns.randomElement() else { fatalError("Could not get a random element from current traffic signs.") }
            correctTrafficSign = randomCurrentTrafficSign
        }
    }

    // User interface nodes (created in init, hence implicitly unwrapped).
    private var progressLabel: Label!
    private var correctTrafficSignImageNode: SKSpriteNode!
    private var answerPossibilities: AnswerPossibilities!
    private var achievements: Achievements!

    // Game delegate, notified when the configured number of rounds is reached.
    public weak var delegate: GameDelegate?

    /// Builds the full quiz UI and wires up answer-selection notifications.
    /// - Parameter totalRounds: number of questions in one game.
    public required init(totalRounds: Int) {
        gameStats = GameStats(totalRounds: totalRounds)

        // Shuffle traffic signs and take the first three as this round's options.
        let shuffledTrafficSigns = trafficSigns.shuffled()
        currentTrafficSigns = Array(shuffledTrafficSigns[0..<3])

        // Pick the random sign that will be the correct answer.
        guard let randomCurrentTrafficSign = currentTrafficSigns.randomElement() else { fatalError("Could not get a random element from current traffic signs.") }
        correctTrafficSign = randomCurrentTrafficSign

        // Set the texture of the image node after its initialization below
        // (deferred because the node does not exist yet at this point).
        defer {
            correctTrafficSignImageNode.texture = SKTexture(imageNamed: correctTrafficSign.imageName)
        }

        super.init()

        // Progress label ("Question x of y").
        progressLabel = Label(text: "Question \(gameStats.roundCount + 1) of \(gameStats.totalRounds)", font: .systemFont(ofSize: 16, weight: .bold))
        progressLabel.alpha = 0.5
        progressLabel.position = CGPoint(x: 0, y: 353)
        addChild(progressLabel)

        // Title label with the question text.
        let titleLabel = Label(text: "What’s the meaning of\nthis traffic sign?", font: .systemFont(ofSize: 24, weight: .bold))
        titleLabel.position = CGPoint(x: 0, y: 300)
        addChild(titleLabel)

        // Sprite node showing the current traffic sign the user must identify.
        correctTrafficSignImageNode = SKSpriteNode()
        correctTrafficSignImageNode.size = CGSize(width: 226, height: 169)
        correctTrafficSignImageNode.position = CGPoint(x: 0, y: 155)
        addChild(correctTrafficSignImageNode)

        // Answer possibilities, added after the traffic signs are initialized.
        answerPossibilities = AnswerPossibilities(correctTrafficSignId: correctTrafficSign.id, currentTrafficSigns: currentTrafficSigns)
        addChild(answerPossibilities)

        // Achievements node informing the user about their progress.
        achievements = Achievements()
        achievements.position = CGPoint(x: 0, y: -325)
        addChild(achievements)

        // Observe answer taps posted by the answer nodes.
        NotificationCenter.default.addObserver(self, selector: #selector(answerDidChoose), name: .answerNode, object: nil)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Resets the stats and UI and begins a fresh game.
    public func startNewGame() {
        alpha = 1
        gameStats.newGame()
        achievements.update(gameStats)
        progressLabel.setText("Question \(gameStats.roundCount + 1) of \(gameStats.totalRounds)")
        handleNewRound()
    }

    /// Hides the node (alpha 0); the scene graph is left intact.
    public func hide() {
        alpha = 0
    }

    // Handles a tap on one of the answer possibilities: awards a star and
    // plays a sound on a correct answer, then advances to the next round
    // after a short delay.
    @objc private func answerDidChoose(_ notification: NSNotification) {
        if let id = notification.userInfo?["id"] as? Int, id == correctTrafficSign.id {
            gameStats.starsCount += 1
            achievements.update(gameStats)
            let sound = SKAction.playSoundFileNamed("correct.mp3", waitForCompletion: false)
            run(sound)
        } else {
            let sound = SKAction.playSoundFileNamed("bonk.mp3", waitForCompletion: false)
            run(sound)
        }
        gameStats.newRound()
        DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
            self.handleNewRound()
        }
    }

    // Fires the delegate's endGame when all rounds are done; otherwise draws
    // a fresh set of three signs and starts the next round.
    private func handleNewRound() {
        progressLabel.setText("Question \(gameStats.roundCount + 1) of \(gameStats.totalRounds)")
        if gameStats.roundCount == gameStats.totalRounds {
            delegate?.endGame(gameStats)
        } else {
            let shuffledTrafficSigns = trafficSigns.shuffled()
            let newTrafficSigns = Array(shuffledTrafficSigns[0..<3])
            currentTrafficSigns = newTrafficSigns
            answerPossibilities.startNewRound(correctTrafficSignId: correctTrafficSign.id, newTrafficSigns: newTrafficSigns)
        }
    }
}
| 38.09589 | 166 | 0.65552 |
8734f513516e5f24f71f1d3f4681ea127ccb2016 | 356,927 | sql | SQL | db/structure.sql | consected/restructure | 79262471f5be6fe41b43429e1dcbe53c7beeed3d | [
"BSD-3-Clause"
] | 4 | 2021-01-20T19:20:40.000Z | 2021-11-08T16:50:14.000Z | db/structure.sql | consected/restructure | 79262471f5be6fe41b43429e1dcbe53c7beeed3d | [
"BSD-3-Clause"
] | 2 | 2021-01-07T09:03:40.000Z | 2021-02-09T10:53:55.000Z | db/structure.sql | consected/restructure | 79262471f5be6fe41b43429e1dcbe53c7beeed3d | [
"BSD-3-Clause"
] | 1 | 2021-11-01T19:58:08.000Z | 2021-11-01T19:58:08.000Z | SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: ml_app; Type: SCHEMA; Schema: -; Owner: -
--
-- Primary application schema: all core tables, functions and triggers live here.
CREATE SCHEMA ml_app;


--
-- Name: ref_data; Type: SCHEMA; Schema: -; Owner: -
--

-- Schema for shared/lookup reference data.
CREATE SCHEMA ref_data;
--
-- Name: add_study_update_entry(integer, character varying, character varying, date, character varying, integer, integer, character varying); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Record a "record updates" tracker entry under the fixed 'Updates' protocol
-- for the given master record, returning the id of the new tracker entry.
-- The protocol event name is built from the update type and name
-- (e.g. 'updated player info').
-- Cleanup: the previously commented-out duplicate insert logic and the unused
-- protocol_record variable have been removed; behavior is unchanged.
CREATE FUNCTION ml_app.add_study_update_entry(master_id integer, update_type character varying, update_name character varying, event_date date, update_notes character varying, user_id integer, item_id integer, item_type character varying) RETURNS integer
    LANGUAGE plpgsql
    AS $$
DECLARE
  new_tracker_id integer;
BEGIN
  -- Delegate to the generic tracker helper with the fixed
  -- 'Updates' / 'record updates' protocol pair.
  SELECT add_tracker_entry_by_name(master_id, 'Updates', 'record updates', (update_type || ' ' || update_name), event_date, update_notes, user_id, item_id, item_type) into new_tracker_id;

  RETURN new_tracker_id;
END;
$$;
--
-- Name: add_tracker_entry_by_name(integer, character varying, character varying, character varying, character varying, integer, integer, character varying); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Insert a tracker entry for the protocol / sub-process / protocol-event
-- triple identified by (case-sensitive) name, using now() as the event date.
-- Raises an exception if no enabled matching protocol event exists.
-- Returns the id of the inserted trackers row.
CREATE FUNCTION ml_app.add_tracker_entry_by_name(master_id integer, protocol_name character varying, sub_process_name character varying, protocol_event_name character varying, set_notes character varying, user_id integer, item_id integer, item_type character varying) RETURNS integer
    LANGUAGE plpgsql
    AS $$
DECLARE
  new_tracker_id integer;
  protocol_record RECORD;
BEGIN
  -- Resolve the protocol hierarchy by exact name, skipping disabled records
  -- at every level.
  SELECT p.id protocol_id, sp.id sub_process_id, pe.id protocol_event_id
  INTO protocol_record
  FROM protocol_events pe
  INNER JOIN sub_processes sp on pe.sub_process_id = sp.id
  INNER JOIN protocols p on sp.protocol_id = p.id
  WHERE p.name = protocol_name
  AND sp.name = sub_process_name
  AND pe.name = protocol_event_name
  AND (p.disabled IS NULL or p.disabled = FALSE) AND (sp.disabled IS NULL or sp.disabled = FALSE) AND (pe.disabled IS NULL or pe.disabled = FALSE);

  IF NOT FOUND THEN
    RAISE EXCEPTION 'Nonexistent protocol record --> %', (protocol_name || ' ' || sub_process_name || ' ' || protocol_event_name);
  ELSE
    INSERT INTO trackers
    (master_id, protocol_id, sub_process_id, protocol_event_id, item_type, item_id, user_id, event_date, updated_at, created_at, notes)
    VALUES
    (master_id, protocol_record.protocol_id, protocol_record.sub_process_id, protocol_record.protocol_event_id,
    item_type, item_id, user_id, now(), now(), now(), set_notes)
    -- BUG FIX: new_tracker_id was never assigned, so the function always
    -- returned NULL. Capture the inserted row's id here.
    RETURNING id INTO new_tracker_id;
    RETURN new_tracker_id;
  END IF;
END;
$$;
--
-- Name: add_tracker_entry_by_name(integer, character varying, character varying, character varying, date, character varying, integer, integer, character varying); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Insert a tracker entry for the protocol / sub-process / protocol-event
-- triple identified by case-insensitive name, with an explicit event date.
-- Raises an exception if no enabled matching protocol event exists.
-- Returns the id of the inserted trackers row.
CREATE FUNCTION ml_app.add_tracker_entry_by_name(master_id integer, protocol_name character varying, sub_process_name character varying, protocol_event_name character varying, event_date date, set_notes character varying, user_id integer, item_id integer, item_type character varying) RETURNS integer
    LANGUAGE plpgsql
    AS $$
DECLARE
  new_tracker_id integer;
  protocol_record RECORD;
BEGIN
  -- Resolve the protocol hierarchy by lower-cased name comparison, skipping
  -- disabled records at every level.
  SELECT p.id protocol_id, sp.id sub_process_id, pe.id protocol_event_id
  INTO protocol_record
  FROM protocol_events pe
  INNER JOIN sub_processes sp on pe.sub_process_id = sp.id
  INNER JOIN protocols p on sp.protocol_id = p.id
  WHERE lower(p.name) = lower(protocol_name)
  AND lower(sp.name) = lower(sub_process_name)
  AND lower(pe.name) = lower(protocol_event_name)
  AND (p.disabled IS NULL or p.disabled = FALSE) AND (sp.disabled IS NULL or sp.disabled = FALSE) AND (pe.disabled IS NULL or pe.disabled = FALSE);

  IF NOT FOUND THEN
    RAISE EXCEPTION 'Nonexistent protocol record --> %', (protocol_name || ' ' || sub_process_name || ' ' || protocol_event_name);
  ELSE
    INSERT INTO trackers
    (master_id, protocol_id, sub_process_id, protocol_event_id, item_type, item_id, user_id, event_date, updated_at, created_at, notes)
    VALUES
    (master_id, protocol_record.protocol_id, protocol_record.sub_process_id, protocol_record.protocol_event_id,
    -- BUG FIX: the event_date parameter was previously ignored and now() was
    -- stored in the event_date column; use the caller-supplied date instead.
    item_type, item_id, user_id, event_date, now(), now(), set_notes)
    -- BUG FIX: new_tracker_id was never assigned, so the function always
    -- returned NULL. Capture the inserted row's id here.
    RETURNING id INTO new_tracker_id;
    RETURN new_tracker_id;
  END IF;
END;
$$;
--
-- Name: assign_sage_ids_to_players(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Assign pre-created Sage IDs (sage_assignments) to player master records.
-- NOTE(review): relies on a staging table temp_pit(id, master_id) having been
-- populated beforehand — confirm the expected loading step.
-- Returns an anonymous record of (first assigned id, last assigned id).
CREATE FUNCTION ml_app.assign_sage_ids_to_players() RETURNS record
    LANGUAGE plpgsql
    AS $$
DECLARE
  min_sa integer;
  max_sa integer;
  res record;
BEGIN
  -- update the precreated Sage ID records with the master_id from the player info, based on matching ID.
  -- apply an offset here if the Sage ID does not start at zero

  -- find the first unassigned Sage ID
  select min(id) into min_sa from sage_assignments where master_id is null;

  -- update the sage assignments in a block starting from the minimum unassigned ID;
  -- temp_pit rows are matched by position (sa.id - min_sa).
  update sage_assignments sa set master_id = (select master_id from temp_pit where id = sa.id - min_sa) where sa.master_id is null and sa.id >= min_sa;

  -- get the max value to return the results
  select max(id) into max_sa from sage_assignments where master_id is not null;

  select min_sa, max_sa into res;

  return res;
END;
$$;
--
-- Name: create_message_notification_email(character varying, character varying, character varying, json, character varying[], character varying, timestamp without time zone); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Queue an email notification addressed to raw email addresses, built from the
-- given layout/content templates and JSON data, then schedule the delayed job
-- that will deliver it. Note the return value: last_id is overwritten by
-- create_message_notification_job, so the DELAYED JOB id is returned, not the
-- message_notifications id.
CREATE FUNCTION ml_app.create_message_notification_email(layout_template_name character varying, content_template_name character varying, subject character varying, data json, recipient_emails character varying[], from_user_email character varying, run_at timestamp without time zone DEFAULT NULL::timestamp without time zone) RETURNS integer
    LANGUAGE plpgsql
    AS $$
DECLARE
  last_id INTEGER;
BEGIN
  -- Default to delivering immediately.
  IF run_at IS NULL THEN
    run_at := now();
  END IF;

  INSERT INTO ml_app.message_notifications
  (
    message_type,
    created_at,
    updated_at,
    layout_template_name,
    content_template_name,
    subject,
    data,
    recipient_emails,
    from_user_email
  )
  VALUES
  (
    'email',
    now(),
    now(),
    layout_template_name,
    content_template_name,
    subject,
    data,
    recipient_emails,
    from_user_email
  )
  RETURNING id
  INTO last_id
  ;

  -- Schedule the background delivery job; this reuses last_id, which becomes
  -- the function's return value.
  SELECT create_message_notification_job(last_id, run_at)
  INTO last_id
  ;

  RETURN last_id;
END;
$$;
--
-- Name: create_message_notification_email(integer, integer, integer, character varying, integer, integer[], character varying, character varying, character varying, timestamp without time zone); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Queue an email notification addressed to application users (by id) in the
-- context of an app type / master / item, then schedule the delayed job that
-- will deliver it. As with the other overload, last_id is overwritten by
-- create_message_notification_job, so the DELAYED JOB id is returned, not the
-- message_notifications id.
CREATE FUNCTION ml_app.create_message_notification_email(app_type_id integer, master_id integer, item_id integer, item_type character varying, user_id integer, recipient_user_ids integer[], layout_template_name character varying, content_template_name character varying, subject character varying, run_at timestamp without time zone DEFAULT NULL::timestamp without time zone) RETURNS integer
    LANGUAGE plpgsql
    AS $$
DECLARE
  last_id INTEGER;
BEGIN
  -- Default to delivering immediately.
  IF run_at IS NULL THEN
    run_at := now();
  END IF;

  INSERT INTO ml_app.message_notifications
  (
    subject,
    app_type_id,
    user_id,
    recipient_user_ids,
    layout_template_name,
    content_template_name,
    item_type,
    item_id,
    master_id,
    message_type,
    created_at,
    updated_at
  )
  VALUES
  (
    subject,
    app_type_id,
    user_id,
    recipient_user_ids,
    layout_template_name,
    content_template_name,
    item_type,
    item_id,
    master_id,
    'email',
    now(),
    now()
  )
  RETURNING id
  INTO last_id
  ;

  -- Schedule the background delivery job; this reuses last_id, which becomes
  -- the function's return value.
  SELECT create_message_notification_job(last_id, run_at)
  INTO last_id
  ;

  RETURN last_id;
END;
$$;
--
-- Name: create_message_notification_job(integer, timestamp without time zone); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Create a delayed_jobs row that will run HandleMessageNotificationJob for the
-- given message notification, at run_at (defaults to now). Returns the new
-- delayed_jobs id.
CREATE FUNCTION ml_app.create_message_notification_job(message_notification_id integer, run_at timestamp without time zone DEFAULT NULL::timestamp without time zone) RETURNS integer
    LANGUAGE plpgsql
    AS $$
DECLARE
  last_id INTEGER;
BEGIN
  IF run_at IS NULL THEN
    run_at := now();
  END IF;

  -- The handler column is a serialized Rails ActiveJob/DelayedJob YAML
  -- payload; its exact text must match what the Rails worker expects, so do
  -- not reformat the string literal below. gen_random_uuid() requires the
  -- pgcrypto extension (or PG13+).
  INSERT INTO ml_app.delayed_jobs
  (
    priority,
    attempts,
    handler,
    run_at,
    queue,
    created_at,
    updated_at
  )
  VALUES
  (
    0,
    0,
    '--- !ruby/object:ActiveJob::QueueAdapters::DelayedJobAdapter::JobWrapper
job_data:
job_class: HandleMessageNotificationJob
job_id: ' || gen_random_uuid() || '
queue_name: default
arguments:
- _aj_globalid: gid://fpa1/MessageNotification/' || message_notification_id::varchar || '
locale: :en',
    run_at,
    'default',
    now(),
    now()
  )
  RETURNING id
  INTO last_id
  ;

  RETURN last_id;
END;
$$;
--
-- Name: current_user_id(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Map the current database role name (current_user) to the matching
-- application users.id via the email column; NULL when there is no match.
CREATE FUNCTION ml_app.current_user_id() RETURNS integer
    LANGUAGE plpgsql
    AS $$
BEGIN
  RETURN (select id from users where email = current_user limit 1);
END;
$$;
SET default_tablespace = '';
SET default_with_oids = false;
--
-- Name: nfs_store_archived_files; Type: TABLE; Schema: ml_app; Owner: -
--
-- Metadata for individual entries extracted from an archive file stored in an
-- NFS-store container; each row points back to the containing archive via
-- nfs_store_stored_file_id.
CREATE TABLE ml_app.nfs_store_archived_files (
    id integer NOT NULL,
    file_hash character varying,  -- content digest; algorithm not recorded here — TODO confirm
    file_name character varying NOT NULL,
    content_type character varying NOT NULL,  -- MIME type
    archive_file character varying NOT NULL,  -- name of the archive this entry came from
    path character varying NOT NULL,  -- path of the entry inside the archive
    file_size bigint NOT NULL,  -- size (presumably bytes) — confirm
    file_updated_at timestamp without time zone,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    nfs_store_container_id integer,  -- owning container
    user_id integer,  -- user who stored/extracted the file
    title character varying,
    description character varying,
    nfs_store_stored_file_id integer,  -- the stored (archive) file this entry was extracted from
    file_metadata jsonb
);
--
-- Name: nfs_store_stored_files; Type: TABLE; Schema: ml_app; Owner: -
--
-- Metadata for files stored directly in an NFS-store container (an archive
-- here may have its members tracked in nfs_store_archived_files).
CREATE TABLE ml_app.nfs_store_stored_files (
    id integer NOT NULL,
    file_hash character varying NOT NULL,  -- content digest; algorithm not recorded here — TODO confirm
    file_name character varying NOT NULL,
    content_type character varying NOT NULL,  -- MIME type
    file_size bigint NOT NULL,  -- size (presumably bytes) — confirm
    path character varying,  -- directory path within the container; may be NULL
    file_updated_at timestamp without time zone,
    user_id integer,  -- user who stored the file
    nfs_store_container_id integer,  -- owning container
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    title character varying,
    description character varying,
    last_process_name_run character varying,  -- name of the last pipeline step run on this file
    file_metadata jsonb
);
--
-- Name: filestore_report_file_path(ml_app.nfs_store_stored_files, ml_app.nfs_store_archived_files); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Directory path of a filestore report entry: for an archive member this is
-- stored-file path + stored-file name + member path; otherwise just the
-- stored file's own path (may be NULL).
CREATE FUNCTION ml_app.filestore_report_file_path(sf ml_app.nfs_store_stored_files, af ml_app.nfs_store_archived_files) RETURNS character varying
    LANGUAGE plpgsql
    AS $$
BEGIN
  IF af.id IS NOT NULL THEN
    RETURN coalesce(sf.path, '') || '/' || sf.file_name || '/' || af.path;
  END IF;
  RETURN sf.path;
END;
$$;
--
-- Name: filestore_report_full_file_path(ml_app.nfs_store_stored_files, ml_app.nfs_store_archived_files); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Full path (including the file name) of a filestore report entry; archive
-- members additionally include the member path and member file name.
CREATE FUNCTION ml_app.filestore_report_full_file_path(sf ml_app.nfs_store_stored_files, af ml_app.nfs_store_archived_files) RETURNS character varying
    LANGUAGE plpgsql
    AS $$
BEGIN
  IF af.id IS NOT NULL THEN
    RETURN coalesce(sf.path, '') || '/' || sf.file_name || '/' || af.path || '/' || af.file_name;
  END IF;
  RETURN coalesce(sf.path, '') || '/' || sf.file_name;
END;
$$;
--
-- Name: filestore_report_perform_action(integer, character varying, integer, ml_app.nfs_store_stored_files, ml_app.nfs_store_archived_files); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Build the "perform action" JSON payload for a filestore report row: the
-- download endpoint plus ids identifying the container, activity log and the
-- file (archived file takes precedence over stored file).
-- BUG FIX: string values were previously built as '"' || x || '"' ::jsonb,
-- which raises or corrupts when a file name (or type) contains a double quote
-- or backslash; to_jsonb() performs proper JSON escaping. Numeric values keep
-- their previous jsonb number representation.
CREATE FUNCTION ml_app.filestore_report_perform_action(cid integer, altype character varying, alid integer, sf ml_app.nfs_store_stored_files, af ml_app.nfs_store_archived_files) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
DECLARE
  jo jsonb;
BEGIN
  jo := '{}';
  jo := jsonb_set(jo, '{perform_action}', '"/nfs_store/downloads/!container_id"');
  jo := jsonb_set(jo, '{container_id}', to_jsonb(cid));
  jo := jsonb_set(jo, '{download_id}', to_jsonb(coalesce(af.id, sf.id)));
  jo := jsonb_set(jo, '{activity_log_type}', to_jsonb(altype));
  jo := jsonb_set(jo, '{activity_log_id}', to_jsonb(alid));
  -- Archive members are retrieved differently from plain stored files.
  jo := jsonb_set(jo, '{retrieval_type}', to_jsonb((CASE WHEN af.id IS NOT NULL THEN 'archived_file' ELSE 'stored_file' END)::text));
  jo := jsonb_set(jo, '{label}', to_jsonb((CASE WHEN af.id IS NOT NULL THEN af.file_name ELSE sf.file_name END)::text));
  return jo;
END;
$$;
--
-- Name: filestore_report_select_fields(integer, character varying, integer, integer, integer); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Build the JSON payload describing the hidden selection checkbox for a
-- filestore report row: the form field name plus the item-identifying value
-- object (archived-file id takes precedence over stored-file id).
-- BUG FIX: string values were previously built as '"' || x || '"' ::jsonb,
-- which raises when the value contains a double quote or backslash;
-- to_jsonb() performs proper JSON escaping. Numeric values keep their
-- previous jsonb number representation.
CREATE FUNCTION ml_app.filestore_report_select_fields(cid integer, altype character varying, alid integer, sfid integer, afid integer) RETURNS jsonb
    LANGUAGE plpgsql
    AS $$
DECLARE
  jo jsonb;
  joid jsonb;
BEGIN
  -- Value object identifying the selectable item.
  joid := '{}'::jsonb;
  joid := jsonb_set(joid, '{id}', to_jsonb(coalesce(afid, sfid)));
  joid := jsonb_set(joid, '{retrieval_type}', to_jsonb((CASE WHEN afid IS NOT NULL THEN 'archived_file' ELSE 'stored_file' END)::text));
  joid := jsonb_set(joid, '{container_id}', to_jsonb(cid));
  joid := jsonb_set(joid, '{activity_log_type}', to_jsonb(altype));
  joid := jsonb_set(joid, '{activity_log_id}', to_jsonb(alid));

  jo := '{}'::jsonb;
  jo := jsonb_set(jo, '{field_name}', '"nfs_store_download[selected_items][]"');
  jo := jsonb_set(jo, '{value}', joid);
  return jo;
END;
$$;
--
-- Name: format_update_notes(character varying, character varying, character varying); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Produce a human-readable change note for one field, e.g.
-- 'first name from bob to rob; '. Values are lower-cased before comparison;
-- a NULL old value is treated as '-' (unset) and omits the 'from' part.
-- Returns '' when the (normalized) values are equal.
CREATE FUNCTION ml_app.format_update_notes(field_name character varying, old_val character varying, new_val character varying) RETURNS character varying
    LANGUAGE plpgsql
    AS $$
DECLARE
  res VARCHAR := '';
BEGIN
  old_val := lower(coalesce(old_val, '-')::varchar);
  new_val := lower(coalesce(new_val, '')::varchar);

  -- No note when nothing effectively changed.
  IF old_val = new_val THEN
    RETURN res;
  END IF;

  res := field_name;
  -- Only mention the previous value when the field was previously set.
  IF old_val <> '-' THEN
    res := res || ' from ' || old_val;
  END IF;
  res := res || ' to ' || new_val || '; ';

  RETURN res;
END;
$$;
--
-- Name: handle_address_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function: lower-cases all free-text address columns on the incoming
-- row. Presumably attached as a BEFORE INSERT/UPDATE trigger on addresses
-- (it mutates and returns NEW) — confirm against the trigger definitions.
CREATE FUNCTION ml_app.handle_address_update() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
BEGIN
  NEW.street := lower(NEW.street);
  NEW.street2 := lower(NEW.street2);
  NEW.street3 := lower(NEW.street3);
  NEW.city := lower(NEW.city);
  NEW.state := lower(NEW.state);
  NEW.zip := lower(NEW.zip);
  NEW.country := lower(NEW.country);
  NEW.postal_code := lower(NEW.postal_code);
  NEW.region := lower(NEW.region);
  NEW.source := lower(NEW.source);
  RETURN NEW;
END;
$$;
--
-- Name: handle_delete(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function run when a tracker_history row is deleted: keeps the
-- corresponding trackers row (the "latest event" summary for the
-- master/protocol pair) in sync, deleting it when no history remains.
CREATE FUNCTION ml_app.handle_delete() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
DECLARE
  latest_tracker tracker_history%ROWTYPE;
BEGIN
  -- Find the most recent remaining item in tracker_history for the master/protocol pair,
  -- now that the target record has been deleted.
  -- tracker_id is the foreign key onto the trackers table master/protocol record.
  SELECT * INTO latest_tracker
  FROM tracker_history
  WHERE tracker_id = OLD.tracker_id
  ORDER BY event_date DESC NULLS last, updated_at DESC NULLS last LIMIT 1;

  IF NOT FOUND THEN
    -- No record was found in tracker_history for the master/protocol pair.
    -- Therefore there should be no corresponding trackers record either. Delete it.
    DELETE FROM trackers WHERE trackers.id = OLD.tracker_id;
  ELSE
    -- A record was found in tracker_history. Since it is the latest one for the master/protocol pair,
    -- just go ahead and update the corresponding record in trackers.
    UPDATE trackers
    SET
      event_date = latest_tracker.event_date,
      sub_process_id = latest_tracker.sub_process_id,
      protocol_event_id = latest_tracker.protocol_event_id,
      item_id = latest_tracker.item_id,
      item_type = latest_tracker.item_type,
      updated_at = latest_tracker.updated_at,
      notes = latest_tracker.notes,
      user_id = latest_tracker.user_id
    WHERE trackers.id = OLD.tracker_id;
  END IF;

  RETURN OLD;
END
$$;
--
-- Name: handle_player_contact_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function: lower-cases the free-text columns of a player_contacts
-- row. Presumably attached as a BEFORE INSERT/UPDATE trigger (it mutates and
-- returns NEW) — confirm against the trigger definitions.
CREATE FUNCTION ml_app.handle_player_contact_update() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
BEGIN
  NEW.rec_type := lower(NEW.rec_type);
  NEW.data := lower(NEW.data);
  NEW.source := lower(NEW.source);
  RETURN NEW;
END;
$$;
--
-- Name: handle_player_info_before_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function: lower-cases the name and free-text columns of a
-- player_infos row before it is stored (BEFORE trigger usage implied by the
-- function name and the NEW mutation).
CREATE FUNCTION ml_app.handle_player_info_before_update() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
BEGIN
  NEW.first_name := lower(NEW.first_name);
  NEW.last_name := lower(NEW.last_name);
  NEW.middle_name := lower(NEW.middle_name);
  NEW.nick_name := lower(NEW.nick_name);
  NEW.college := lower(NEW.college);
  NEW.source := lower(NEW.source);
  RETURN NEW;
END;
$$;
--
-- Name: handle_rc_cis_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function processing REDCap CIS staging rows. NEW.status acts as a
-- command ('create master', 'update name', 'update address', 'update email',
-- 'update phone', 'update all'): the matching master / player_infos /
-- addresses / player_contacts records are created or updated, a study-update
-- tracker entry is added for each change, and NEW.status is rewritten to a
-- result message before the row is saved.
CREATE FUNCTION ml_app.handle_rc_cis_update() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
DECLARE
  new_master_id integer;
  new_msid integer;
  updated_item_id integer;
  register_tracker boolean;
  update_notes VARCHAR;
  event_date DATE;
  track_p varchar;
  track_sp varchar;
  track_pe varchar;
  res_status varchar;
BEGIN
  -- Fixed protocol used when registering the "CIS received via REDCap" tracker.
  track_p := 'study';
  track_sp := 'CIS-received';
  track_pe := 'REDCap';

  register_tracker := FALSE;
  update_notes := '';
  res_status := NEW.status;
  event_date := NEW.time_stamp::date;

  -- Only act when a command/status has been set on the row.
  IF coalesce(NEW.status,'') <> '' THEN

    IF NEW.status = 'create master' THEN
      IF NEW.master_id IS NOT NULL THEN
        RAISE EXCEPTION 'Can not create a master when the master ID is already set. Review the linked Master record, or to create a new Master record clear the master_id first and try again.';
      END IF;

      -- Allocate the next msid and create the master plus its initial
      -- player_infos record from the submitted names.
      SELECT MAX(msid) + 1 INTO new_msid FROM masters;

      INSERT INTO masters
      (msid, created_at, updated_at, user_id)
      VALUES
      (new_msid, now(), now(), NEW.user_id)
      RETURNING id INTO new_master_id;

      INSERT INTO player_infos
      (master_id, first_name, last_name, source, created_at, updated_at, user_id)
      VALUES
      (new_master_id, NEW.first_name, NEW.last_name, 'cis-redcap', now(), now(), NEW.user_id);

      register_tracker := TRUE;
    ELSE
      SELECT id INTO new_master_id FROM masters WHERE id = NEW.master_id;
    END IF;

    -- Name update (also part of 'update all' / 'create master').
    IF NEW.status = 'update name' OR NEW.status = 'update all' OR NEW.status = 'create master' THEN
      IF new_master_id IS NULL THEN
        RAISE EXCEPTION 'Must set a master ID to %', NEW.status;
      END IF;

      -- Build a change note from the current highest-ranked player_infos row.
      SELECT format_update_notes('first name', first_name, NEW.first_name) ||
      format_update_notes('last name', last_name, NEW.last_name) ||
      format_update_notes('middle name', middle_name, NEW.middle_name) ||
      format_update_notes('nick name', nick_name, NEW.nick_name)
      INTO update_notes
      FROM player_infos
      WHERE master_id = new_master_id order by rank desc limit 1;

      -- NOTE(review): this UPDATE touches every player_infos row for the
      -- master (no rank filter) — confirm that is intended.
      UPDATE player_infos SET
      master_id = new_master_id, first_name = NEW.first_name, last_name = NEW.last_name,
      middle_name = NEW.middle_name, nick_name = NEW.nick_name,
      source = 'cis-redcap', created_at = now(), updated_at = now(), user_id = NEW.user_id
      WHERE master_id = new_master_id
      RETURNING id INTO updated_item_id;

      PERFORM add_study_update_entry(new_master_id, 'updated', 'player info', event_date, update_notes, NEW.user_id, updated_item_id, 'PlayerInfo');

      register_tracker := TRUE;
      res_status := 'updated name';
    END IF;

    -- Address update: only when at least street, state or zip is supplied.
    IF NEW.status = 'update address' OR NEW.status = 'update all' OR NEW.status = 'create master' THEN
      IF new_master_id IS NULL THEN
        RAISE EXCEPTION 'Must set a master ID to %', NEW.status;
      END IF;

      IF NEW.street IS NOT NULL AND trim(NEW.street) <> '' OR
          NEW.state IS NOT NULL AND trim(NEW.state) <> '' OR
          NEW.zipcode IS NOT NULL AND trim(NEW.zipcode) <> '' THEN

        SELECT format_update_notes('street', NULL, NEW.street) ||
        format_update_notes('street2', NULL, NEW.street2) ||
        format_update_notes('city', NULL, NEW.city) ||
        format_update_notes('state', NULL, NEW.state) ||
        format_update_notes('zip', NULL, NEW.zipcode)
        INTO update_notes;
        -- FROM addresses
        -- WHERE master_id = new_master_id;

        -- A new address row is inserted at rank 10 and ranks recalculated.
        INSERT INTO addresses
        (master_id, street, street2, city, state, zip, source, rank, created_at, updated_at, user_id)
        VALUES
        (new_master_id, NEW.street, NEW.street2, NEW.city, NEW.state, NEW.zipcode, 'cis-redcap', 10, now(), now(), NEW.user_id)
        RETURNING id INTO updated_item_id;

        PERFORM update_address_ranks(new_master_id);
        PERFORM add_study_update_entry(new_master_id, 'updated', 'address', event_date, update_notes, NEW.user_id, updated_item_id, 'Address');

        register_tracker := TRUE;
        res_status := 'updated address';
      ELSE
        res_status := 'address not updated - details blank';
      END IF;
    END IF;

    -- Email update: inserts a new player_contacts 'email' row when supplied.
    IF NEW.status = 'update email' OR NEW.status = 'update all' OR NEW.status = 'create master' THEN
      IF new_master_id IS NULL THEN
        RAISE EXCEPTION 'Must set a master ID to %', NEW.status;
      END IF;

      IF NEW.email IS NOT NULL AND trim(NEW.email) <> '' THEN
        SELECT format_update_notes('data', NULL, NEW.email)
        INTO update_notes;

        INSERT INTO player_contacts
        (master_id, data, rec_type, source, rank, created_at, updated_at, user_id)
        VALUES
        (new_master_id, NEW.email, 'email', 'cis-redcap', 10, now(), now(), NEW.user_id)
        RETURNING id INTO updated_item_id;

        PERFORM update_player_contact_ranks(new_master_id, 'email');
        PERFORM add_study_update_entry(new_master_id, 'updated', 'player contact', event_date, update_notes, NEW.user_id, updated_item_id, 'PlayerContact');

        register_tracker := TRUE;
        res_status := 'updated email';
      ELSE
        res_status := 'email not updated - details blank';
      END IF;
    END IF;

    -- Phone update: inserts a new player_contacts 'phone' row when supplied.
    IF NEW.status = 'update phone' OR NEW.status = 'update all' OR NEW.status = 'create master' THEN
      IF new_master_id IS NULL THEN
        RAISE EXCEPTION 'Must set a master ID to %', NEW.status;
      END IF;

      IF NEW.phone IS NOT NULL AND trim(NEW.phone) <> '' THEN
        SELECT format_update_notes('data', NULL, NEW.phone)
        INTO update_notes;

        INSERT INTO player_contacts
        (master_id, data, rec_type, source, rank, created_at, updated_at, user_id)
        VALUES
        (new_master_id, NEW.phone, 'phone', 'cis-redcap', 10, now(), now(), NEW.user_id)
        RETURNING id INTO updated_item_id;

        PERFORM update_player_contact_ranks(new_master_id, 'phone');
        PERFORM add_study_update_entry(new_master_id, 'updated', 'player contact', event_date, update_notes, NEW.user_id, updated_item_id, 'PlayerContact');

        register_tracker := TRUE;
        res_status := 'updated phone';
      ELSE
        res_status := 'phone not updated - details blank';
      END IF;
    END IF;

    -- Composite commands get a composite result status.
    CASE
      WHEN NEW.status = 'create master' THEN
        res_status := 'created master';
      WHEN NEW.status = 'update all' THEN
        res_status := 'updated all';
      ELSE
    END CASE;

    -- the master_id was set and an action performed. Register the tracker event.
    -- NOTE(review): this branch reads OLD.* (time_stamp, hearabout,
    -- redcap_survey_identifier), implying an UPDATE trigger — confirm.
    IF coalesce(NEW.added_tracker, FALSE) = FALSE AND new_master_id IS NOT NULL AND register_tracker THEN
      PERFORM add_tracker_entry_by_name(new_master_id, track_p, track_sp, track_pe, OLD.time_stamp::date, ('Heard about: ' || coalesce(OLD.hearabout, '(not set)') || E'
Submitted by REDCap ID '|| OLD.redcap_survey_identifier), NEW.user_id, NULL, NULL);
      NEW.added_tracker = TRUE;
    END IF;

    NEW.master_id := new_master_id;
    NEW.updated_at := now();
    NEW.status := res_status;
  END IF;

  RETURN NEW;
END;
$$;
--
-- Name: handle_tracker_history_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.handle_tracker_history_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
-- Redirect edits of tracker_history rows back into trackers: remove the
-- edited history row, then insert the NEW values as a fresh trackers row.
-- The cascading trackers trigger is expected to regenerate the history
-- entry. Returning NULL suppresses the original UPDATE on tracker_history.
DELETE FROM tracker_history WHERE id = OLD.id;
INSERT INTO trackers
(master_id, protocol_id,
protocol_event_id, event_date, sub_process_id, notes,
item_id, item_type,
created_at, updated_at, user_id)
SELECT NEW.master_id, NEW.protocol_id,
NEW.protocol_event_id, NEW.event_date,
NEW.sub_process_id, NEW.notes,
NEW.item_id, NEW.item_type,
NEW.created_at, NEW.updated_at, NEW.user_id ;
RETURN NULL;
END;
$$;
-- The log_* trigger functions below all follow one generated pattern: an
-- audit trigger that copies the NEW row of its base table into the matching
-- *_history table, recording the base row's primary key in the <table>_id
-- column of the history table. The INSERT column list and the SELECT list
-- are matched purely by position, so the two lists must be kept in the same
-- order when columns are added or removed.
--
-- Name: log_accuracy_score_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_accuracy_score_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO accuracy_score_history
(
accuracy_score_id,
name ,
value ,
created_at ,
updated_at ,
disabled ,
admin_id
)
SELECT
NEW.id,
NEW.name ,
NEW.value ,
NEW.created_at ,
NEW.updated_at ,
NEW.disabled ,
NEW.admin_id
;
RETURN NEW;
END;
$$;
--
-- Name: log_activity_log_player_contact_phone_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_activity_log_player_contact_phone_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO activity_log_player_contact_phone_history
(
master_id,
player_contact_id,
data,
select_call_direction,
select_who,
called_when,
select_result,
select_next_step,
follow_up_when,
notes,
protocol_id,
set_related_player_contact_rank,
extra_log_type,
user_id,
created_at,
updated_at,
activity_log_player_contact_phone_id
)
SELECT
NEW.master_id,
NEW.player_contact_id,
NEW.data,
NEW.select_call_direction,
NEW.select_who,
NEW.called_when,
NEW.select_result,
NEW.select_next_step,
NEW.follow_up_when,
NEW.notes,
NEW.protocol_id,
NEW.set_related_player_contact_rank,
NEW.extra_log_type,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_activity_log_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_activity_log_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO activity_log_history
(
name,
activity_log_id,
admin_id,
created_at,
updated_at,
item_type,
rec_type,
disabled,
action_when_attribute,
field_list,
blank_log_field_list,
blank_log_name,
extra_log_types,
hide_item_list_panel,
main_log_name,
process_name,
table_name,
category,
schema_name
)
SELECT
NEW.name,
NEW.id,
NEW.admin_id,
NEW.created_at,
NEW.updated_at,
NEW.item_type,
NEW.rec_type,
NEW.disabled,
NEW.action_when_attribute,
NEW.field_list,
NEW.blank_log_field_list,
NEW.blank_log_name,
NEW.extra_log_types,
NEW.hide_item_list_panel,
NEW.main_log_name,
NEW.process_name,
NEW.table_name,
NEW.category,
NEW.schema_name
;
RETURN NEW;
END;
$$;
--
-- Name: log_address_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_address_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO address_history
(
master_id,
street,
street2,
street3,
city,
state,
zip,
source,
rank,
rec_type,
user_id,
created_at,
updated_at,
country,
postal_code,
region,
address_id
)
SELECT
NEW.master_id,
NEW.street,
NEW.street2,
NEW.street3,
NEW.city,
NEW.state,
NEW.zip,
NEW.source,
NEW.rank,
NEW.rec_type,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.country,
NEW.postal_code,
NEW.region,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_admin_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_admin_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
-- Note the mapping at the end of the lists: admin_history.admin_id records
-- NEW.id (the admin row being changed), while updated_by_admin_id records
-- NEW.admin_id (the admin who made the change).
INSERT INTO admin_history
(
admin_id,
email,
encrypted_password,
sign_in_count,
current_sign_in_at,
last_sign_in_at,
current_sign_in_ip ,
last_sign_in_ip ,
created_at ,
updated_at,
failed_attempts,
unlock_token,
locked_at,
disabled,
encrypted_otp_secret,
encrypted_otp_secret_iv,
encrypted_otp_secret_salt,
consumed_timestep,
otp_required_for_login,
reset_password_sent_at,
password_updated_at,
updated_by_admin_id
)
SELECT
NEW.id,
NEW.email,
NEW.encrypted_password,
NEW.sign_in_count,
NEW.current_sign_in_at,
NEW.last_sign_in_at,
NEW.current_sign_in_ip ,
NEW.last_sign_in_ip ,
NEW.created_at ,
NEW.updated_at,
NEW.failed_attempts,
NEW.unlock_token,
NEW.locked_at,
NEW.disabled,
NEW.encrypted_otp_secret,
NEW.encrypted_otp_secret_iv,
NEW.encrypted_otp_secret_salt,
NEW.consumed_timestep,
NEW.otp_required_for_login,
NEW.reset_password_sent_at,
NEW.password_updated_at,
NEW.admin_id
;
RETURN NEW;
END;
$$;
--
-- Name: log_app_configuration_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_app_configuration_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO app_configuration_history
(
name,
value,
app_type_id,
user_id,
role_name,
admin_id,
disabled,
created_at,
updated_at,
app_configuration_id
)
SELECT
NEW.name,
NEW.value,
NEW.app_type_id,
NEW.user_id,
NEW.role_name,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_app_type_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_app_type_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO app_type_history
(
name,
label,
admin_id,
disabled,
created_at,
updated_at,
app_type_id
)
SELECT
NEW.name,
NEW.label,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_college_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_college_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO college_history
(
college_id,
name ,
synonym_for_id,
created_at ,
updated_at ,
disabled ,
admin_id,
user_id
)
SELECT
NEW.id,
NEW.name ,
NEW.synonym_for_id ,
NEW.created_at ,
NEW.updated_at ,
NEW.disabled ,
NEW.admin_id,
NEW.user_id
;
RETURN NEW;
END;
$$;
--
-- Name: log_config_library_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_config_library_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO config_library_history
(
config_library_id,
category,
name,
options,
format,
disabled,
admin_id,
updated_at,
created_at
)
SELECT
NEW.id,
NEW.category,
NEW.name,
NEW.options,
NEW.format,
NEW.disabled,
NEW.admin_id,
NEW.updated_at,
NEW.created_at
;
RETURN NEW;
END;
$$;
--
-- Name: log_dynamic_model_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_dynamic_model_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO dynamic_model_history
(
name,
table_name,
schema_name,
primary_key_name,
foreign_key_name,
description,
position,
category,
table_key_name,
field_list,
result_order,
options,
admin_id,
disabled,
created_at,
updated_at,
dynamic_model_id
)
SELECT
NEW.name,
NEW.table_name,
NEW.schema_name,
NEW.primary_key_name,
NEW.foreign_key_name,
NEW.description,
NEW.position,
NEW.category,
NEW.table_key_name,
NEW.field_list,
NEW.result_order,
NEW.options,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_external_identifier_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_external_identifier_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO external_identifier_history
(
name,
label,
external_id_attribute,
external_id_view_formatter,
external_id_edit_pattern,
prevent_edit,
pregenerate_ids,
min_id,
max_id,
alphanumeric,
extra_fields,
admin_id,
disabled,
created_at,
updated_at,
external_identifier_id,
schema_name,
options
)
SELECT
NEW.name,
NEW.label,
NEW.external_id_attribute,
NEW.external_id_view_formatter,
NEW.external_id_edit_pattern,
NEW.prevent_edit,
NEW.pregenerate_ids,
NEW.min_id,
NEW.max_id,
NEW.alphanumeric,
NEW.extra_fields,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id,
NEW.schema_name,
NEW.options
;
RETURN NEW;
END;
$$;
--
-- Name: log_external_link_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_external_link_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO external_link_history
(
external_link_id,
name,
value,
admin_id,
disabled,
created_at,
updated_at
)
SELECT
NEW.id,
NEW.name,
NEW.value,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at
;
RETURN NEW;
END;
$$;
--
-- Name: log_general_selection_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Audit trigger: snapshots the modified general_selections row (NEW) into
-- general_selection_history. The INSERT column list and the SELECT list are
-- matched by position and must be kept in the same order.
-- Fix: the column name position is quoted consistently ("position", matching
-- log_protocol_update), and the stray column alias that followed NEW.position
-- in the SELECT list has been removed -- a column alias inside an
-- INSERT ... SELECT is ignored by PostgreSQL, so behavior is unchanged.
CREATE FUNCTION ml_app.log_general_selection_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO general_selection_history
(
general_selection_id,
name ,
value ,
item_type ,
created_at ,
updated_at ,
disabled ,
admin_id ,
create_with ,
edit_if_set ,
edit_always ,
"position" ,
description ,
lock
)
SELECT
NEW.id,
NEW.name ,
NEW.value ,
NEW.item_type ,
NEW.created_at ,
NEW.updated_at ,
NEW.disabled ,
NEW.admin_id ,
NEW.create_with ,
NEW.edit_if_set ,
NEW.edit_always ,
NEW."position" ,
NEW.description ,
NEW.lock
;
RETURN NEW;
END;
$$;
-- Generated audit-trigger pattern (continued): each function copies the NEW
-- row of its base table into the matching *_history table; the INSERT column
-- list and SELECT list are matched by position.
--
-- Name: log_item_flag_name_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_item_flag_name_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO item_flag_name_history
(
item_flag_name_id,
name ,
item_type,
created_at ,
updated_at ,
disabled ,
admin_id
)
SELECT
NEW.id,
NEW.name ,
NEW.item_type ,
NEW.created_at ,
NEW.updated_at ,
NEW.disabled ,
NEW.admin_id
;
RETURN NEW;
END;
$$;
--
-- Name: log_item_flag_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_item_flag_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO item_flag_history
(
item_flag_id,
item_id ,
item_type,
item_flag_name_id,
created_at ,
updated_at ,
user_id ,
disabled
)
SELECT
NEW.id,
NEW.item_id ,
NEW.item_type,
NEW.item_flag_name_id,
NEW.created_at ,
NEW.updated_at ,
NEW.user_id ,
NEW.disabled
;
RETURN NEW;
END;
$$;
--
-- Name: log_message_template_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_message_template_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO message_template_history
(
name,
template_type,
message_type,
template,
category,
admin_id,
disabled,
created_at,
updated_at,
message_template_id
)
SELECT
NEW.name,
NEW.template_type,
NEW.message_type,
NEW.template,
NEW.category,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_nfs_store_archived_file_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_nfs_store_archived_file_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO nfs_store_archived_file_history
(
file_hash,
file_name,
content_type,
archive_file,
path,
file_size,
file_updated_at,
nfs_store_container_id,
title,
description,
file_metadata,
nfs_store_stored_file_id,
user_id,
created_at,
updated_at,
nfs_store_archived_file_id
)
SELECT
NEW.file_hash,
NEW.file_name,
NEW.content_type,
NEW.archive_file,
NEW.path,
NEW.file_size,
NEW.file_updated_at,
NEW.nfs_store_container_id,
NEW.title,
NEW.description,
NEW.file_metadata,
NEW.nfs_store_stored_file_id,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_nfs_store_container_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_nfs_store_container_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
-- Mapping note: the history column orig_nfs_store_container_id receives
-- NEW.nfs_store_container_id (the base row's reference column), while the
-- history column nfs_store_container_id receives NEW.id (the base row's pk).
INSERT INTO nfs_store_container_history
(
master_id,
name,
app_type_id,
orig_nfs_store_container_id,
user_id,
created_at,
updated_at,
nfs_store_container_id
)
SELECT
NEW.master_id,
NEW.name,
NEW.app_type_id,
NEW.nfs_store_container_id,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_nfs_store_filter_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_nfs_store_filter_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO nfs_store_filter_history
(
app_type_id,
role_name,
user_id,
resource_name,
filter,
description,
admin_id,
disabled,
created_at,
updated_at,
nfs_store_filter_id
)
SELECT
NEW.app_type_id,
NEW.role_name,
NEW.user_id,
NEW.resource_name,
NEW.filter,
NEW.description,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_nfs_store_stored_file_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_nfs_store_stored_file_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO nfs_store_stored_file_history
(
file_hash,
file_name,
content_type,
path,
file_size,
file_updated_at,
nfs_store_container_id,
title,
description,
file_metadata,
last_process_name_run,
user_id,
created_at,
updated_at,
nfs_store_stored_file_id
)
SELECT
NEW.file_hash,
NEW.file_name,
NEW.content_type,
NEW.path,
NEW.file_size,
NEW.file_updated_at,
NEW.nfs_store_container_id,
NEW.title,
NEW.description,
NEW.file_metadata,
NEW.last_process_name_run,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_page_layout_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_page_layout_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO page_layout_history
(
page_layout_id,
app_type_id,
layout_name,
panel_name,
panel_label,
panel_position,
options,
disabled,
admin_id,
created_at,
updated_at,
description
)
SELECT
NEW.id,
NEW.app_type_id,
NEW.layout_name,
NEW.panel_name,
NEW.panel_label,
NEW.panel_position,
NEW.options,
NEW.disabled,
NEW.admin_id,
NEW.created_at,
NEW.updated_at,
NEW.description
;
RETURN NEW;
END;
$$;
--
-- Name: log_player_contact_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_player_contact_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO player_contact_history
(
player_contact_id,
master_id,
rec_type,
data,
source,
rank,
user_id,
created_at,
updated_at
)
SELECT
NEW.id,
NEW.master_id,
NEW.rec_type,
NEW.data,
NEW.source,
NEW.rank,
NEW.user_id,
NEW.created_at,
NEW.updated_at
;
RETURN NEW;
END;
$$;
--
-- Name: log_player_info_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_player_info_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO player_info_history
(
master_id,
first_name,
last_name,
middle_name,
nick_name,
birth_date,
death_date,
user_id,
created_at,
updated_at,
contact_pref,
start_year,
rank,
notes,
contact_id,
college,
end_year,
source,
player_info_id
)
SELECT
NEW.master_id,
NEW.first_name,
NEW.last_name,
NEW.middle_name,
NEW.nick_name,
NEW.birth_date,
NEW.death_date,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.contact_pref,
NEW.start_year,
NEW.rank,
NEW.notes,
NEW.contact_id,
NEW.college,
NEW.end_year,
NEW.source,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_protocol_event_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_protocol_event_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO protocol_event_history
(
protocol_event_id,
name ,
admin_id,
created_at,
updated_at,
disabled ,
sub_process_id,
milestone ,
description
)
SELECT
NEW.id,
NEW.name ,
NEW.admin_id,
NEW.created_at,
NEW.updated_at,
NEW.disabled ,
NEW.sub_process_id,
NEW.milestone ,
NEW.description
;
RETURN NEW;
END;
$$;
--
-- Name: log_protocol_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_protocol_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
-- "position" is double-quoted here because POSITION is a keyword; the
-- unquoted NEW.position reference below is equivalent (folds to lowercase).
INSERT INTO protocol_history
(
protocol_id,
name ,
created_at ,
updated_at ,
disabled,
admin_id ,
"position"
)
SELECT
NEW.id,
NEW.name ,
NEW.created_at ,
NEW.updated_at ,
NEW.disabled,
NEW.admin_id ,
NEW.position
;
RETURN NEW;
END;
$$;
--
-- Name: log_report_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_report_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO report_history
(
report_id,
name,
description,
sql,
search_attrs,
admin_id,
disabled,
report_type,
auto,
searchable,
position,
created_at,
updated_at,
edit_field_names,
selection_fields,
item_type,
short_name,
options
)
SELECT
NEW.id,
NEW.name,
NEW.description,
NEW.sql,
NEW.search_attrs,
NEW.admin_id,
NEW.disabled,
NEW.report_type,
NEW.auto,
NEW.searchable,
NEW.position,
NEW.created_at,
NEW.updated_at,
NEW.edit_field_names,
NEW.selection_fields,
NEW.item_type,
NEW.short_name,
NEW.options
;
RETURN NEW;
END;
$$;
--
-- Name: log_scantron_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_scantron_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
-- scantron_table_id records the base row's pk (NEW.id); scantron_id is the
-- business identifier column copied through unchanged.
INSERT INTO scantron_history
(
master_id,
scantron_id,
user_id,
created_at,
updated_at,
scantron_table_id
)
SELECT
NEW.master_id,
NEW.scantron_id,
NEW.user_id,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_sub_process_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_sub_process_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO sub_process_history
(
sub_process_id,
name,
disabled,
protocol_id,
admin_id ,
created_at,
updated_at
)
SELECT
NEW.id,
NEW.name,
NEW.disabled,
NEW.protocol_id,
NEW.admin_id ,
NEW.created_at,
NEW.updated_at
;
RETURN NEW;
END;
$$;
--
-- Name: log_tracker_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_tracker_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
-- Deduplicating audit trigger for trackers: only writes a tracker_history
-- row when no identical snapshot (ignoring created_at) already exists.
-- Check to see if there is an existing record in tracker_history that matches the
-- that inserted or updated in trackers.
-- If there is, just skip the insert into tracker_history, otherwise make the insert happen.
PERFORM * from tracker_history
WHERE
master_id = NEW.master_id
AND protocol_id = NEW.protocol_id
AND coalesce(protocol_event_id,-1) = coalesce(NEW.protocol_event_id,-1)
AND coalesce(event_date, '1900-01-01'::date)::date = coalesce(NEW.event_date, '1900-01-01')::date
AND sub_process_id = NEW.sub_process_id
AND coalesce(notes,'') = coalesce(NEW.notes,'')
AND coalesce(item_id,-1) = coalesce(NEW.item_id,-1)
AND coalesce(item_type,'') = coalesce(NEW.item_type,'')
-- do not check created_at --
AND updated_at::timestamp = NEW.updated_at::timestamp
AND coalesce(user_id,-1) = coalesce(NEW.user_id,-1);
IF NOT FOUND THEN
INSERT INTO tracker_history
(tracker_id, master_id, protocol_id,
protocol_event_id, event_date, sub_process_id, notes,
item_id, item_type,
created_at, updated_at, user_id)
SELECT NEW.id, NEW.master_id, NEW.protocol_id,
NEW.protocol_event_id, NEW.event_date,
NEW.sub_process_id, NEW.notes,
NEW.item_id, NEW.item_type,
NEW.created_at, NEW.updated_at, NEW.user_id ;
END IF;
RETURN NEW;
END;
$$;
--
-- Name: log_user_access_control_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_user_access_control_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO user_access_control_history
(
user_id,
resource_type,
resource_name,
options,
access,
app_type_id,
role_name,
admin_id,
disabled,
created_at,
updated_at,
user_access_control_id
)
SELECT
NEW.user_id,
NEW.resource_type,
NEW.resource_name,
NEW.options,
NEW.access,
NEW.app_type_id,
NEW.role_name,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_user_authorization_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_user_authorization_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO user_authorization_history
(
user_authorization_id,
user_id,
has_authorization,
admin_id,
disabled,
created_at,
updated_at
)
SELECT
NEW.id,
NEW.user_id,
NEW.has_authorization,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at
;
RETURN NEW;
END;
$$;
--
-- Name: log_user_role_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_user_role_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO user_role_history
(
app_type_id,
role_name,
user_id,
admin_id,
disabled,
created_at,
updated_at,
user_role_id
)
SELECT
NEW.app_type_id,
NEW.role_name,
NEW.user_id,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
--
-- Name: log_user_update(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.log_user_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO user_history
(
user_id,
email,
encrypted_password,
reset_password_token,
reset_password_sent_at,
remember_created_at,
sign_in_count,
current_sign_in_at,
last_sign_in_at,
current_sign_in_ip ,
last_sign_in_ip ,
created_at ,
updated_at,
failed_attempts,
unlock_token,
locked_at,
disabled ,
admin_id,
app_type_id,
authentication_token,
encrypted_otp_secret,
encrypted_otp_secret_iv,
encrypted_otp_secret_salt,
consumed_timestep,
otp_required_for_login,
password_updated_at,
first_name,
last_name
)
SELECT
NEW.id,
NEW.email,
NEW.encrypted_password,
NEW.reset_password_token,
NEW.reset_password_sent_at,
NEW.remember_created_at,
NEW.sign_in_count,
NEW.current_sign_in_at,
NEW.last_sign_in_at,
NEW.current_sign_in_ip ,
NEW.last_sign_in_ip ,
NEW.created_at ,
NEW.updated_at,
NEW.failed_attempts,
NEW.unlock_token,
NEW.locked_at,
NEW.disabled ,
NEW.admin_id,
NEW.app_type_id,
NEW.authentication_token,
NEW.encrypted_otp_secret,
NEW.encrypted_otp_secret_iv,
NEW.encrypted_otp_secret_salt,
NEW.consumed_timestep,
NEW.otp_required_for_login,
NEW.password_updated_at,
NEW.first_name,
NEW.last_name
;
RETURN NEW;
END;
$$;
--
-- Name: role_description_history_upd(); Type: FUNCTION; Schema: ml_app; Owner: -
--
CREATE FUNCTION ml_app.role_description_history_upd() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO role_description_history (
app_type_id, role_name, role_template, name, description,
disabled,
admin_id,
created_at,
updated_at,
role_description_id)
SELECT
NEW.app_type_id, NEW.role_name, NEW.role_template, NEW.name, NEW.description,
NEW.disabled,
NEW.admin_id,
NEW.created_at,
NEW.updated_at,
NEW.id;
RETURN NEW;
END;
$$;
--
-- Name: tracker_upsert(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Upsert guard on trackers inserts: keeps at most one "current" row per
-- (master_id, protocol_id) pair, preferring the row with the most recent
-- event_date (ties broken by updated_at).
CREATE FUNCTION ml_app.tracker_upsert() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE
latest_tracker trackers%ROWTYPE;
BEGIN
-- Look for a row in trackers for the inserted master / protocol pair
SELECT * into latest_tracker
FROM trackers
WHERE
master_id = NEW.master_id
AND protocol_id = NEW.protocol_id
ORDER BY
event_date DESC NULLS LAST, updated_at DESC NULLS LAST
LIMIT 1
;
IF NOT FOUND THEN
-- Nothing was found, so just allow the insert to continue
RETURN NEW;
ELSE
-- A trackers row for the master / protocol pair was found.
-- Check if it is more recent, by having an event date either later than the insert, or
-- has an event_date the same as the insert but with later updated_at time (unlikely)
IF latest_tracker.event_date > NEW.event_date OR
latest_tracker.event_date = NEW.event_date AND latest_tracker.updated_at > NEW.updated_at
THEN
-- The retrieved record was more recent, we should not make a change to the trackers table,
-- but instead we need to ensure an insert into the tracker_history table does happen even though there
-- is no actual insert or update trigger to fire.
-- We use the trackers record ID that was retrieved as the tracker_id in tracker_history
INSERT INTO tracker_history (
tracker_id, master_id, protocol_id,
protocol_event_id, event_date, sub_process_id, notes,
item_id, item_type,
created_at, updated_at, user_id
)
SELECT
latest_tracker.id, NEW.master_id, NEW.protocol_id,
NEW.protocol_event_id, NEW.event_date,
NEW.sub_process_id, NEW.notes,
NEW.item_id, NEW.item_type,
NEW.created_at, NEW.updated_at, NEW.user_id ;
RETURN NULL;
ELSE
-- The tracker record for the master / protocol pair exists and was not more recent, therefore it
-- needs to be replaced by the intended NEW record. Complete with an update and allow the cascading
-- trackers update trigger to handle the insert into tracker_history.
-- NOTE(review): this UPDATE matches on (master_id, protocol_id) only, so it
-- would touch every trackers row for the pair -- presumably the pair is
-- unique in trackers; verify against the table's constraints.
UPDATE trackers SET
master_id = NEW.master_id,
protocol_id = NEW.protocol_id,
protocol_event_id = NEW.protocol_event_id,
event_date = NEW.event_date,
sub_process_id = NEW.sub_process_id,
notes = NEW.notes,
item_id = NEW.item_id,
item_type = NEW.item_type,
-- do not update created_at --
updated_at = NEW.updated_at,
user_id = NEW.user_id
WHERE master_id = NEW.master_id AND
protocol_id = NEW.protocol_id
;
-- Prevent the original insert from actually completing.
RETURN NULL;
END IF;
END IF;
END;
$$;
--
-- Name: update_address_ranks(integer); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Demotes all but the most recently updated rank=10 (primary) address for
-- the given master to rank=5, so only one primary address remains.
-- Returns 1 when a demotion pass ran, NULL when no primary address exists.
CREATE FUNCTION ml_app.update_address_ranks(set_master_id integer) RETURNS integer
LANGUAGE plpgsql
AS $$
DECLARE
latest_primary RECORD;
BEGIN
-- Pick the newest rank=10 address; it keeps its primary status.
SELECT * into latest_primary
FROM addresses
WHERE master_id = set_master_id
AND rank = 10
ORDER BY updated_at DESC
LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
UPDATE addresses SET rank = 5
WHERE
master_id = set_master_id
AND rank = 10
AND id <> latest_primary.id;
RETURN 1;
END;
$$;
--
-- Name: update_master_with_player_info(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function: after a player info row is written, copy its rank onto
-- the owning masters record. Ranks above 12 are stored negated
-- (NEW.rank * -1) -- the meaning of the 12 cutoff is not visible here;
-- presumably it flags out-of-range ranks. TODO confirm.
-- Fix: normalized the one lowercase `new.rank` reference to `NEW.rank` for
-- consistency (plpgsql identifiers are case-insensitive, so no behavior change).
CREATE FUNCTION ml_app.update_master_with_player_info() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
UPDATE masters
set rank = (
case when NEW.rank is null then null
when (NEW.rank > 12) then NEW.rank * -1
else NEW.rank
end
)
WHERE masters.id = NEW.master_id;
RETURN NEW;
END;
$$;
--
-- Name: update_master_with_pro_info(); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Trigger function: mirror the written pro info row onto its masters
-- record, storing both a direct reference (pro_info_id = NEW.id) and the
-- pro identifier (pro_id = NEW.pro_id).
CREATE FUNCTION ml_app.update_master_with_pro_info() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
UPDATE masters AS m
SET pro_id = NEW.pro_id,
pro_info_id = NEW.id
WHERE m.id = NEW.master_id;
RETURN NEW;
END;
$$;
--
-- Name: update_player_contact_ranks(integer, character varying); Type: FUNCTION; Schema: ml_app; Owner: -
--
-- Demotes all but the most recently updated rank=10 (primary) player
-- contact of the given rec_type ('email', 'phone', ...) for a master to
-- rank=5. Returns 1 when a demotion pass ran, NULL when no primary contact
-- of that type exists. Mirrors update_address_ranks.
CREATE FUNCTION ml_app.update_player_contact_ranks(set_master_id integer, set_rec_type character varying) RETURNS integer
LANGUAGE plpgsql
AS $$
DECLARE
latest_primary RECORD;
BEGIN
-- Pick the newest rank=10 contact of this type; it keeps primary status.
SELECT * into latest_primary
FROM player_contacts
WHERE master_id = set_master_id
AND rank = 10
AND rec_type = set_rec_type
ORDER BY updated_at DESC
LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
UPDATE player_contacts SET rank = 5
WHERE
master_id = set_master_id
AND rank = 10
AND rec_type = set_rec_type
AND id <> latest_primary.id;
RETURN 1;
END;
$$;
--
-- Name: redcap_data_collection_instrument_history_upd(); Type: FUNCTION; Schema: ref_data; Owner: -
--
-- Audit trigger: snapshot the NEW redcap_data_collection_instruments row
-- into its history table, recording the base row's pk in
-- redcap_data_collection_instrument_id.
CREATE FUNCTION ref_data.redcap_data_collection_instrument_history_upd() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO redcap_data_collection_instrument_history (
redcap_project_admin_id, name, label,
disabled,
admin_id,
created_at,
updated_at,
redcap_data_collection_instrument_id)
SELECT
NEW.redcap_project_admin_id, NEW.name, NEW.label,
NEW.disabled,
NEW.admin_id,
NEW.created_at,
NEW.updated_at,
NEW.id;
RETURN NEW;
END;
$$;
--
-- Name: redcap_project_user_history_upd(); Type: FUNCTION; Schema: ref_data; Owner: -
--
-- Audit trigger: snapshot the NEW redcap_project_users row into its history
-- table, recording the base row's pk in redcap_project_user_id.
CREATE FUNCTION ref_data.redcap_project_user_history_upd() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO redcap_project_user_history (
redcap_project_admin_id, username, email, expiration,
disabled,
admin_id,
created_at,
updated_at,
redcap_project_user_id)
SELECT
NEW.redcap_project_admin_id, NEW.username, NEW.email, NEW.expiration,
NEW.disabled,
NEW.admin_id,
NEW.created_at,
NEW.updated_at,
NEW.id;
RETURN NEW;
END;
$$;
-- DDL for the audit/history tables populated by the log_* trigger functions
-- above, plus their base tables. Each table is followed by its <name>_id_seq
-- sequence and an ALTER SEQUENCE ... OWNED BY tying the sequence's lifetime
-- to the table's id column (dropped with the column, pg_dump convention).
--
-- Name: accuracy_score_history; Type: TABLE; Schema: ml_app; Owner: -
--
CREATE TABLE ml_app.accuracy_score_history (
id integer NOT NULL,
name character varying,
value integer,
admin_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
disabled boolean,
accuracy_score_id integer
);
--
-- Name: accuracy_score_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.accuracy_score_history_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accuracy_score_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.accuracy_score_history_id_seq OWNED BY ml_app.accuracy_score_history.id;
--
-- Name: accuracy_scores; Type: TABLE; Schema: ml_app; Owner: -
--
CREATE TABLE ml_app.accuracy_scores (
id integer NOT NULL,
name character varying,
value integer,
admin_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
disabled boolean
);
--
-- Name: accuracy_scores_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.accuracy_scores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accuracy_scores_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.accuracy_scores_id_seq OWNED BY ml_app.accuracy_scores.id;
--
-- Name: activity_log_history; Type: TABLE; Schema: ml_app; Owner: -
--
CREATE TABLE ml_app.activity_log_history (
id integer NOT NULL,
activity_log_id integer,
name character varying,
item_type character varying,
rec_type character varying,
admin_id integer,
disabled boolean,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
action_when_attribute character varying,
field_list character varying,
blank_log_field_list character varying,
blank_log_name character varying,
extra_log_types character varying,
hide_item_list_panel boolean,
main_log_name character varying,
process_name character varying,
table_name character varying,
category character varying,
schema_name character varying
);
--
-- Name: activity_log_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.activity_log_history_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: activity_log_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.activity_log_history_id_seq OWNED BY ml_app.activity_log_history.id;
--
-- Name: activity_log_player_contact_phone_history; Type: TABLE; Schema: ml_app; Owner: -
--
CREATE TABLE ml_app.activity_log_player_contact_phone_history (
id integer NOT NULL,
master_id integer,
player_contact_id integer,
data character varying,
select_call_direction character varying,
select_who character varying,
called_when date,
select_result character varying,
select_next_step character varying,
follow_up_when date,
notes character varying,
protocol_id integer,
set_related_player_contact_rank character varying,
user_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
activity_log_player_contact_phone_id integer,
extra_log_type character varying
);
--
-- Name: activity_log_player_contact_phone_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.activity_log_player_contact_phone_history_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: activity_log_player_contact_phone_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.activity_log_player_contact_phone_history_id_seq OWNED BY ml_app.activity_log_player_contact_phone_history.id;
--
-- Name: activity_log_player_contact_phones; Type: TABLE; Schema: ml_app; Owner: -
--
CREATE TABLE ml_app.activity_log_player_contact_phones (
id integer NOT NULL,
data character varying,
select_call_direction character varying,
select_who character varying,
called_when date,
select_result character varying,
select_next_step character varying,
follow_up_when date,
protocol_id integer,
notes character varying,
user_id integer,
player_contact_id integer,
master_id integer,
disabled boolean,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
set_related_player_contact_rank character varying,
extra_log_type character varying
);
--
-- Name: activity_log_player_contact_phones_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.activity_log_player_contact_phones_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: activity_log_player_contact_phones_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.activity_log_player_contact_phones_id_seq OWNED BY ml_app.activity_log_player_contact_phones.id;
--
-- Name: activity_logs; Type: TABLE; Schema: ml_app; Owner: -
--
CREATE TABLE ml_app.activity_logs (
id integer NOT NULL,
name character varying,
item_type character varying,
rec_type character varying,
admin_id integer,
disabled boolean,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
action_when_attribute character varying,
field_list character varying,
blank_log_field_list character varying,
blank_log_name character varying,
extra_log_types character varying,
hide_item_list_panel boolean,
main_log_name character varying,
process_name character varying,
table_name character varying,
category character varying,
schema_name character varying
);
--
-- Name: activity_logs_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.activity_logs_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: activity_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.activity_logs_id_seq OWNED BY ml_app.activity_logs.id;
--
-- Name: address_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.addresses (same columns + address_id link).
-- updated_at defaults to now() here while created_at must be supplied.
CREATE TABLE ml_app.address_history (
    id integer NOT NULL,
    master_id integer,
    street character varying,
    street2 character varying,
    street3 character varying,
    city character varying,
    state character varying,
    zip character varying,
    source character varying,
    rank integer,
    rec_type character varying,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now(),
    country character varying(3),
    postal_code character varying,
    region character varying,
    address_id integer
);
--
-- Name: address_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.address_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: address_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.address_history_id_seq OWNED BY ml_app.address_history.id;
--
-- Name: addresses; Type: TABLE; Schema: ml_app; Owner: -
--
-- Postal addresses attached to a master record; rank orders multiple
-- addresses per master. country is capped at 3 chars
-- (NOTE(review): presumably an ISO 3166 country code — confirm).
CREATE TABLE ml_app.addresses (
    id integer NOT NULL,
    master_id integer,
    street character varying,
    street2 character varying,
    street3 character varying,
    city character varying,
    state character varying,
    zip character varying,
    source character varying,
    rank integer,
    rec_type character varying,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now(),
    country character varying(3),
    postal_code character varying,
    region character varying
);
--
-- Name: addresses_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.addresses_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: addresses_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.addresses_id_seq OWNED BY ml_app.addresses.id;
--
-- Name: admin_action_logs; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit trail of admin actions on arbitrary items (polymorphic
-- item_type/item_id), storing before/after snapshots as json.
CREATE TABLE ml_app.admin_action_logs (
    id integer NOT NULL,
    admin_id integer,
    item_type character varying,
    item_id integer,
    action character varying,
    url character varying,
    prev_value json,
    new_value json,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: admin_action_logs_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.admin_action_logs_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: admin_action_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.admin_action_logs_id_seq OWNED BY ml_app.admin_action_logs.id;
--
-- Name: admin_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.admins (linked via admin_id), including the
-- full authentication column set so credential changes are versioned too.
CREATE TABLE ml_app.admin_history (
    id integer NOT NULL,
    email character varying DEFAULT ''::character varying NOT NULL,
    encrypted_password character varying DEFAULT ''::character varying NOT NULL,
    sign_in_count integer DEFAULT 0,
    current_sign_in_at timestamp without time zone,
    last_sign_in_at timestamp without time zone,
    current_sign_in_ip character varying,
    last_sign_in_ip character varying,
    failed_attempts integer DEFAULT 0,
    unlock_token character varying,
    locked_at timestamp without time zone,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    disabled boolean,
    admin_id integer,
    encrypted_otp_secret character varying,
    encrypted_otp_secret_iv character varying,
    encrypted_otp_secret_salt character varying,
    consumed_timestep integer,
    otp_required_for_login boolean,
    reset_password_sent_at timestamp without time zone,
    password_updated_at timestamp without time zone,
    updated_by_admin_id integer
);
--
-- Name: admin_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.admin_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: admin_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.admin_history_id_seq OWNED BY ml_app.admin_history.id;
--
-- Name: admins; Type: TABLE; Schema: ml_app; Owner: -
--
-- Administrator accounts. NOTE(review): the column set
-- (encrypted_password, sign_in_count, failed_attempts, unlock_token,
-- encrypted_otp_secret*, consumed_timestep) matches the Rails Devise +
-- devise-two-factor conventions — confirm against the application code.
CREATE TABLE ml_app.admins (
    id integer NOT NULL,
    email character varying DEFAULT ''::character varying NOT NULL,
    encrypted_password character varying DEFAULT ''::character varying NOT NULL,
    sign_in_count integer DEFAULT 0,
    current_sign_in_at timestamp without time zone,
    last_sign_in_at timestamp without time zone,
    current_sign_in_ip character varying,
    last_sign_in_ip character varying,
    failed_attempts integer DEFAULT 0,
    unlock_token character varying,
    locked_at timestamp without time zone,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    disabled boolean,
    encrypted_otp_secret character varying,
    encrypted_otp_secret_iv character varying,
    encrypted_otp_secret_salt character varying,
    consumed_timestep integer,
    otp_required_for_login boolean,
    reset_password_sent_at timestamp without time zone,
    password_updated_at timestamp without time zone,
    first_name character varying,
    last_name character varying,
    do_not_email boolean DEFAULT false,
    admin_id bigint
);
--
-- Name: admins_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.admins_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: admins_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.admins_id_seq OWNED BY ml_app.admins.id;
--
-- Name: app_configuration_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.app_configurations (linked via
-- app_configuration_id). Note: here app_type_id/user_id are bigint while the
-- live table declares them integer — a harmless width mismatch in the dump.
CREATE TABLE ml_app.app_configuration_history (
    id integer NOT NULL,
    name character varying,
    value character varying,
    app_type_id bigint,
    user_id bigint,
    role_name character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    app_configuration_id integer
);
--
-- Name: app_configuration_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.app_configuration_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: app_configuration_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.app_configuration_history_id_seq OWNED BY ml_app.app_configuration_history.id;
--
-- Name: app_configurations; Type: TABLE; Schema: ml_app; Owner: -
--
-- Name/value configuration entries, scoped per app type and optionally per
-- user or role (user_id / role_name).
CREATE TABLE ml_app.app_configurations (
    id integer NOT NULL,
    name character varying,
    value character varying,
    disabled boolean,
    admin_id integer,
    user_id integer,
    app_type_id integer,
    role_name character varying,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: app_configurations_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.app_configurations_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: app_configurations_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.app_configurations_id_seq OWNED BY ml_app.app_configurations.id;
--
-- Name: app_type_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.app_types (linked via app_type_id).
CREATE TABLE ml_app.app_type_history (
    id integer NOT NULL,
    name character varying,
    label character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    app_type_id integer
);
--
-- Name: app_type_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.app_type_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: app_type_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.app_type_history_id_seq OWNED BY ml_app.app_type_history.id;
--
-- Name: app_types; Type: TABLE; Schema: ml_app; Owner: -
--
-- Registered application types: internal name, display label, and the
-- default database schema each app type operates against.
CREATE TABLE ml_app.app_types (
    id integer NOT NULL,
    name character varying,
    label character varying,
    disabled boolean,
    admin_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    default_schema_name character varying
);
--
-- Name: app_types_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.app_types_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: app_types_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.app_types_id_seq OWNED BY ml_app.app_types.id;
--
-- Name: ar_internal_metadata; Type: TABLE; Schema: ml_app; Owner: -
--
-- Key/value store keyed on "key". NOTE(review): the name matches Rails'
-- ActiveRecord internal metadata table (stores e.g. the environment name) —
-- confirm; it should not be edited by hand.
CREATE TABLE ml_app.ar_internal_metadata (
    key character varying NOT NULL,
    value character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: college_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.colleges (linked via college_id).
CREATE TABLE ml_app.college_history (
    id integer NOT NULL,
    name character varying,
    synonym_for_id integer,
    disabled boolean,
    admin_id integer,
    user_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    college_id integer
);
--
-- Name: college_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.college_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: college_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.college_history_id_seq OWNED BY ml_app.college_history.id;
--
-- Name: colleges; Type: TABLE; Schema: ml_app; Owner: -
--
-- Lookup list of colleges; synonym_for_id lets an alternative spelling point
-- at its canonical row (self-reference).
CREATE TABLE ml_app.colleges (
    id integer NOT NULL,
    name character varying,
    synonym_for_id integer,
    disabled boolean,
    admin_id integer,
    user_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: colleges_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.colleges_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: colleges_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.colleges_id_seq OWNED BY ml_app.colleges.id;
--
-- Name: config_libraries; Type: TABLE; Schema: ml_app; Owner: -
--
-- Reusable configuration fragments (options text in a given format),
-- organized by category/name for inclusion in other definitions.
CREATE TABLE ml_app.config_libraries (
    id integer NOT NULL,
    category character varying,
    name character varying,
    options character varying,
    format character varying,
    disabled boolean DEFAULT false,
    admin_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: config_libraries_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.config_libraries_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: config_libraries_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.config_libraries_id_seq OWNED BY ml_app.config_libraries.id;
--
-- Name: config_library_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.config_libraries (linked via
-- config_library_id).
CREATE TABLE ml_app.config_library_history (
    id integer NOT NULL,
    category character varying,
    name character varying,
    options character varying,
    format character varying,
    disabled boolean DEFAULT false,
    admin_id integer,
    config_library_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: config_library_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.config_library_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: config_library_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.config_library_history_id_seq OWNED BY ml_app.config_library_history.id;
--
-- Name: copy_player_infos; Type: TABLE; Schema: ml_app; Owner: -
--
-- NOTE(review): no NOT NULL on id, no backing sequence, no constraints —
-- this looks like a one-off backup copy of a player_infos table (e.g. from
-- CREATE TABLE ... AS) rather than a live application table; confirm whether
-- it is still needed.
CREATE TABLE ml_app.copy_player_infos (
    id integer,
    master_id integer,
    first_name character varying,
    last_name character varying,
    middle_name character varying,
    nick_name character varying,
    birth_date date,
    death_date date,
    user_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    contact_pref character varying,
    start_year integer,
    rank integer,
    notes character varying,
    contactid integer,
    college character varying,
    end_year integer,
    source character varying
);
--
-- Name: delayed_jobs; Type: TABLE; Schema: ml_app; Owner: -
--
-- Background job queue. NOTE(review): the column set (priority, attempts,
-- handler, run_at, locked_at/locked_by, failed_at, queue) matches the Rails
-- delayed_job gem schema — confirm against the application Gemfile.
CREATE TABLE ml_app.delayed_jobs (
    id integer NOT NULL,
    priority integer DEFAULT 0 NOT NULL,
    attempts integer DEFAULT 0 NOT NULL,
    handler text NOT NULL,
    last_error text,
    run_at timestamp without time zone,
    locked_at timestamp without time zone,
    failed_at timestamp without time zone,
    locked_by character varying,
    queue character varying,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: delayed_jobs_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.delayed_jobs_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: delayed_jobs_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.delayed_jobs_id_seq OWNED BY ml_app.delayed_jobs.id;
--
-- Name: dynamic_model_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.dynamic_models (linked via
-- dynamic_model_id). "position" is double-quoted because it collides with a
-- SQL keyword.
CREATE TABLE ml_app.dynamic_model_history (
    id integer NOT NULL,
    name character varying,
    table_name character varying,
    schema_name character varying,
    primary_key_name character varying,
    foreign_key_name character varying,
    description character varying,
    admin_id integer,
    disabled boolean,
    "position" integer,
    category character varying,
    table_key_name character varying,
    field_list character varying,
    result_order character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    dynamic_model_id integer,
    options character varying
);
--
-- Name: dynamic_model_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.dynamic_model_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: dynamic_model_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.dynamic_model_history_id_seq OWNED BY ml_app.dynamic_model_history.id;
--
-- Name: dynamic_models; Type: TABLE; Schema: ml_app; Owner: -
--
-- Admin-defined dynamic models: metadata describing an external table
-- (schema/table/key names, field list, ordering, serialized options) that the
-- application exposes as a model at runtime.
CREATE TABLE ml_app.dynamic_models (
    id integer NOT NULL,
    name character varying,
    table_name character varying,
    schema_name character varying,
    primary_key_name character varying,
    foreign_key_name character varying,
    description character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    "position" integer,
    category character varying,
    table_key_name character varying,
    field_list character varying,
    result_order character varying,
    options character varying
);
--
-- Name: dynamic_models_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.dynamic_models_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: dynamic_models_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.dynamic_models_id_seq OWNED BY ml_app.dynamic_models.id;
--
-- Name: exception_logs; Type: TABLE; Schema: ml_app; Owner: -
--
-- Application exception log: message, main summary, backtrace text, the
-- user/admin in context, and when a notification was sent (notified_at).
CREATE TABLE ml_app.exception_logs (
    id integer NOT NULL,
    message character varying,
    main character varying,
    backtrace character varying,
    user_id integer,
    admin_id integer,
    notified_at timestamp without time zone,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: exception_logs_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.exception_logs_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: exception_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.exception_logs_id_seq OWNED BY ml_app.exception_logs.id;
--
-- Name: external_identifier_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.external_identifiers (linked via
-- external_identifier_id).
CREATE TABLE ml_app.external_identifier_history (
    id integer NOT NULL,
    name character varying,
    label character varying,
    external_id_attribute character varying,
    external_id_view_formatter character varying,
    external_id_edit_pattern character varying,
    prevent_edit boolean,
    pregenerate_ids boolean,
    min_id bigint,
    max_id bigint,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    external_identifier_id integer,
    extra_fields character varying,
    alphanumeric boolean,
    schema_name character varying,
    options character varying
);
--
-- Name: external_identifier_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.external_identifier_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: external_identifier_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.external_identifier_history_id_seq OWNED BY ml_app.external_identifier_history.id;
--
-- Name: external_identifiers; Type: TABLE; Schema: ml_app; Owner: -
--
-- Definitions of externally-assigned identifier types: attribute name,
-- display/edit formatting, allowed id range (min_id..max_id), and whether ids
-- are pre-generated or alphanumeric.
CREATE TABLE ml_app.external_identifiers (
    id integer NOT NULL,
    name character varying,
    label character varying,
    external_id_attribute character varying,
    external_id_view_formatter character varying,
    external_id_edit_pattern character varying,
    prevent_edit boolean,
    pregenerate_ids boolean,
    min_id bigint,
    max_id bigint,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    alphanumeric boolean,
    extra_fields character varying,
    category character varying,
    schema_name character varying,
    options character varying
);
--
-- Name: external_identifiers_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.external_identifiers_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: external_identifiers_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.external_identifiers_id_seq OWNED BY ml_app.external_identifiers.id;
--
-- Name: external_link_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.external_links (linked via
-- external_link_id).
CREATE TABLE ml_app.external_link_history (
    id integer NOT NULL,
    name character varying,
    value character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    external_link_id integer
);
--
-- Name: external_link_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.external_link_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: external_link_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.external_link_history_id_seq OWNED BY ml_app.external_link_history.id;
--
-- Name: external_links; Type: TABLE; Schema: ml_app; Owner: -
--
-- Admin-managed named external link templates (name → value).
CREATE TABLE ml_app.external_links (
    id integer NOT NULL,
    name character varying,
    value character varying,
    disabled boolean,
    admin_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: external_links_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.external_links_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: external_links_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.external_links_id_seq OWNED BY ml_app.external_links.id;
--
-- Name: general_selection_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.general_selections (linked via
-- general_selection_id). "position" is double-quoted because it collides
-- with a SQL keyword.
CREATE TABLE ml_app.general_selection_history (
    id integer NOT NULL,
    name character varying,
    value character varying,
    item_type character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    admin_id integer,
    create_with boolean,
    edit_if_set boolean,
    edit_always boolean,
    "position" integer,
    description character varying,
    lock boolean,
    general_selection_id integer
);
--
-- Name: general_selection_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.general_selection_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: general_selection_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.general_selection_history_id_seq OWNED BY ml_app.general_selection_history.id;
--
-- Name: general_selections; Type: TABLE; Schema: ml_app; Owner: -
--
-- Admin-managed selection lists (name/value pairs per item_type) driving
-- drop-down choices; edit_if_set / edit_always / create_with control when a
-- value may be used, "position" orders entries within a list.
CREATE TABLE ml_app.general_selections (
    id integer NOT NULL,
    name character varying,
    value character varying,
    item_type character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    admin_id integer,
    create_with boolean,
    edit_if_set boolean,
    edit_always boolean,
    "position" integer,
    description character varying,
    lock boolean
);
--
-- Name: general_selections_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.general_selections_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: general_selections_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.general_selections_id_seq OWNED BY ml_app.general_selections.id;
--
-- Name: imports; Type: TABLE; Schema: ml_app; Owner: -
--
-- One row per bulk data import: target table, source filename, row count,
-- and the ids of the created rows as a Postgres integer[] array
-- (imported_items).
CREATE TABLE ml_app.imports (
    id integer NOT NULL,
    primary_table character varying,
    item_count integer,
    filename character varying,
    imported_items integer[],
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: imports_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.imports_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: imports_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.imports_id_seq OWNED BY ml_app.imports.id;
--
-- Name: imports_model_generators; Type: TABLE; Schema: ml_app; Owner: -
--
-- Definitions for generating dynamic-model tables from imports; options is
-- stored as json. Uses bigint ids (newer migration than the integer-id
-- tables in this schema).
CREATE TABLE ml_app.imports_model_generators (
    id bigint NOT NULL,
    name character varying,
    dynamic_model_table character varying,
    options json,
    description character varying,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: imports_model_generators_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.imports_model_generators_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: imports_model_generators_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.imports_model_generators_id_seq OWNED BY ml_app.imports_model_generators.id;
--
-- Name: item_flag_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.item_flags (linked via item_flag_id).
CREATE TABLE ml_app.item_flag_history (
    id integer NOT NULL,
    item_id integer,
    item_type character varying,
    item_flag_name_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    user_id integer,
    item_flag_id integer,
    disabled boolean
);
--
-- Name: item_flag_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.item_flag_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: item_flag_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.item_flag_history_id_seq OWNED BY ml_app.item_flag_history.id;
--
-- Name: item_flag_name_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/versioning copy of ml_app.item_flag_names (linked via
-- item_flag_name_id).
CREATE TABLE ml_app.item_flag_name_history (
    id integer NOT NULL,
    name character varying,
    item_type character varying,
    disabled boolean,
    admin_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    item_flag_name_id integer
);
--
-- Name: item_flag_name_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.item_flag_name_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: item_flag_name_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.item_flag_name_history_id_seq OWNED BY ml_app.item_flag_name_history.id;
--
-- Name: item_flag_names; Type: TABLE; Schema: ml_app; Owner: -
--
-- Catalog of flag names available per item_type (admin-managed lookup).
CREATE TABLE ml_app.item_flag_names (
    id integer NOT NULL,
    name character varying,
    item_type character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    admin_id integer
);
--
-- Name: item_flag_names_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.item_flag_names_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: item_flag_names_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.item_flag_names_id_seq OWNED BY ml_app.item_flag_names.id;
--
-- Name: item_flags; Type: TABLE; Schema: ml_app; Owner: -
--
-- Flags applied to arbitrary records (polymorphic item_type/item_id),
-- referencing the flag catalog via item_flag_name_id (the only NOT NULL FK).
CREATE TABLE ml_app.item_flags (
    id integer NOT NULL,
    item_id integer,
    item_type character varying,
    item_flag_name_id integer NOT NULL,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    user_id integer,
    disabled boolean
);
--
-- Name: item_flags_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.item_flags_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: item_flags_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.item_flags_id_seq OWNED BY ml_app.item_flags.id;
--
-- Name: manage_users; Type: TABLE; Schema: ml_app; Owner: -
--
-- Skeleton table holding only id + timestamps.
-- NOTE(review): presumably a placeholder/activity marker for the
-- user-management feature — confirm whether it can be dropped.
CREATE TABLE ml_app.manage_users (
    id integer NOT NULL,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: manage_users_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.manage_users_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: manage_users_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.manage_users_id_seq OWNED BY ml_app.manage_users.id;
--
-- Name: masters; Type: TABLE; Schema: ml_app; Owner: -
--
-- Central master records that the many master_id columns across this schema
-- reference. NOTE(review): msid is presumably drawn from ml_app.msid_seq
-- (declared later, no OWNED BY) — confirm in application code.
CREATE TABLE ml_app.masters (
    id integer NOT NULL,
    msid integer,
    pro_id integer,
    pro_info_id integer,
    rank integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    user_id integer,
    contact_id integer
);
--
-- Name: masters_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.masters_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: masters_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.masters_id_seq OWNED BY ml_app.masters.id;
--
-- Name: message_notifications; Type: TABLE; Schema: ml_app; Owner: -
--
-- Outbound notifications generated from templates: recipients as an
-- integer[] of user ids (plus free-form recipient_data), rendered content,
-- delivery status and status_changed tracking.
CREATE TABLE ml_app.message_notifications (
    id integer NOT NULL,
    app_type_id integer,
    master_id integer,
    user_id integer,
    item_id integer,
    item_type character varying,
    message_type character varying,
    recipient_user_ids integer[],
    layout_template_name character varying,
    content_template_name character varying,
    generated_content character varying,
    status character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    status_changed character varying,
    subject character varying,
    data json,
    recipient_data character varying[],
    from_user_email character varying,
    role_name character varying,
    content_template_text character varying,
    importance character varying,
    extra_substitutions character varying,
    content_hash character varying
);
--
-- Name: message_notifications_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.message_notifications_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: message_notifications_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.message_notifications_id_seq OWNED BY ml_app.message_notifications.id;
--
-- Name: message_template_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Versioned mirror of message_templates: same payload columns plus
-- message_template_id pointing back at the live row (presumably maintained
-- by a trigger defined elsewhere in this dump -- not visible here).
CREATE TABLE ml_app.message_template_history (
    id integer NOT NULL,
    name character varying,
    template_type character varying,
    template character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    message_template_id integer,
    message_type character varying,
    category character varying
);
--
-- Name: message_template_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.message_template_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: message_template_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.message_template_history_id_seq OWNED BY ml_app.message_template_history.id;
--
-- Name: message_templates; Type: TABLE; Schema: ml_app; Owner: -
--
-- Live template definitions; the history table above mirrors these columns.
CREATE TABLE ml_app.message_templates (
    id integer NOT NULL,
    name character varying,
    message_type character varying,
    template_type character varying,
    template character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    category character varying
);
--
-- Name: message_templates_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.message_templates_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: message_templates_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.message_templates_id_seq OWNED BY ml_app.message_templates.id;
--
-- Name: model_references; Type: TABLE; Schema: ml_app; Owner: -
--
-- Polymorphic link table: (from_record_type, from_record_id) references
-- (to_record_type, to_record_id), each with its owning master record id.
CREATE TABLE ml_app.model_references (
    id integer NOT NULL,
    from_record_type character varying,
    from_record_id integer,
    from_record_master_id integer,
    to_record_type character varying,
    to_record_id integer,
    to_record_master_id integer,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean
);
--
-- Name: model_references_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.model_references_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: model_references_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.model_references_id_seq OWNED BY ml_app.model_references.id;
--
-- Name: msid_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
-- Standalone sequence (no OWNED BY): not tied to any table column here;
-- presumably consumed by application code or a trigger -- verify before removal.
CREATE SEQUENCE ml_app.msid_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_archived_file_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror for NFS-store archived file records; note file_size and
-- file_updated_at are stored as varchar here (unlike nfs_store_uploads).
CREATE TABLE ml_app.nfs_store_archived_file_history (
    id integer NOT NULL,
    file_hash character varying,
    file_name character varying,
    content_type character varying,
    archive_file character varying,
    path character varying,
    file_size character varying,
    file_updated_at character varying,
    nfs_store_container_id bigint,
    title character varying,
    description character varying,
    file_metadata character varying,
    nfs_store_stored_file_id bigint,
    user_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    nfs_store_archived_file_id integer
);
--
-- Name: nfs_store_archived_file_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_archived_file_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_archived_file_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_archived_file_history_id_seq OWNED BY ml_app.nfs_store_archived_file_history.id;
--
-- Name: nfs_store_archived_files_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
-- Sequence for ml_app.nfs_store_archived_files (table defined elsewhere in the dump).
CREATE SEQUENCE ml_app.nfs_store_archived_files_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_archived_files_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_archived_files_id_seq OWNED BY ml_app.nfs_store_archived_files.id;
--
-- Name: nfs_store_container_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of nfs_store_containers; orig_nfs_store_container_id keeps
-- the original container reference alongside the history link column.
CREATE TABLE ml_app.nfs_store_container_history (
    id integer NOT NULL,
    master_id integer,
    name character varying,
    app_type_id bigint,
    orig_nfs_store_container_id bigint,
    user_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    nfs_store_container_id integer
);
--
-- Name: nfs_store_container_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_container_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_container_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_container_history_id_seq OWNED BY ml_app.nfs_store_container_history.id;
--
-- Name: nfs_store_containers; Type: TABLE; Schema: ml_app; Owner: -
--
-- Live NFS-store containers; nfs_store_container_id allows self-reference
-- (parent container), per the naming -- confirm against app code.
CREATE TABLE ml_app.nfs_store_containers (
    id integer NOT NULL,
    name character varying,
    user_id integer,
    app_type_id integer,
    nfs_store_container_id integer,
    master_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: nfs_store_containers_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_containers_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_containers_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_containers_id_seq OWNED BY ml_app.nfs_store_containers.id;
--
-- Name: nfs_store_downloads; Type: TABLE; Schema: ml_app; Owner: -
--
-- Download actions against the NFS store; user_groups defaults to an empty
-- integer array, and nfs_store_container_ids supports multi-container downloads.
CREATE TABLE ml_app.nfs_store_downloads (
    id integer NOT NULL,
    user_groups integer[] DEFAULT '{}'::integer[],
    path character varying,
    retrieval_path character varying,
    retrieved_items character varying,
    user_id integer NOT NULL,
    nfs_store_container_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    nfs_store_container_ids integer[]
);
--
-- Name: nfs_store_downloads_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_downloads_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_downloads_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_downloads_id_seq OWNED BY ml_app.nfs_store_downloads.id;
--
-- Name: nfs_store_filter_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of nfs_store_filters (role/resource-scoped path filters).
CREATE TABLE ml_app.nfs_store_filter_history (
    id integer NOT NULL,
    app_type_id bigint,
    role_name character varying,
    user_id bigint,
    resource_name character varying,
    filter character varying,
    description character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    nfs_store_filter_id integer
);
--
-- Name: nfs_store_filter_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_filter_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_filter_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_filter_history_id_seq OWNED BY ml_app.nfs_store_filter_history.id;
--
-- Name: nfs_store_filters; Type: TABLE; Schema: ml_app; Owner: -
--
-- Live filter definitions; scoped by app type and either role_name or user_id.
CREATE TABLE ml_app.nfs_store_filters (
    id integer NOT NULL,
    app_type_id integer,
    role_name character varying,
    user_id integer,
    resource_name character varying,
    filter character varying,
    description character varying,
    disabled boolean,
    admin_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: nfs_store_filters_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_filters_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_filters_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_filters_id_seq OWNED BY ml_app.nfs_store_filters.id;
--
-- Name: nfs_store_imports; Type: TABLE; Schema: ml_app; Owner: -
--
-- Tracks files imported into an NFS-store container, keyed by content hash.
CREATE TABLE ml_app.nfs_store_imports (
    id integer NOT NULL,
    file_hash character varying,
    file_name character varying,
    user_id integer,
    nfs_store_container_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    path character varying
);
--
-- Name: nfs_store_imports_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_imports_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_imports_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_imports_id_seq OWNED BY ml_app.nfs_store_imports.id;
--
-- Name: nfs_store_move_actions; Type: TABLE; Schema: ml_app; Owner: -
--
-- Records file move operations (path -> new_path) within/between containers.
CREATE TABLE ml_app.nfs_store_move_actions (
    id integer NOT NULL,
    user_groups integer[],
    path character varying,
    new_path character varying,
    retrieval_path character varying,
    moved_items character varying,
    nfs_store_container_ids integer[],
    user_id integer NOT NULL,
    nfs_store_container_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: nfs_store_move_actions_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_move_actions_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_move_actions_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_move_actions_id_seq OWNED BY ml_app.nfs_store_move_actions.id;
--
-- Name: nfs_store_stored_file_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror for stored files; file_size/file_updated_at kept as varchar,
-- matching nfs_store_archived_file_history above.
CREATE TABLE ml_app.nfs_store_stored_file_history (
    id integer NOT NULL,
    file_hash character varying,
    file_name character varying,
    content_type character varying,
    path character varying,
    file_size character varying,
    file_updated_at character varying,
    nfs_store_container_id bigint,
    title character varying,
    description character varying,
    file_metadata character varying,
    last_process_name_run character varying,
    user_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    nfs_store_stored_file_id integer
);
--
-- Name: nfs_store_stored_file_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_stored_file_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_stored_file_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_stored_file_history_id_seq OWNED BY ml_app.nfs_store_stored_file_history.id;
--
-- Name: nfs_store_stored_files_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
-- Sequence for ml_app.nfs_store_stored_files (table defined elsewhere in the dump).
CREATE SEQUENCE ml_app.nfs_store_stored_files_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_stored_files_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_stored_files_id_seq OWNED BY ml_app.nfs_store_stored_files.id;
--
-- Name: nfs_store_trash_actions; Type: TABLE; Schema: ml_app; Owner: -
--
-- Records trash (delete) operations on NFS-store items; mirrors the shape of
-- nfs_store_downloads / nfs_store_move_actions.
CREATE TABLE ml_app.nfs_store_trash_actions (
    id integer NOT NULL,
    user_groups integer[] DEFAULT '{}'::integer[],
    path character varying,
    retrieval_path character varying,
    trashed_items character varying,
    nfs_store_container_ids integer[],
    user_id integer NOT NULL,
    nfs_store_container_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: nfs_store_trash_actions_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_trash_actions_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_trash_actions_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_trash_actions_id_seq OWNED BY ml_app.nfs_store_trash_actions.id;
--
-- Name: nfs_store_uploads; Type: TABLE; Schema: ml_app; Owner: -
--
-- Chunked upload tracking: unlike the history tables, file metadata here is
-- strongly typed (bigint size, timestamp) and NOT NULL where required.
CREATE TABLE ml_app.nfs_store_uploads (
    id integer NOT NULL,
    file_hash character varying NOT NULL,
    file_name character varying NOT NULL,
    content_type character varying NOT NULL,
    file_size bigint NOT NULL,
    chunk_count integer,
    completed boolean,
    file_updated_at timestamp without time zone,
    user_id integer,
    nfs_store_container_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    path character varying,
    nfs_store_stored_file_id integer,
    upload_set character varying
);
--
-- Name: nfs_store_uploads_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_uploads_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_uploads_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_uploads_id_seq OWNED BY ml_app.nfs_store_uploads.id;
--
-- Name: nfs_store_user_file_actions; Type: TABLE; Schema: ml_app; Owner: -
--
-- Generic user file action log; the action column discriminates the operation,
-- generalizing the dedicated move/trash/download tables above.
CREATE TABLE ml_app.nfs_store_user_file_actions (
    id integer NOT NULL,
    user_groups integer[],
    path character varying,
    new_path character varying,
    action character varying,
    retrieval_path character varying,
    action_items character varying,
    nfs_store_container_ids integer[],
    user_id integer NOT NULL,
    nfs_store_container_id integer,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: nfs_store_user_file_actions_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.nfs_store_user_file_actions_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: nfs_store_user_file_actions_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.nfs_store_user_file_actions_id_seq OWNED BY ml_app.nfs_store_user_file_actions.id;
--
-- Name: page_layout_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of page_layouts.
-- NOTE(review): app_type_id and panel_position are character varying here but
-- integer in the live page_layouts table below -- likely a drifted migration;
-- left as-is to preserve dump/restore fidelity.
CREATE TABLE ml_app.page_layout_history (
    id integer NOT NULL,
    layout_name character varying,
    panel_name character varying,
    panel_label character varying,
    panel_position character varying,
    options character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    page_layout_id integer,
    app_type_id character varying,
    description character varying
);
--
-- Name: page_layout_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.page_layout_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: page_layout_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.page_layout_history_id_seq OWNED BY ml_app.page_layout_history.id;
--
-- Name: page_layouts; Type: TABLE; Schema: ml_app; Owner: -
--
-- Live per-app-type page/panel layout configuration.
CREATE TABLE ml_app.page_layouts (
    id integer NOT NULL,
    app_type_id integer,
    layout_name character varying,
    panel_name character varying,
    panel_label character varying,
    panel_position integer,
    options character varying,
    disabled boolean,
    admin_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    description character varying
);
--
-- Name: page_layouts_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.page_layouts_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: page_layouts_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.page_layouts_id_seq OWNED BY ml_app.page_layouts.id;
--
-- Name: player_contact_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of player_contacts; updated_at defaults to now() so history
-- rows are stamped even when the writer omits the column.
CREATE TABLE ml_app.player_contact_history (
    id integer NOT NULL,
    master_id integer,
    rec_type character varying,
    data character varying,
    source character varying,
    rank integer,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now(),
    player_contact_id integer
);
--
-- Name: player_contact_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.player_contact_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: player_contact_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.player_contact_history_id_seq OWNED BY ml_app.player_contact_history.id;
--
-- Name: player_contacts; Type: TABLE; Schema: ml_app; Owner: -
--
-- Contact records per master; rec_type discriminates the kind of contact
-- detail stored in the data column (presumably phone/email -- verify in app).
CREATE TABLE ml_app.player_contacts (
    id integer NOT NULL,
    master_id integer,
    rec_type character varying,
    data character varying,
    source character varying,
    rank integer,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now()
);
--
-- Name: player_contacts_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.player_contacts_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: player_contacts_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.player_contacts_id_seq OWNED BY ml_app.player_contacts.id;
--
-- Name: player_info_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of player_infos (same columns plus player_info_id back-link).
CREATE TABLE ml_app.player_info_history (
    id integer NOT NULL,
    master_id integer,
    first_name character varying,
    last_name character varying,
    middle_name character varying,
    nick_name character varying,
    birth_date date,
    death_date date,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now(),
    contact_pref character varying,
    start_year integer,
    rank integer,
    notes character varying,
    contact_id integer,
    college character varying,
    end_year integer,
    source character varying,
    player_info_id integer
);
--
-- Name: player_info_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.player_info_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: player_info_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.player_info_history_id_seq OWNED BY ml_app.player_info_history.id;
--
-- Name: player_infos; Type: TABLE; Schema: ml_app; Owner: -
--
-- Primary demographic/biographical record per master subject.
CREATE TABLE ml_app.player_infos (
    id integer NOT NULL,
    master_id integer,
    first_name character varying,
    last_name character varying,
    middle_name character varying,
    nick_name character varying,
    birth_date date,
    death_date date,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now(),
    contact_pref character varying,
    start_year integer,
    rank integer,
    notes character varying,
    contact_id integer,
    college character varying,
    end_year integer,
    source character varying
);
--
-- Name: player_infos_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.player_infos_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: player_infos_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.player_infos_id_seq OWNED BY ml_app.player_infos.id;
--
-- Name: pro_infos; Type: TABLE; Schema: ml_app; Owner: -
--
-- External "pro" reference data per master (pro_id); columns parallel
-- player_infos but include birthplace and carry no history table in this chunk.
CREATE TABLE ml_app.pro_infos (
    id integer NOT NULL,
    master_id integer,
    pro_id integer,
    first_name character varying,
    middle_name character varying,
    nick_name character varying,
    last_name character varying,
    birth_date date,
    death_date date,
    start_year integer,
    end_year integer,
    college character varying,
    birthplace character varying,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone DEFAULT now()
);
--
-- Name: pro_infos_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.pro_infos_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: pro_infos_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.pro_infos_id_seq OWNED BY ml_app.pro_infos.id;
--
-- Name: protocol_event_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of protocol_events.
CREATE TABLE ml_app.protocol_event_history (
    id integer NOT NULL,
    name character varying,
    admin_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    sub_process_id integer,
    milestone character varying,
    description character varying,
    protocol_event_id integer
);
--
-- Name: protocol_event_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.protocol_event_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: protocol_event_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.protocol_event_history_id_seq OWNED BY ml_app.protocol_event_history.id;
--
-- Name: protocol_events; Type: TABLE; Schema: ml_app; Owner: -
--
-- Events within a sub-process (see sub_processes below), the leaf level of the
-- protocol -> sub_process -> protocol_event hierarchy used by trackers.
CREATE TABLE ml_app.protocol_events (
    id integer NOT NULL,
    name character varying,
    admin_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    sub_process_id integer,
    milestone character varying,
    description character varying
);
--
-- Name: protocol_events_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.protocol_events_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: protocol_events_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.protocol_events_id_seq OWNED BY ml_app.protocol_events.id;
--
-- Name: protocol_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of protocols; "position" is double-quoted because POSITION
-- is a reserved word in SQL.
CREATE TABLE ml_app.protocol_history (
    id integer NOT NULL,
    name character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    admin_id integer,
    "position" integer,
    protocol_id integer
);
--
-- Name: protocol_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.protocol_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: protocol_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.protocol_history_id_seq OWNED BY ml_app.protocol_history.id;
--
-- Name: protocols; Type: TABLE; Schema: ml_app; Owner: -
--
-- Top level of the protocol hierarchy; optionally scoped to an app type.
CREATE TABLE ml_app.protocols (
    id integer NOT NULL,
    name character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    disabled boolean,
    admin_id integer,
    "position" integer,
    app_type_id bigint
);
--
-- Name: protocols_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.protocols_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: protocols_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.protocols_id_seq OWNED BY ml_app.protocols.id;
--
-- Name: rc_cis; Type: TABLE; Schema: ml_app; Owner: -
--
-- Staged REDCap contact-information form submissions.
CREATE TABLE ml_app.rc_cis (
    id integer NOT NULL,
    fname character varying,
    lname character varying,
    status character varying,
    created_at timestamp without time zone DEFAULT now(),
    updated_at timestamp without time zone DEFAULT now(),
    user_id integer,
    master_id integer,
    street character varying,
    street2 character varying,
    city character varying,
    state character varying,
    zip character varying,
    phone character varying,
    email character varying,
    form_date timestamp without time zone
);
--
-- Name: rc_cis2; Type: TABLE; Schema: ml_app; Owner: -
--
-- NOTE(review): looks like an ad-hoc backup copy of rc_cis -- no NOT NULL id,
-- no sequence, no constraints; candidate for cleanup, left untouched here.
CREATE TABLE ml_app.rc_cis2 (
    id integer,
    fname character varying,
    lname character varying,
    status character varying,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    user_id integer
);
--
-- Name: rc_cis_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.rc_cis_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: rc_cis_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.rc_cis_id_seq OWNED BY ml_app.rc_cis.id;
--
-- Name: rc_stage_cif_copy; Type: TABLE; Schema: ml_app; Owner: -
--
-- Staging copy of a REDCap CIF export; record_id/redcap_survey_identifier come
-- from REDCap, the trailing columns are added during local processing.
CREATE TABLE ml_app.rc_stage_cif_copy (
    record_id integer,
    redcap_survey_identifier integer,
    time_stamp timestamp without time zone,
    first_name character varying,
    middle_name character varying,
    last_name character varying,
    nick_name character varying,
    street character varying,
    street2 character varying,
    city character varying,
    state character varying,
    zipcode character varying,
    phone character varying,
    email character varying,
    hearabout character varying,
    completed integer,
    id integer NOT NULL,
    status character varying,
    created_at timestamp without time zone DEFAULT now(),
    user_id integer,
    master_id integer,
    updated_at timestamp without time zone DEFAULT now(),
    added_tracker boolean
);
--
-- Name: rc_stage_cif_copy_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.rc_stage_cif_copy_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: rc_stage_cif_copy_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.rc_stage_cif_copy_id_seq OWNED BY ml_app.rc_stage_cif_copy.id;
--
-- Name: report_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of reports. Note the column literally named "sql" (the stored
-- report query text); it is unquoted here and works, but is a reserved-ish
-- name -- quote it in hand-written queries.
CREATE TABLE ml_app.report_history (
    id integer NOT NULL,
    name character varying,
    description character varying,
    sql character varying,
    search_attrs character varying,
    admin_id integer,
    disabled boolean,
    report_type character varying,
    auto boolean,
    searchable boolean,
    "position" integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    report_id integer,
    item_type character varying,
    edit_model character varying,
    edit_field_names character varying,
    selection_fields character varying,
    short_name character varying,
    options character varying
);
--
-- Name: report_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.report_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: report_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.report_history_id_seq OWNED BY ml_app.report_history.id;
--
-- Name: reports; Type: TABLE; Schema: ml_app; Owner: -
--
-- Saved report definitions (query text in sql, UI config in the other columns).
CREATE TABLE ml_app.reports (
    id integer NOT NULL,
    name character varying,
    description character varying,
    sql character varying,
    search_attrs character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    report_type character varying,
    auto boolean,
    searchable boolean,
    "position" integer,
    edit_model character varying,
    edit_field_names character varying,
    selection_fields character varying,
    item_type character varying,
    short_name character varying,
    options character varying
);
--
-- Name: reports_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.reports_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: reports_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.reports_id_seq OWNED BY ml_app.reports.id;
--
-- Name: role_description_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of role_descriptions (bigint keys, unlike most older tables).
CREATE TABLE ml_app.role_description_history (
    id bigint NOT NULL,
    role_description_id bigint,
    app_type_id bigint,
    role_name character varying,
    role_template character varying,
    name character varying,
    description character varying,
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: role_description_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.role_description_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: role_description_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.role_description_history_id_seq OWNED BY ml_app.role_description_history.id;
--
-- Name: role_descriptions; Type: TABLE; Schema: ml_app; Owner: -
--
-- Human-readable metadata for roles, keyed by (app_type_id, role_name).
CREATE TABLE ml_app.role_descriptions (
    id bigint NOT NULL,
    app_type_id bigint,
    role_name character varying,
    role_template character varying,
    name character varying,
    description character varying,
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: role_descriptions_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.role_descriptions_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: role_descriptions_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.role_descriptions_id_seq OWNED BY ml_app.role_descriptions.id;
--
-- Name: sage_assignments; Type: TABLE; Schema: ml_app; Owner: -
--
-- Assignment of external Sage identifiers (max 10 chars) to master records.
CREATE TABLE ml_app.sage_assignments (
    id integer NOT NULL,
    sage_id character varying(10),
    assigned_by character varying,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    master_id integer,
    admin_id integer
);
--
-- Name: sage_assignments_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.sage_assignments_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: sage_assignments_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.sage_assignments_id_seq OWNED BY ml_app.sage_assignments.id;
--
-- Name: scantron_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of scantrons; scantron_table_id links back to the live row
-- (named differently from the usual <table>_id convention).
CREATE TABLE ml_app.scantron_history (
    id integer NOT NULL,
    master_id integer,
    scantron_id integer,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    scantron_table_id integer
);
--
-- Name: scantron_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.scantron_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: scantron_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.scantron_history_id_seq OWNED BY ml_app.scantron_history.id;
--
-- Name: scantrons; Type: TABLE; Schema: ml_app; Owner: -
--
-- Maps external scantron form IDs to master records.
CREATE TABLE ml_app.scantrons (
    id integer NOT NULL,
    master_id integer,
    scantron_id integer,
    user_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: scantrons_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.scantrons_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: scantrons_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.scantrons_id_seq OWNED BY ml_app.scantrons.id;
--
-- Name: schema_migrations; Type: TABLE; Schema: ml_app; Owner: -
--
-- Rails-style migration bookkeeping: one row per applied migration version.
CREATE TABLE ml_app.schema_migrations (
    version character varying NOT NULL
);
--
-- Name: sessions; Type: TABLE; Schema: ml_app; Owner: -
--
-- Server-side session store (ActiveRecord session_store layout: opaque
-- session_id key plus serialized data blob).
CREATE TABLE ml_app.sessions (
    id bigint NOT NULL,
    session_id character varying NOT NULL,
    data text,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: sessions_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.sessions_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: sessions_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.sessions_id_seq OWNED BY ml_app.sessions.id;
--
-- Name: smback; Type: TABLE; Schema: ml_app; Owner: -
--
-- NOTE(review): single-column table mirroring schema_migrations.version --
-- appears to be a manual backup of the migrations table; candidate for cleanup.
CREATE TABLE ml_app.smback (
    version character varying
);
--
-- Name: sub_process_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- History mirror of sub_processes.
CREATE TABLE ml_app.sub_process_history (
    id integer NOT NULL,
    name character varying,
    disabled boolean,
    protocol_id integer,
    admin_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    sub_process_id integer
);
--
-- Name: sub_process_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.sub_process_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: sub_process_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.sub_process_history_id_seq OWNED BY ml_app.sub_process_history.id;
--
-- Name: sub_processes; Type: TABLE; Schema: ml_app; Owner: -
--
-- Middle level of the protocol hierarchy: belongs to a protocol, parents
-- protocol_events.
CREATE TABLE ml_app.sub_processes (
    id integer NOT NULL,
    name character varying,
    disabled boolean,
    protocol_id integer,
    admin_id integer,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: sub_processes_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.sub_processes_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: sub_processes_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.sub_processes_id_seq OWNED BY ml_app.sub_processes.id;
--
-- Name: tracker_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/history copy of ml_app.trackers rows; tracker_id points at the live
-- row. item_id/item_type look like a polymorphic reference to the item the
-- tracked event relates to — confirm against application code.
CREATE TABLE ml_app.tracker_history (
    id integer NOT NULL,
    master_id integer,
    protocol_id integer,
    tracker_id integer,
    event_date timestamp without time zone,
    user_id integer,
    notes character varying,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    sub_process_id integer,
    protocol_event_id integer,
    item_id integer,
    item_type character varying
);
--
-- Name: tracker_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.tracker_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: tracker_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.tracker_history_id_seq OWNED BY ml_app.tracker_history.id;
--
-- Name: trackers; Type: TABLE; Schema: ml_app; Owner: -
--
-- Live tracker events per master record. Note: user_id defaults to the
-- session user via the ml_app.current_user_id() function, so inserts pick up
-- the acting user automatically when the column is omitted.
CREATE TABLE ml_app.trackers (
    id integer NOT NULL,
    master_id integer,
    protocol_id integer NOT NULL,
    event_date timestamp without time zone,
    user_id integer DEFAULT ml_app.current_user_id(),
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    notes character varying,
    sub_process_id integer NOT NULL,
    protocol_event_id integer,
    item_id integer,
    item_type character varying
);
--
-- Name: trackers_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.trackers_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: trackers_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.trackers_id_seq OWNED BY ml_app.trackers.id;
--
-- Name: user_access_control_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/history copy of ml_app.user_access_controls; user_access_control_id
-- points at the live row.
-- NOTE(review): user_id and app_type_id are bigint here but integer in the
-- live user_access_controls table — probably harmless, but confirm the
-- widening was intentional.
CREATE TABLE ml_app.user_access_control_history (
    id integer NOT NULL,
    user_id bigint,
    resource_type character varying,
    resource_name character varying,
    options character varying,
    access character varying,
    app_type_id bigint,
    role_name character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    user_access_control_id integer
);
--
-- Name: user_access_control_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_access_control_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_access_control_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_access_control_history_id_seq OWNED BY ml_app.user_access_control_history.id;
--
-- Name: user_access_controls; Type: TABLE; Schema: ml_app; Owner: -
--
-- Access grants: what a user (or role_name) may do with a resource
-- (resource_type/resource_name) within an app type.
CREATE TABLE ml_app.user_access_controls (
    id integer NOT NULL,
    user_id integer,
    resource_type character varying,
    resource_name character varying,
    options character varying,
    access character varying,
    disabled boolean,
    admin_id integer,
    app_type_id integer,
    role_name character varying,
    created_at timestamp without time zone,
    updated_at timestamp without time zone
);
--
-- Name: user_access_controls_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_access_controls_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_access_controls_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_access_controls_id_seq OWNED BY ml_app.user_access_controls.id;
--
-- Name: user_action_logs; Type: TABLE; Schema: ml_app; Owner: -
--
-- Per-user action log (action + url per request); index_action_ids is a
-- Postgres integer[] array of related action ids.
CREATE TABLE ml_app.user_action_logs (
    id integer NOT NULL,
    user_id integer,
    app_type_id integer,
    master_id integer,
    item_type character varying,
    item_id integer,
    index_action_ids integer[],
    action character varying,
    url character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: user_action_logs_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_action_logs_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_action_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_action_logs_id_seq OWNED BY ml_app.user_action_logs.id;
--
-- Name: user_authorization_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/history copy of ml_app.user_authorizations; user_authorization_id
-- points at the live row.
-- NOTE(review): user_id is character varying here but integer in the live
-- table — likely a legacy inconsistency; joins will need an explicit cast.
CREATE TABLE ml_app.user_authorization_history (
    id integer NOT NULL,
    user_id character varying,
    has_authorization character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    user_authorization_id integer
);
--
-- Name: user_authorization_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_authorization_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_authorization_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_authorization_history_id_seq OWNED BY ml_app.user_authorization_history.id;
--
-- Name: user_authorizations; Type: TABLE; Schema: ml_app; Owner: -
--
-- Named authorizations granted to a user (has_authorization holds the
-- authorization name as a string).
CREATE TABLE ml_app.user_authorizations (
    id integer NOT NULL,
    user_id integer,
    has_authorization character varying,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: user_authorizations_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_authorizations_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_authorizations_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_authorizations_id_seq OWNED BY ml_app.user_authorizations.id;
--
-- Name: user_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/history copy of ml_app.users; user_id points at the live users row.
-- The authentication columns (encrypted_password, reset_password_token,
-- sign_in_count, locked_at, encrypted_otp_secret*, ...) mirror the users
-- table — presumably Rails Devise conventions; confirm against the app.
CREATE TABLE ml_app.user_history (
    id integer NOT NULL,
    email character varying DEFAULT ''::character varying NOT NULL,
    encrypted_password character varying DEFAULT ''::character varying NOT NULL,
    reset_password_token character varying,
    reset_password_sent_at timestamp without time zone,
    remember_created_at timestamp without time zone,
    sign_in_count integer DEFAULT 0 NOT NULL,
    current_sign_in_at timestamp without time zone,
    last_sign_in_at timestamp without time zone,
    current_sign_in_ip inet,
    last_sign_in_ip inet,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    failed_attempts integer DEFAULT 0 NOT NULL,
    unlock_token character varying,
    locked_at timestamp without time zone,
    disabled boolean,
    admin_id integer,
    user_id integer,
    app_type_id integer,
    authentication_token character varying,
    encrypted_otp_secret character varying,
    encrypted_otp_secret_iv character varying,
    encrypted_otp_secret_salt character varying,
    consumed_timestep integer,
    otp_required_for_login boolean,
    password_updated_at timestamp without time zone,
    first_name character varying,
    last_name character varying
);
--
-- Name: user_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_history_id_seq OWNED BY ml_app.user_history.id;
--
-- Name: user_role_history; Type: TABLE; Schema: ml_app; Owner: -
--
-- Audit/history copy of ml_app.user_roles; user_role_id points at the live
-- row. NOTE(review): app_type_id/user_id are bigint here vs integer in the
-- live table — confirm the widening was intentional.
CREATE TABLE ml_app.user_role_history (
    id integer NOT NULL,
    app_type_id bigint,
    role_name character varying,
    user_id bigint,
    admin_id integer,
    disabled boolean,
    created_at timestamp without time zone,
    updated_at timestamp without time zone,
    user_role_id integer
);
--
-- Name: user_role_history_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_role_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_role_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_role_history_id_seq OWNED BY ml_app.user_role_history.id;
--
-- Name: user_roles; Type: TABLE; Schema: ml_app; Owner: -
--
-- Role assignments: a role_name granted to a user within an app type.
-- Unlike its history table, disabled is NOT NULL with a false default.
CREATE TABLE ml_app.user_roles (
    id integer NOT NULL,
    app_type_id integer,
    role_name character varying,
    user_id integer,
    admin_id integer,
    disabled boolean DEFAULT false NOT NULL,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: user_roles_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.user_roles_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_roles_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.user_roles_id_seq OWNED BY ml_app.user_roles.id;
--
-- Name: users; Type: TABLE; Schema: ml_app; Owner: -
--
-- Application user accounts. Column set matches user_history except that
-- authentication_token is capped at varchar(30) here (unbounded in history)
-- and do_not_email (default false) exists only on the live table.
-- The password/sign-in/OTP columns follow Rails Devise naming — presumably
-- managed by Devise; confirm against the application.
CREATE TABLE ml_app.users (
    id integer NOT NULL,
    email character varying DEFAULT ''::character varying NOT NULL,
    encrypted_password character varying DEFAULT ''::character varying NOT NULL,
    reset_password_token character varying,
    reset_password_sent_at timestamp without time zone,
    remember_created_at timestamp without time zone,
    sign_in_count integer DEFAULT 0 NOT NULL,
    current_sign_in_at timestamp without time zone,
    last_sign_in_at timestamp without time zone,
    current_sign_in_ip inet,
    last_sign_in_ip inet,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    failed_attempts integer DEFAULT 0 NOT NULL,
    unlock_token character varying,
    locked_at timestamp without time zone,
    disabled boolean,
    admin_id integer,
    app_type_id integer,
    authentication_token character varying(30),
    encrypted_otp_secret character varying,
    encrypted_otp_secret_iv character varying,
    encrypted_otp_secret_salt character varying,
    consumed_timestep integer,
    otp_required_for_login boolean,
    password_updated_at timestamp without time zone,
    first_name character varying,
    last_name character varying,
    do_not_email boolean DEFAULT false
);
--
-- Name: users_contact_infos; Type: TABLE; Schema: ml_app; Owner: -
--
-- Optional contact details (SMS/phone/alternate email) for a user, kept in a
-- separate one-to-many table rather than on users itself.
CREATE TABLE ml_app.users_contact_infos (
    id integer NOT NULL,
    user_id integer,
    sms_number character varying,
    phone_number character varying,
    alt_email character varying,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    admin_id integer,
    disabled boolean
);
--
-- Name: users_contact_infos_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
CREATE SEQUENCE ml_app.users_contact_infos_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: users_contact_infos_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.users_contact_infos_id_seq OWNED BY ml_app.users_contact_infos.id;
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: ml_app; Owner: -
--
-- Id sequence for ml_app.users (the table is declared further above in the
-- dump; pg_dump emits the sequence separately).
CREATE SEQUENCE ml_app.users_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: ml_app; Owner: -
--
ALTER SEQUENCE ml_app.users_id_seq OWNED BY ml_app.users.id;
--
-- Name: datadic_choice_history; Type: TABLE; Schema: ref_data; Owner: -
--
-- Audit/history copy of ref_data.datadic_choices; datadic_choice_id points
-- at the live row.
CREATE TABLE ref_data.datadic_choice_history (
    id bigint NOT NULL,
    datadic_choice_id bigint,
    source_name character varying,
    source_type character varying,
    form_name character varying,
    field_name character varying,
    value character varying,
    label character varying,
    disabled boolean,
    admin_id bigint,
    redcap_data_dictionary_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: datadic_choice_history_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.datadic_choice_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: datadic_choice_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.datadic_choice_history_id_seq OWNED BY ref_data.datadic_choice_history.id;
--
-- Name: datadic_choices; Type: TABLE; Schema: ref_data; Owner: -
--
-- Value/label choice lists for data-dictionary fields, keyed by
-- source/form/field; optionally linked to a captured REDCap data dictionary.
CREATE TABLE ref_data.datadic_choices (
    id bigint NOT NULL,
    source_name character varying,
    source_type character varying,
    form_name character varying,
    field_name character varying,
    value character varying,
    label character varying,
    disabled boolean,
    admin_id bigint,
    redcap_data_dictionary_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: datadic_choices_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.datadic_choices_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: datadic_choices_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.datadic_choices_id_seq OWNED BY ref_data.datadic_choices.id;
--
-- Name: datadic_variable_history; Type: TABLE; Schema: ref_data; Owner: -
--
-- Audit/history copy of ref_data.datadic_variables; datadic_variable_id
-- points at the live row. Column semantics are documented by the COMMENT ON
-- statements below (stored in the Postgres catalog, not inline here).
CREATE TABLE ref_data.datadic_variable_history (
    id bigint NOT NULL,
    datadic_variable_id bigint,
    study character varying,
    source_name character varying,
    source_type character varying,
    domain character varying,
    form_name character varying,
    variable_name character varying,
    variable_type character varying,
    presentation_type character varying,
    label character varying,
    label_note character varying,
    annotation character varying,
    is_required boolean,
    valid_type character varying,
    valid_min character varying,
    valid_max character varying,
    multi_valid_choices character varying[],
    is_identifier boolean,
    is_derived_var boolean,
    multi_derived_from_id bigint[],
    doc_url character varying,
    target_type character varying,
    owner_email character varying,
    classification character varying,
    other_classification character varying,
    multi_timepoints character varying[],
    equivalent_to_id bigint,
    storage_type character varying,
    db_or_fs character varying,
    schema_or_path character varying,
    table_or_file character varying,
    disabled boolean,
    admin_id bigint,
    redcap_data_dictionary_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    "position" integer,
    section_id integer,
    sub_section_id integer,
    title character varying,
    storage_varname character varying
);
--
-- Name: COLUMN datadic_variable_history.study; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.study IS 'Study name';
--
-- Name: COLUMN datadic_variable_history.source_name; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.source_name IS 'Source of variable';
--
-- Name: COLUMN datadic_variable_history.source_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.source_type IS 'Source type';
--
-- Name: COLUMN datadic_variable_history.domain; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.domain IS 'Domain';
--
-- Name: COLUMN datadic_variable_history.form_name; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.form_name IS 'Form name (if the source was a type of form)';
--
-- Name: COLUMN datadic_variable_history.variable_name; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.variable_name IS 'Variable name';
--
-- Name: COLUMN datadic_variable_history.variable_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.variable_type IS 'Variable type';
--
-- Name: COLUMN datadic_variable_history.presentation_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.presentation_type IS 'Data type for presentation purposes';
--
-- Name: COLUMN datadic_variable_history.label; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.label IS 'Primary label or title (if source was a form, the label presented for the field)';
--
-- Name: COLUMN datadic_variable_history.label_note; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.label_note IS 'Description (if source was a form, a note presented for the field)';
--
-- Name: COLUMN datadic_variable_history.annotation; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.annotation IS 'Annotations (if source was a form, annotations not presented to the user)';
--
-- Name: COLUMN datadic_variable_history.is_required; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.is_required IS 'Was required in source';
--
-- Name: COLUMN datadic_variable_history.valid_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.valid_type IS 'Source data type';
--
-- Name: COLUMN datadic_variable_history.valid_min; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.valid_min IS 'Minimum value';
--
-- Name: COLUMN datadic_variable_history.valid_max; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.valid_max IS 'Maximum value';
--
-- Name: COLUMN datadic_variable_history.multi_valid_choices; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.multi_valid_choices IS 'List of valid choices for categorical variables';
--
-- Name: COLUMN datadic_variable_history.is_identifier; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.is_identifier IS 'Represents identifiable information';
--
-- Name: COLUMN datadic_variable_history.is_derived_var; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.is_derived_var IS 'Is a derived variable';
--
-- Name: COLUMN datadic_variable_history.multi_derived_from_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.multi_derived_from_id IS 'If a derived variable, ids of variables used to calculate it';
--
-- Name: COLUMN datadic_variable_history.doc_url; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.doc_url IS 'URL to additional documentation';
--
-- Name: COLUMN datadic_variable_history.target_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.target_type IS 'Type of participant this variable relates to';
--
-- Name: COLUMN datadic_variable_history.owner_email; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.owner_email IS 'Owner, especially for derived variables';
--
-- Name: COLUMN datadic_variable_history.classification; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.classification IS 'Category of sensitivity from a privacy perspective';
--
-- Name: COLUMN datadic_variable_history.other_classification; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.other_classification IS 'Additional information regarding classification';
--
-- Name: COLUMN datadic_variable_history.multi_timepoints; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.multi_timepoints IS 'Timepoints this data is collected (in longitudinal studies)';
--
-- Name: COLUMN datadic_variable_history.equivalent_to_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.equivalent_to_id IS 'Primary variable id this is equivalent to';
--
-- Name: COLUMN datadic_variable_history.storage_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.storage_type IS 'Type of storage for dataset';
--
-- Name: COLUMN datadic_variable_history.db_or_fs; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.db_or_fs IS 'Database or Filesystem name';
--
-- Name: COLUMN datadic_variable_history.schema_or_path; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.schema_or_path IS 'Database schema or Filesystem directory path';
--
-- Name: COLUMN datadic_variable_history.table_or_file; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.table_or_file IS 'Database table (or view, if derived or equivalent to another variable), or filename in directory';
--
-- Name: COLUMN datadic_variable_history.redcap_data_dictionary_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.redcap_data_dictionary_id IS 'Reference to REDCap data dictionary representation';
--
-- Name: COLUMN datadic_variable_history."position"; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history."position" IS 'Relative position (for source forms or other variables where order of collection matters)';
--
-- Name: COLUMN datadic_variable_history.section_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.section_id IS 'Section this belongs to';
--
-- Name: COLUMN datadic_variable_history.sub_section_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.sub_section_id IS 'Sub-section this belongs to';
--
-- Name: COLUMN datadic_variable_history.title; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.title IS 'Section caption';
--
-- Name: COLUMN datadic_variable_history.storage_varname; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variable_history.storage_varname IS 'Database field name, or variable name in data file';
--
-- Name: datadic_variable_history_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.datadic_variable_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: datadic_variable_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.datadic_variable_history_id_seq OWNED BY ref_data.datadic_variable_history.id;
--
-- Name: datadic_variables; Type: TABLE; Schema: ref_data; Owner: -
--
-- Live data-dictionary variable metadata: one row per variable, describing
-- its source, typing, validity rules, provenance (derived-from / equivalent
-- to), privacy classification and physical storage location. The COMMENT ON
-- statements below record per-column semantics in the Postgres catalog.
CREATE TABLE ref_data.datadic_variables (
    id bigint NOT NULL,
    study character varying,
    source_name character varying,
    source_type character varying,
    domain character varying,
    form_name character varying,
    variable_name character varying,
    variable_type character varying,
    presentation_type character varying,
    label character varying,
    label_note character varying,
    annotation character varying,
    is_required boolean,
    valid_type character varying,
    valid_min character varying,
    valid_max character varying,
    multi_valid_choices character varying[],
    is_identifier boolean,
    is_derived_var boolean,
    multi_derived_from_id bigint[],
    doc_url character varying,
    target_type character varying,
    owner_email character varying,
    classification character varying,
    other_classification character varying,
    multi_timepoints character varying[],
    equivalent_to_id bigint,
    storage_type character varying,
    db_or_fs character varying,
    schema_or_path character varying,
    table_or_file character varying,
    disabled boolean,
    admin_id bigint,
    redcap_data_dictionary_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    "position" integer,
    section_id integer,
    sub_section_id integer,
    title character varying,
    storage_varname character varying
);
--
-- Name: COLUMN datadic_variables.study; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.study IS 'Study name';
--
-- Name: COLUMN datadic_variables.source_name; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.source_name IS 'Source of variable';
--
-- Name: COLUMN datadic_variables.source_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.source_type IS 'Source type';
--
-- Name: COLUMN datadic_variables.domain; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.domain IS 'Domain';
--
-- Name: COLUMN datadic_variables.form_name; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.form_name IS 'Form name (if the source was a type of form)';
--
-- Name: COLUMN datadic_variables.variable_name; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.variable_name IS 'Variable name';
--
-- Name: COLUMN datadic_variables.variable_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.variable_type IS 'Variable type';
--
-- Name: COLUMN datadic_variables.presentation_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.presentation_type IS 'Data type for presentation purposes';
--
-- Name: COLUMN datadic_variables.label; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.label IS 'Primary label or title (if source was a form, the label presented for the field)';
--
-- Name: COLUMN datadic_variables.label_note; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.label_note IS 'Description (if source was a form, a note presented for the field)';
--
-- Name: COLUMN datadic_variables.annotation; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.annotation IS 'Annotations (if source was a form, annotations not presented to the user)';
--
-- Name: COLUMN datadic_variables.is_required; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.is_required IS 'Was required in source';
--
-- Name: COLUMN datadic_variables.valid_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.valid_type IS 'Source data type';
--
-- Name: COLUMN datadic_variables.valid_min; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.valid_min IS 'Minimum value';
--
-- Name: COLUMN datadic_variables.valid_max; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.valid_max IS 'Maximum value';
--
-- Name: COLUMN datadic_variables.multi_valid_choices; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.multi_valid_choices IS 'List of valid choices for categorical variables';
--
-- Name: COLUMN datadic_variables.is_identifier; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.is_identifier IS 'Represents identifiable information';
--
-- Name: COLUMN datadic_variables.is_derived_var; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.is_derived_var IS 'Is a derived variable';
--
-- Name: COLUMN datadic_variables.multi_derived_from_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.multi_derived_from_id IS 'If a derived variable, ids of variables used to calculate it';
--
-- Name: COLUMN datadic_variables.doc_url; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.doc_url IS 'URL to additional documentation';
--
-- Name: COLUMN datadic_variables.target_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.target_type IS 'Type of participant this variable relates to';
--
-- Name: COLUMN datadic_variables.owner_email; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.owner_email IS 'Owner, especially for derived variables';
--
-- Name: COLUMN datadic_variables.classification; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.classification IS 'Category of sensitivity from a privacy perspective';
--
-- Name: COLUMN datadic_variables.other_classification; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.other_classification IS 'Additional information regarding classification';
--
-- Name: COLUMN datadic_variables.multi_timepoints; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.multi_timepoints IS 'Timepoints this data is collected (in longitudinal studies)';
--
-- Name: COLUMN datadic_variables.equivalent_to_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.equivalent_to_id IS 'Primary variable id this is equivalent to';
--
-- Name: COLUMN datadic_variables.storage_type; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.storage_type IS 'Type of storage for dataset';
--
-- Name: COLUMN datadic_variables.db_or_fs; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.db_or_fs IS 'Database or Filesystem name';
--
-- Name: COLUMN datadic_variables.schema_or_path; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.schema_or_path IS 'Database schema or Filesystem directory path';
--
-- Name: COLUMN datadic_variables.table_or_file; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.table_or_file IS 'Database table (or view, if derived or equivalent to another variable), or filename in directory';
--
-- Name: COLUMN datadic_variables.redcap_data_dictionary_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.redcap_data_dictionary_id IS 'Reference to REDCap data dictionary representation';
--
-- Name: COLUMN datadic_variables."position"; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables."position" IS 'Relative position (for source forms or other variables where order of collection matters)';
--
-- Name: COLUMN datadic_variables.section_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.section_id IS 'Section this belongs to';
--
-- Name: COLUMN datadic_variables.sub_section_id; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.sub_section_id IS 'Sub-section this belongs to';
--
-- Name: COLUMN datadic_variables.title; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.title IS 'Section caption';
--
-- Name: COLUMN datadic_variables.storage_varname; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON COLUMN ref_data.datadic_variables.storage_varname IS 'Database field name, or variable name in data file';
--
-- Name: datadic_variables_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.datadic_variables_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: datadic_variables_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.datadic_variables_id_seq OWNED BY ref_data.datadic_variables.id;
--
-- Name: redcap_client_requests; Type: TABLE; Schema: ref_data; Owner: -
--
-- Log of REDCap API client requests (action + target server), with the raw
-- response captured in the jsonb "result" column.
CREATE TABLE ref_data.redcap_client_requests (
    id bigint NOT NULL,
    redcap_project_admin_id bigint,
    action character varying,
    name character varying,
    server_url character varying,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    result jsonb
);
--
-- Name: TABLE redcap_client_requests; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON TABLE ref_data.redcap_client_requests IS 'Redcap client requests';
--
-- Name: redcap_client_requests_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_client_requests_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_client_requests_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_client_requests_id_seq OWNED BY ref_data.redcap_client_requests.id;
--
-- Name: redcap_data_collection_instrument_history; Type: TABLE; Schema: ref_data; Owner: -
--
-- Audit/history copy of ref_data.redcap_data_collection_instruments;
-- redcap_data_collection_instrument_id points at the live row.
CREATE TABLE ref_data.redcap_data_collection_instrument_history (
    id bigint NOT NULL,
    redcap_data_collection_instrument_id bigint,
    redcap_project_admin_id bigint,
    name character varying,
    label character varying,
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: redcap_data_collection_instrument_history_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_data_collection_instrument_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_data_collection_instrument_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_data_collection_instrument_history_id_seq OWNED BY ref_data.redcap_data_collection_instrument_history.id;
--
-- Name: redcap_data_collection_instruments; Type: TABLE; Schema: ref_data; Owner: -
--
-- Data collection instruments (forms) belonging to a REDCap project
-- administration record; versioned into
-- ref_data.redcap_data_collection_instrument_history.
CREATE TABLE ref_data.redcap_data_collection_instruments (
    id bigint NOT NULL,
    name character varying, -- instrument machine name
    label character varying, -- instrument display label
    disabled boolean, -- soft-delete flag (inferred from name; same column appears across these tables)
    redcap_project_admin_id bigint, -- presumably references ref_data.redcap_project_admins.id - TODO confirm
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: redcap_data_collection_instruments_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_data_collection_instruments_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_data_collection_instruments_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_data_collection_instruments_id_seq OWNED BY ref_data.redcap_data_collection_instruments.id;
--
-- Name: redcap_data_dictionaries; Type: TABLE; Schema: ref_data; Owner: -
--
-- Snapshot of a REDCap project's data dictionary (field metadata) retrieved
-- via the API; the raw metadata document is stored in captured_metadata.
CREATE TABLE ref_data.redcap_data_dictionaries (
    id bigint NOT NULL,
    redcap_project_admin_id bigint, -- presumably references ref_data.redcap_project_admins.id - TODO confirm
    field_count integer, -- number of fields in the captured dictionary (inferred from name)
    captured_metadata jsonb, -- raw dictionary/metadata payload as returned by REDCap
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: TABLE redcap_data_dictionaries; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON TABLE ref_data.redcap_data_dictionaries IS 'Retrieved Redcap Data Dictionaries (metadata)';
--
-- Name: redcap_data_dictionaries_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_data_dictionaries_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_data_dictionaries_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_data_dictionaries_id_seq OWNED BY ref_data.redcap_data_dictionaries.id;
--
-- Name: redcap_data_dictionary_history; Type: TABLE; Schema: ref_data; Owner: -
--
-- History/audit copy of ref_data.redcap_data_dictionaries: same payload
-- columns plus redcap_data_dictionary_id pointing back at the current row.
-- NOTE(review): captured_metadata is duplicated per version, so this table
-- can grow large if dictionaries are refreshed frequently.
CREATE TABLE ref_data.redcap_data_dictionary_history (
    id bigint NOT NULL,
    redcap_data_dictionary_id bigint, -- presumably the live row this version belongs to
    redcap_project_admin_id bigint,
    field_count integer,
    captured_metadata jsonb,
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: TABLE redcap_data_dictionary_history; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON TABLE ref_data.redcap_data_dictionary_history IS 'Retrieved Redcap Data Dictionaries (metadata) - history';
--
-- Name: redcap_data_dictionary_history_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_data_dictionary_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_data_dictionary_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_data_dictionary_history_id_seq OWNED BY ref_data.redcap_data_dictionary_history.id;
--
-- Name: redcap_project_admin_history; Type: TABLE; Schema: ref_data; Owner: -
--
-- History/audit copy of ref_data.redcap_project_admins: same payload columns
-- plus redcap_project_admin_id pointing back at the current row.
-- NOTE(review): api_key is versioned here in plaintext as well - every
-- historical API token remains readable; consider whether history rows
-- should redact it.
CREATE TABLE ref_data.redcap_project_admin_history (
    id bigint NOT NULL,
    redcap_project_admin_id bigint, -- presumably the live row this version belongs to
    name character varying,
    api_key character varying,
    server_url character varying,
    captured_project_info jsonb,
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    transfer_mode character varying,
    frequency character varying,
    status character varying,
    post_transfer_pipeline character varying[] DEFAULT '{}'::character varying[], -- array of pipeline step names; defaults to empty
    notes character varying,
    study character varying,
    dynamic_model_table character varying
);
--
-- Name: TABLE redcap_project_admin_history; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON TABLE ref_data.redcap_project_admin_history IS 'Redcap project administration - history';
--
-- Name: redcap_project_admin_history_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_project_admin_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_project_admin_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_project_admin_history_id_seq OWNED BY ref_data.redcap_project_admin_history.id;
--
-- Name: redcap_project_admins; Type: TABLE; Schema: ref_data; Owner: -
--
-- Administration record for a REDCap project integration: connection
-- details, transfer configuration, and the captured project-info payload.
-- Versioned into ref_data.redcap_project_admin_history.
-- NOTE(review): api_key (the REDCap API token) is stored in plaintext;
-- consider encryption at rest and/or column-level grants.
CREATE TABLE ref_data.redcap_project_admins (
    id bigint NOT NULL,
    name character varying, -- project name
    api_key character varying, -- REDCap API token (plaintext - see note above)
    server_url character varying, -- REDCap server endpoint
    captured_project_info jsonb, -- raw project-info payload as returned by REDCap
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    transfer_mode character varying,
    frequency character varying, -- transfer schedule (free text; inferred from name - confirm allowed values)
    status character varying,
    post_transfer_pipeline character varying[] DEFAULT '{}'::character varying[], -- array of pipeline step names; defaults to empty
    notes character varying,
    study character varying,
    dynamic_model_table character varying, -- target table name for transferred records (inferred from name)
    options character varying -- extra column vs. the history table; history does not capture it
);
--
-- Name: TABLE redcap_project_admins; Type: COMMENT; Schema: ref_data; Owner: -
--
COMMENT ON TABLE ref_data.redcap_project_admins IS 'Redcap project administration';
--
-- Name: redcap_project_admins_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_project_admins_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_project_admins_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_project_admins_id_seq OWNED BY ref_data.redcap_project_admins.id;
--
-- Name: redcap_project_user_history; Type: TABLE; Schema: ref_data; Owner: -
--
-- History/audit copy of ref_data.redcap_project_users: same payload columns
-- plus redcap_project_user_id pointing back at the current row.
CREATE TABLE ref_data.redcap_project_user_history (
    id bigint NOT NULL,
    redcap_project_user_id bigint, -- presumably the live row this version belongs to
    redcap_project_admin_id bigint,
    username character varying,
    email character varying,
    expiration character varying,
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: redcap_project_user_history_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_project_user_history_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_project_user_history_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_project_user_history_id_seq OWNED BY ref_data.redcap_project_user_history.id;
--
-- Name: redcap_project_users; Type: TABLE; Schema: ref_data; Owner: -
--
-- Users granted access on a REDCap project administration record; versioned
-- into ref_data.redcap_project_user_history.
CREATE TABLE ref_data.redcap_project_users (
    id bigint NOT NULL,
    redcap_project_admin_id bigint, -- presumably references ref_data.redcap_project_admins.id - TODO confirm
    username character varying,
    email character varying,
    expiration character varying, -- NOTE(review): stored as varchar, not a date type; presumably mirrors REDCap's string format - confirm before comparing/sorting on it
    disabled boolean,
    admin_id bigint,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
--
-- Name: redcap_project_users_id_seq; Type: SEQUENCE; Schema: ref_data; Owner: -
--
CREATE SEQUENCE ref_data.redcap_project_users_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: redcap_project_users_id_seq; Type: SEQUENCE OWNED BY; Schema: ref_data; Owner: -
--
ALTER SEQUENCE ref_data.redcap_project_users_id_seq OWNED BY ref_data.redcap_project_users.id;
--
-- Default-value section: attach each table's id column to its backing
-- sequence. pg_dump emits these ALTER TABLE ... SET DEFAULT nextval(...)
-- statements after all CREATE TABLE / CREATE SEQUENCE statements so that
-- creation order does not matter. Generated output - do not hand-edit
-- individual statements.
--
-- Name: accuracy_score_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.accuracy_score_history ALTER COLUMN id SET DEFAULT nextval('ml_app.accuracy_score_history_id_seq'::regclass);
--
-- Name: accuracy_scores id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.accuracy_scores ALTER COLUMN id SET DEFAULT nextval('ml_app.accuracy_scores_id_seq'::regclass);
--
-- Name: activity_log_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_history ALTER COLUMN id SET DEFAULT nextval('ml_app.activity_log_history_id_seq'::regclass);
--
-- Name: activity_log_player_contact_phone_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phone_history ALTER COLUMN id SET DEFAULT nextval('ml_app.activity_log_player_contact_phone_history_id_seq'::regclass);
--
-- Name: activity_log_player_contact_phones id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phones ALTER COLUMN id SET DEFAULT nextval('ml_app.activity_log_player_contact_phones_id_seq'::regclass);
--
-- Name: activity_logs id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_logs ALTER COLUMN id SET DEFAULT nextval('ml_app.activity_logs_id_seq'::regclass);
--
-- Name: address_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.address_history ALTER COLUMN id SET DEFAULT nextval('ml_app.address_history_id_seq'::regclass);
--
-- Name: addresses id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.addresses ALTER COLUMN id SET DEFAULT nextval('ml_app.addresses_id_seq'::regclass);
--
-- Name: admin_action_logs id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_action_logs ALTER COLUMN id SET DEFAULT nextval('ml_app.admin_action_logs_id_seq'::regclass);
--
-- Name: admin_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_history ALTER COLUMN id SET DEFAULT nextval('ml_app.admin_history_id_seq'::regclass);
--
-- Name: admins id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admins ALTER COLUMN id SET DEFAULT nextval('ml_app.admins_id_seq'::regclass);
--
-- Name: app_configuration_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configuration_history ALTER COLUMN id SET DEFAULT nextval('ml_app.app_configuration_history_id_seq'::regclass);
--
-- Name: app_configurations id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configurations ALTER COLUMN id SET DEFAULT nextval('ml_app.app_configurations_id_seq'::regclass);
--
-- Name: app_type_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_type_history ALTER COLUMN id SET DEFAULT nextval('ml_app.app_type_history_id_seq'::regclass);
--
-- Name: app_types id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_types ALTER COLUMN id SET DEFAULT nextval('ml_app.app_types_id_seq'::regclass);
--
-- Name: college_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.college_history ALTER COLUMN id SET DEFAULT nextval('ml_app.college_history_id_seq'::regclass);
--
-- Name: colleges id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.colleges ALTER COLUMN id SET DEFAULT nextval('ml_app.colleges_id_seq'::regclass);
--
-- Name: config_libraries id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_libraries ALTER COLUMN id SET DEFAULT nextval('ml_app.config_libraries_id_seq'::regclass);
--
-- Name: config_library_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_library_history ALTER COLUMN id SET DEFAULT nextval('ml_app.config_library_history_id_seq'::regclass);
--
-- Name: delayed_jobs id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.delayed_jobs ALTER COLUMN id SET DEFAULT nextval('ml_app.delayed_jobs_id_seq'::regclass);
--
-- Name: dynamic_model_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.dynamic_model_history ALTER COLUMN id SET DEFAULT nextval('ml_app.dynamic_model_history_id_seq'::regclass);
--
-- Name: dynamic_models id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.dynamic_models ALTER COLUMN id SET DEFAULT nextval('ml_app.dynamic_models_id_seq'::regclass);
--
-- Name: exception_logs id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.exception_logs ALTER COLUMN id SET DEFAULT nextval('ml_app.exception_logs_id_seq'::regclass);
--
-- Name: external_identifier_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifier_history ALTER COLUMN id SET DEFAULT nextval('ml_app.external_identifier_history_id_seq'::regclass);
--
-- Name: external_identifiers id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifiers ALTER COLUMN id SET DEFAULT nextval('ml_app.external_identifiers_id_seq'::regclass);
--
-- Name: external_link_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_link_history ALTER COLUMN id SET DEFAULT nextval('ml_app.external_link_history_id_seq'::regclass);
--
-- Name: external_links id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_links ALTER COLUMN id SET DEFAULT nextval('ml_app.external_links_id_seq'::regclass);
--
-- Name: general_selection_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.general_selection_history ALTER COLUMN id SET DEFAULT nextval('ml_app.general_selection_history_id_seq'::regclass);
--
-- Name: general_selections id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.general_selections ALTER COLUMN id SET DEFAULT nextval('ml_app.general_selections_id_seq'::regclass);
--
-- Name: imports id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.imports ALTER COLUMN id SET DEFAULT nextval('ml_app.imports_id_seq'::regclass);
--
-- Name: imports_model_generators id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.imports_model_generators ALTER COLUMN id SET DEFAULT nextval('ml_app.imports_model_generators_id_seq'::regclass);
--
-- Name: item_flag_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_history ALTER COLUMN id SET DEFAULT nextval('ml_app.item_flag_history_id_seq'::regclass);
--
-- Name: item_flag_name_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_name_history ALTER COLUMN id SET DEFAULT nextval('ml_app.item_flag_name_history_id_seq'::regclass);
--
-- Name: item_flag_names id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_names ALTER COLUMN id SET DEFAULT nextval('ml_app.item_flag_names_id_seq'::regclass);
--
-- Name: item_flags id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flags ALTER COLUMN id SET DEFAULT nextval('ml_app.item_flags_id_seq'::regclass);
--
-- Name: manage_users id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.manage_users ALTER COLUMN id SET DEFAULT nextval('ml_app.manage_users_id_seq'::regclass);
--
-- Name: masters id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.masters ALTER COLUMN id SET DEFAULT nextval('ml_app.masters_id_seq'::regclass);
--
-- Name: message_notifications id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_notifications ALTER COLUMN id SET DEFAULT nextval('ml_app.message_notifications_id_seq'::regclass);
--
-- Name: message_template_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_template_history ALTER COLUMN id SET DEFAULT nextval('ml_app.message_template_history_id_seq'::regclass);
--
-- Name: message_templates id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_templates ALTER COLUMN id SET DEFAULT nextval('ml_app.message_templates_id_seq'::regclass);
--
-- Name: model_references id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.model_references ALTER COLUMN id SET DEFAULT nextval('ml_app.model_references_id_seq'::regclass);
--
-- Name: nfs_store_archived_file_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_file_history ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_archived_file_history_id_seq'::regclass);
--
-- Name: nfs_store_archived_files id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_files ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_archived_files_id_seq'::regclass);
--
-- Name: nfs_store_container_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_container_history ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_container_history_id_seq'::regclass);
--
-- Name: nfs_store_containers id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_containers ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_containers_id_seq'::regclass);
--
-- Name: nfs_store_downloads id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_downloads ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_downloads_id_seq'::regclass);
--
-- Name: nfs_store_filter_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filter_history ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_filter_history_id_seq'::regclass);
--
-- Name: nfs_store_filters id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filters ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_filters_id_seq'::regclass);
--
-- Name: nfs_store_imports id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_imports ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_imports_id_seq'::regclass);
--
-- Name: nfs_store_move_actions id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_move_actions ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_move_actions_id_seq'::regclass);
--
-- Name: nfs_store_stored_file_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_file_history ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_stored_file_history_id_seq'::regclass);
--
-- Name: nfs_store_stored_files id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_files ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_stored_files_id_seq'::regclass);
--
-- Name: nfs_store_trash_actions id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_trash_actions ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_trash_actions_id_seq'::regclass);
--
-- Name: nfs_store_uploads id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_uploads ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_uploads_id_seq'::regclass);
--
-- Name: nfs_store_user_file_actions id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_user_file_actions ALTER COLUMN id SET DEFAULT nextval('ml_app.nfs_store_user_file_actions_id_seq'::regclass);
--
-- Name: page_layout_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layout_history ALTER COLUMN id SET DEFAULT nextval('ml_app.page_layout_history_id_seq'::regclass);
--
-- Name: page_layouts id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layouts ALTER COLUMN id SET DEFAULT nextval('ml_app.page_layouts_id_seq'::regclass);
--
-- Name: player_contact_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contact_history ALTER COLUMN id SET DEFAULT nextval('ml_app.player_contact_history_id_seq'::regclass);
--
-- Name: player_contacts id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contacts ALTER COLUMN id SET DEFAULT nextval('ml_app.player_contacts_id_seq'::regclass);
--
-- Name: player_info_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_info_history ALTER COLUMN id SET DEFAULT nextval('ml_app.player_info_history_id_seq'::regclass);
--
-- Name: player_infos id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_infos ALTER COLUMN id SET DEFAULT nextval('ml_app.player_infos_id_seq'::regclass);
--
-- Name: pro_infos id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.pro_infos ALTER COLUMN id SET DEFAULT nextval('ml_app.pro_infos_id_seq'::regclass);
--
-- Name: protocol_event_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_event_history ALTER COLUMN id SET DEFAULT nextval('ml_app.protocol_event_history_id_seq'::regclass);
--
-- Name: protocol_events id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_events ALTER COLUMN id SET DEFAULT nextval('ml_app.protocol_events_id_seq'::regclass);
--
-- Name: protocol_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_history ALTER COLUMN id SET DEFAULT nextval('ml_app.protocol_history_id_seq'::regclass);
--
-- Name: protocols id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocols ALTER COLUMN id SET DEFAULT nextval('ml_app.protocols_id_seq'::regclass);
--
-- Name: rc_cis id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.rc_cis ALTER COLUMN id SET DEFAULT nextval('ml_app.rc_cis_id_seq'::regclass);
--
-- Name: rc_stage_cif_copy id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.rc_stage_cif_copy ALTER COLUMN id SET DEFAULT nextval('ml_app.rc_stage_cif_copy_id_seq'::regclass);
--
-- Name: report_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.report_history ALTER COLUMN id SET DEFAULT nextval('ml_app.report_history_id_seq'::regclass);
--
-- Name: reports id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.reports ALTER COLUMN id SET DEFAULT nextval('ml_app.reports_id_seq'::regclass);
--
-- Name: role_description_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_description_history ALTER COLUMN id SET DEFAULT nextval('ml_app.role_description_history_id_seq'::regclass);
--
-- Name: role_descriptions id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_descriptions ALTER COLUMN id SET DEFAULT nextval('ml_app.role_descriptions_id_seq'::regclass);
--
-- Name: sage_assignments id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sage_assignments ALTER COLUMN id SET DEFAULT nextval('ml_app.sage_assignments_id_seq'::regclass);
--
-- Name: scantron_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantron_history ALTER COLUMN id SET DEFAULT nextval('ml_app.scantron_history_id_seq'::regclass);
--
-- Name: scantrons id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantrons ALTER COLUMN id SET DEFAULT nextval('ml_app.scantrons_id_seq'::regclass);
--
-- Name: sessions id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sessions ALTER COLUMN id SET DEFAULT nextval('ml_app.sessions_id_seq'::regclass);
--
-- Name: sub_process_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_process_history ALTER COLUMN id SET DEFAULT nextval('ml_app.sub_process_history_id_seq'::regclass);
--
-- Name: sub_processes id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_processes ALTER COLUMN id SET DEFAULT nextval('ml_app.sub_processes_id_seq'::regclass);
--
-- Name: tracker_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history ALTER COLUMN id SET DEFAULT nextval('ml_app.tracker_history_id_seq'::regclass);
--
-- Name: trackers id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers ALTER COLUMN id SET DEFAULT nextval('ml_app.trackers_id_seq'::regclass);
--
-- Name: user_access_control_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_control_history ALTER COLUMN id SET DEFAULT nextval('ml_app.user_access_control_history_id_seq'::regclass);
--
-- Name: user_access_controls id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_controls ALTER COLUMN id SET DEFAULT nextval('ml_app.user_access_controls_id_seq'::regclass);
--
-- Name: user_action_logs id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_action_logs ALTER COLUMN id SET DEFAULT nextval('ml_app.user_action_logs_id_seq'::regclass);
--
-- Name: user_authorization_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_authorization_history ALTER COLUMN id SET DEFAULT nextval('ml_app.user_authorization_history_id_seq'::regclass);
--
-- Name: user_authorizations id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_authorizations ALTER COLUMN id SET DEFAULT nextval('ml_app.user_authorizations_id_seq'::regclass);
--
-- Name: user_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_history ALTER COLUMN id SET DEFAULT nextval('ml_app.user_history_id_seq'::regclass);
--
-- Name: user_role_history id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_role_history ALTER COLUMN id SET DEFAULT nextval('ml_app.user_role_history_id_seq'::regclass);
--
-- Name: user_roles id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_roles ALTER COLUMN id SET DEFAULT nextval('ml_app.user_roles_id_seq'::regclass);
--
-- Name: users id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users ALTER COLUMN id SET DEFAULT nextval('ml_app.users_id_seq'::regclass);
--
-- Name: users_contact_infos id; Type: DEFAULT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users_contact_infos ALTER COLUMN id SET DEFAULT nextval('ml_app.users_contact_infos_id_seq'::regclass);
--
-- Name: datadic_choice_history id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_choice_history ALTER COLUMN id SET DEFAULT nextval('ref_data.datadic_choice_history_id_seq'::regclass);
--
-- Name: datadic_choices id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_choices ALTER COLUMN id SET DEFAULT nextval('ref_data.datadic_choices_id_seq'::regclass);
--
-- Name: datadic_variable_history id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_variable_history ALTER COLUMN id SET DEFAULT nextval('ref_data.datadic_variable_history_id_seq'::regclass);
--
-- Name: datadic_variables id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_variables ALTER COLUMN id SET DEFAULT nextval('ref_data.datadic_variables_id_seq'::regclass);
--
-- Name: redcap_client_requests id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_client_requests ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_client_requests_id_seq'::regclass);
--
-- Name: redcap_data_collection_instrument_history id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_collection_instrument_history ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_data_collection_instrument_history_id_seq'::regclass);
--
-- Name: redcap_data_collection_instruments id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_collection_instruments ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_data_collection_instruments_id_seq'::regclass);
--
-- Name: redcap_data_dictionaries id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_dictionaries ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_data_dictionaries_id_seq'::regclass);
--
-- Name: redcap_data_dictionary_history id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_dictionary_history ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_data_dictionary_history_id_seq'::regclass);
--
-- Name: redcap_project_admin_history id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_admin_history ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_project_admin_history_id_seq'::regclass);
--
-- Name: redcap_project_admins id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_admins ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_project_admins_id_seq'::regclass);
--
-- Name: redcap_project_user_history id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_user_history ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_project_user_history_id_seq'::regclass);
--
-- Name: redcap_project_users id; Type: DEFAULT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_users ALTER COLUMN id SET DEFAULT nextval('ref_data.redcap_project_users_id_seq'::regclass);
--
-- Constraint section: primary keys, emitted by pg_dump after table creation
-- (and after data load in dumps that include data) so that PK indexes are
-- built in one pass. Generated output - do not hand-edit individual
-- statements. Note ar_internal_metadata (Rails bookkeeping) keys on "key"
-- rather than "id".
--
-- Name: accuracy_score_history accuracy_score_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.accuracy_score_history
    ADD CONSTRAINT accuracy_score_history_pkey PRIMARY KEY (id);
--
-- Name: accuracy_scores accuracy_scores_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.accuracy_scores
    ADD CONSTRAINT accuracy_scores_pkey PRIMARY KEY (id);
--
-- Name: activity_log_history activity_log_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_history
    ADD CONSTRAINT activity_log_history_pkey PRIMARY KEY (id);
--
-- Name: activity_log_player_contact_phone_history activity_log_player_contact_phone_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phone_history
    ADD CONSTRAINT activity_log_player_contact_phone_history_pkey PRIMARY KEY (id);
--
-- Name: activity_log_player_contact_phones activity_log_player_contact_phones_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phones
    ADD CONSTRAINT activity_log_player_contact_phones_pkey PRIMARY KEY (id);
--
-- Name: activity_logs activity_logs_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_logs
    ADD CONSTRAINT activity_logs_pkey PRIMARY KEY (id);
--
-- Name: address_history address_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.address_history
    ADD CONSTRAINT address_history_pkey PRIMARY KEY (id);
--
-- Name: addresses addresses_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.addresses
    ADD CONSTRAINT addresses_pkey PRIMARY KEY (id);
--
-- Name: admin_action_logs admin_action_logs_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_action_logs
    ADD CONSTRAINT admin_action_logs_pkey PRIMARY KEY (id);
--
-- Name: admin_history admin_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_history
    ADD CONSTRAINT admin_history_pkey PRIMARY KEY (id);
--
-- Name: admins admins_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admins
    ADD CONSTRAINT admins_pkey PRIMARY KEY (id);
--
-- Name: app_configuration_history app_configuration_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configuration_history
    ADD CONSTRAINT app_configuration_history_pkey PRIMARY KEY (id);
--
-- Name: app_configurations app_configurations_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configurations
    ADD CONSTRAINT app_configurations_pkey PRIMARY KEY (id);
--
-- Name: app_type_history app_type_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_type_history
    ADD CONSTRAINT app_type_history_pkey PRIMARY KEY (id);
--
-- Name: app_types app_types_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_types
    ADD CONSTRAINT app_types_pkey PRIMARY KEY (id);
--
-- Name: ar_internal_metadata ar_internal_metadata_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.ar_internal_metadata
    ADD CONSTRAINT ar_internal_metadata_pkey PRIMARY KEY (key);
--
-- Name: college_history college_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.college_history
    ADD CONSTRAINT college_history_pkey PRIMARY KEY (id);
--
-- Name: colleges colleges_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.colleges
    ADD CONSTRAINT colleges_pkey PRIMARY KEY (id);
--
-- Name: config_libraries config_libraries_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_libraries
    ADD CONSTRAINT config_libraries_pkey PRIMARY KEY (id);
--
-- Name: config_library_history config_library_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_library_history
    ADD CONSTRAINT config_library_history_pkey PRIMARY KEY (id);
--
-- Name: delayed_jobs delayed_jobs_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.delayed_jobs
    ADD CONSTRAINT delayed_jobs_pkey PRIMARY KEY (id);
--
-- Name: dynamic_model_history dynamic_model_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.dynamic_model_history
    ADD CONSTRAINT dynamic_model_history_pkey PRIMARY KEY (id);
--
-- Name: dynamic_models dynamic_models_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.dynamic_models
    ADD CONSTRAINT dynamic_models_pkey PRIMARY KEY (id);
--
-- Name: exception_logs exception_logs_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.exception_logs
    ADD CONSTRAINT exception_logs_pkey PRIMARY KEY (id);
--
-- Name: external_identifier_history external_identifier_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifier_history
    ADD CONSTRAINT external_identifier_history_pkey PRIMARY KEY (id);
--
-- Name: external_identifiers external_identifiers_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifiers
    ADD CONSTRAINT external_identifiers_pkey PRIMARY KEY (id);
--
-- Name: external_link_history external_link_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_link_history
ADD CONSTRAINT external_link_history_pkey PRIMARY KEY (id);
--
-- Name: external_links external_links_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_links
ADD CONSTRAINT external_links_pkey PRIMARY KEY (id);
--
-- Name: general_selection_history general_selection_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.general_selection_history
ADD CONSTRAINT general_selection_history_pkey PRIMARY KEY (id);
--
-- Name: general_selections general_selections_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.general_selections
ADD CONSTRAINT general_selections_pkey PRIMARY KEY (id);
--
-- Name: imports_model_generators imports_model_generators_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.imports_model_generators
ADD CONSTRAINT imports_model_generators_pkey PRIMARY KEY (id);
--
-- Name: imports imports_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.imports
ADD CONSTRAINT imports_pkey PRIMARY KEY (id);
--
-- Name: item_flag_history item_flag_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_history
ADD CONSTRAINT item_flag_history_pkey PRIMARY KEY (id);
--
-- Name: item_flag_name_history item_flag_name_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_name_history
ADD CONSTRAINT item_flag_name_history_pkey PRIMARY KEY (id);
--
-- Name: item_flag_names item_flag_names_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_names
ADD CONSTRAINT item_flag_names_pkey PRIMARY KEY (id);
--
-- Name: item_flags item_flags_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flags
ADD CONSTRAINT item_flags_pkey PRIMARY KEY (id);
--
-- Name: manage_users manage_users_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.manage_users
ADD CONSTRAINT manage_users_pkey PRIMARY KEY (id);
--
-- Name: masters masters_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.masters
ADD CONSTRAINT masters_pkey PRIMARY KEY (id);
--
-- Name: message_notifications message_notifications_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_notifications
ADD CONSTRAINT message_notifications_pkey PRIMARY KEY (id);
--
-- Name: message_template_history message_template_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_template_history
ADD CONSTRAINT message_template_history_pkey PRIMARY KEY (id);
--
-- Name: message_templates message_templates_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_templates
ADD CONSTRAINT message_templates_pkey PRIMARY KEY (id);
--
-- Name: model_references model_references_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.model_references
ADD CONSTRAINT model_references_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_archived_file_history nfs_store_archived_file_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_file_history
ADD CONSTRAINT nfs_store_archived_file_history_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_archived_files nfs_store_archived_files_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_files
ADD CONSTRAINT nfs_store_archived_files_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_container_history nfs_store_container_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_container_history
ADD CONSTRAINT nfs_store_container_history_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_containers nfs_store_containers_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_containers
ADD CONSTRAINT nfs_store_containers_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_downloads nfs_store_downloads_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_downloads
ADD CONSTRAINT nfs_store_downloads_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_filter_history nfs_store_filter_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filter_history
ADD CONSTRAINT nfs_store_filter_history_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_filters nfs_store_filters_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filters
ADD CONSTRAINT nfs_store_filters_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_imports nfs_store_imports_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_imports
ADD CONSTRAINT nfs_store_imports_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_move_actions nfs_store_move_actions_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_move_actions
ADD CONSTRAINT nfs_store_move_actions_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_stored_file_history nfs_store_stored_file_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_file_history
ADD CONSTRAINT nfs_store_stored_file_history_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_stored_files nfs_store_stored_files_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_files
ADD CONSTRAINT nfs_store_stored_files_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_trash_actions nfs_store_trash_actions_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_trash_actions
ADD CONSTRAINT nfs_store_trash_actions_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_uploads nfs_store_uploads_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_uploads
ADD CONSTRAINT nfs_store_uploads_pkey PRIMARY KEY (id);
--
-- Name: nfs_store_user_file_actions nfs_store_user_file_actions_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_user_file_actions
ADD CONSTRAINT nfs_store_user_file_actions_pkey PRIMARY KEY (id);
--
-- Name: page_layout_history page_layout_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layout_history
ADD CONSTRAINT page_layout_history_pkey PRIMARY KEY (id);
--
-- Name: page_layouts page_layouts_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layouts
ADD CONSTRAINT page_layouts_pkey PRIMARY KEY (id);
--
-- Name: player_contact_history player_contact_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contact_history
ADD CONSTRAINT player_contact_history_pkey PRIMARY KEY (id);
--
-- Name: player_contacts player_contacts_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contacts
ADD CONSTRAINT player_contacts_pkey PRIMARY KEY (id);
--
-- Name: player_info_history player_info_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_info_history
ADD CONSTRAINT player_info_history_pkey PRIMARY KEY (id);
--
-- Name: player_infos player_infos_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_infos
ADD CONSTRAINT player_infos_pkey PRIMARY KEY (id);
--
-- Name: pro_infos pro_infos_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.pro_infos
ADD CONSTRAINT pro_infos_pkey PRIMARY KEY (id);
--
-- Name: protocol_event_history protocol_event_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_event_history
ADD CONSTRAINT protocol_event_history_pkey PRIMARY KEY (id);
--
-- Name: protocol_events protocol_events_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_events
ADD CONSTRAINT protocol_events_pkey PRIMARY KEY (id);
--
-- Name: protocol_history protocol_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_history
ADD CONSTRAINT protocol_history_pkey PRIMARY KEY (id);
--
-- Name: protocols protocols_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocols
ADD CONSTRAINT protocols_pkey PRIMARY KEY (id);
--
-- Name: rc_cis rc_cis_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.rc_cis
ADD CONSTRAINT rc_cis_pkey PRIMARY KEY (id);
--
-- Name: rc_stage_cif_copy rc_stage_cif_copy_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.rc_stage_cif_copy
ADD CONSTRAINT rc_stage_cif_copy_pkey PRIMARY KEY (id);
--
-- Name: report_history report_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.report_history
ADD CONSTRAINT report_history_pkey PRIMARY KEY (id);
--
-- Name: reports reports_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.reports
ADD CONSTRAINT reports_pkey PRIMARY KEY (id);
--
-- Name: role_description_history role_description_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_description_history
ADD CONSTRAINT role_description_history_pkey PRIMARY KEY (id);
--
-- Name: role_descriptions role_descriptions_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_descriptions
ADD CONSTRAINT role_descriptions_pkey PRIMARY KEY (id);
--
-- Name: sage_assignments sage_assignments_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sage_assignments
ADD CONSTRAINT sage_assignments_pkey PRIMARY KEY (id);
--
-- Name: scantron_history scantron_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantron_history
ADD CONSTRAINT scantron_history_pkey PRIMARY KEY (id);
--
-- Name: scantrons scantrons_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantrons
ADD CONSTRAINT scantrons_pkey PRIMARY KEY (id);
--
-- Name: sessions sessions_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sessions
ADD CONSTRAINT sessions_pkey PRIMARY KEY (id);
--
-- Name: sub_process_history sub_process_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_process_history
ADD CONSTRAINT sub_process_history_pkey PRIMARY KEY (id);
--
-- Name: sub_processes sub_processes_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_processes
ADD CONSTRAINT sub_processes_pkey PRIMARY KEY (id);
--
-- Name: tracker_history tracker_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT tracker_history_pkey PRIMARY KEY (id);
--
-- Name: trackers trackers_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT trackers_pkey PRIMARY KEY (id);
--
-- Name: trackers unique_master_protocol; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT unique_master_protocol UNIQUE (master_id, protocol_id);
--
-- Name: trackers unique_master_protocol_id; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT unique_master_protocol_id UNIQUE (master_id, protocol_id, id);
--
-- Name: sub_processes unique_protocol_and_id; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_processes
ADD CONSTRAINT unique_protocol_and_id UNIQUE (protocol_id, id);
--
-- Name: protocol_events unique_sub_process_and_id; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_events
ADD CONSTRAINT unique_sub_process_and_id UNIQUE (sub_process_id, id);
--
-- Name: user_access_control_history user_access_control_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_control_history
ADD CONSTRAINT user_access_control_history_pkey PRIMARY KEY (id);
--
-- Name: user_access_controls user_access_controls_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_controls
ADD CONSTRAINT user_access_controls_pkey PRIMARY KEY (id);
--
-- Name: user_action_logs user_action_logs_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_action_logs
ADD CONSTRAINT user_action_logs_pkey PRIMARY KEY (id);
--
-- Name: user_authorization_history user_authorization_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_authorization_history
ADD CONSTRAINT user_authorization_history_pkey PRIMARY KEY (id);
--
-- Name: user_authorizations user_authorizations_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_authorizations
ADD CONSTRAINT user_authorizations_pkey PRIMARY KEY (id);
--
-- Name: user_history user_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_history
ADD CONSTRAINT user_history_pkey PRIMARY KEY (id);
--
-- Name: user_role_history user_role_history_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_role_history
ADD CONSTRAINT user_role_history_pkey PRIMARY KEY (id);
--
-- Name: user_roles user_roles_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_roles
ADD CONSTRAINT user_roles_pkey PRIMARY KEY (id);
--
-- Name: users_contact_infos users_contact_infos_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users_contact_infos
ADD CONSTRAINT users_contact_infos_pkey PRIMARY KEY (id);
--
-- Name: users users_pkey; Type: CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
-- ============================================================
-- SECTION: primary key constraints (schema: ref_data).
-- Generated PostgreSQL DDL (pg_dump style); do not hand-edit
-- individual statements -- regenerate from the source database.
-- ============================================================
--
-- Name: datadic_choice_history datadic_choice_history_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_choice_history
ADD CONSTRAINT datadic_choice_history_pkey PRIMARY KEY (id);
--
-- Name: datadic_choices datadic_choices_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_choices
ADD CONSTRAINT datadic_choices_pkey PRIMARY KEY (id);
--
-- Name: datadic_variable_history datadic_variable_history_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_variable_history
ADD CONSTRAINT datadic_variable_history_pkey PRIMARY KEY (id);
--
-- Name: datadic_variables datadic_variables_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_variables
ADD CONSTRAINT datadic_variables_pkey PRIMARY KEY (id);
--
-- Name: redcap_client_requests redcap_client_requests_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_client_requests
ADD CONSTRAINT redcap_client_requests_pkey PRIMARY KEY (id);
--
-- Name: redcap_data_collection_instrument_history redcap_data_collection_instrument_history_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_collection_instrument_history
ADD CONSTRAINT redcap_data_collection_instrument_history_pkey PRIMARY KEY (id);
--
-- Name: redcap_data_collection_instruments redcap_data_collection_instruments_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_collection_instruments
ADD CONSTRAINT redcap_data_collection_instruments_pkey PRIMARY KEY (id);
--
-- Name: redcap_data_dictionaries redcap_data_dictionaries_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_dictionaries
ADD CONSTRAINT redcap_data_dictionaries_pkey PRIMARY KEY (id);
--
-- Name: redcap_data_dictionary_history redcap_data_dictionary_history_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_dictionary_history
ADD CONSTRAINT redcap_data_dictionary_history_pkey PRIMARY KEY (id);
--
-- Name: redcap_project_admin_history redcap_project_admin_history_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_admin_history
ADD CONSTRAINT redcap_project_admin_history_pkey PRIMARY KEY (id);
--
-- Name: redcap_project_admins redcap_project_admins_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_admins
ADD CONSTRAINT redcap_project_admins_pkey PRIMARY KEY (id);
--
-- Name: redcap_project_user_history redcap_project_user_history_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_user_history
ADD CONSTRAINT redcap_project_user_history_pkey PRIMARY KEY (id);
--
-- Name: redcap_project_users redcap_project_users_pkey; Type: CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_users
ADD CONSTRAINT redcap_project_users_pkey PRIMARY KEY (id);
-- ============================================================
-- SECTION: btree indexes (schema: ml_app).
-- Mostly single-column FK-support indexes (one per *_id column)
-- generated by pg_dump; "USING btree" is PostgreSQL syntax.
-- ============================================================
--
-- Name: delayed_jobs_priority; Type: INDEX; Schema: ml_app; Owner: -
--
-- Composite index supporting the delayed_job worker's "next job" query
-- (ordered by priority, then scheduled run time).
CREATE INDEX delayed_jobs_priority ON ml_app.delayed_jobs USING btree (priority, run_at);
--
-- Name: idx_h_on_role_descriptions_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX idx_h_on_role_descriptions_id ON ml_app.role_description_history USING btree (role_description_id);
--
-- Name: index_accuracy_score_history_on_accuracy_score_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_accuracy_score_history_on_accuracy_score_id ON ml_app.accuracy_score_history USING btree (accuracy_score_id);
--
-- Name: index_accuracy_scores_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_accuracy_scores_on_admin_id ON ml_app.accuracy_scores USING btree (admin_id);
--
-- Name: index_activity_log_history_on_activity_log_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_history_on_activity_log_id ON ml_app.activity_log_history USING btree (activity_log_id);
--
-- Name: index_activity_log_player_contact_phone_history_on_activity_log; Type: INDEX; Schema: ml_app; Owner: -
--
-- NOTE: index names below are truncated to PostgreSQL's 63-character
-- identifier limit (e.g. "..._on_activity_log", "..._on_player_conta").
CREATE INDEX index_activity_log_player_contact_phone_history_on_activity_log ON ml_app.activity_log_player_contact_phone_history USING btree (activity_log_player_contact_phone_id);
--
-- Name: index_activity_log_player_contact_phone_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phone_history_on_master_id ON ml_app.activity_log_player_contact_phone_history USING btree (master_id);
--
-- Name: index_activity_log_player_contact_phone_history_on_player_conta; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phone_history_on_player_conta ON ml_app.activity_log_player_contact_phone_history USING btree (player_contact_id);
--
-- Name: index_activity_log_player_contact_phone_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phone_history_on_user_id ON ml_app.activity_log_player_contact_phone_history USING btree (user_id);
--
-- Name: index_activity_log_player_contact_phones_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phones_on_master_id ON ml_app.activity_log_player_contact_phones USING btree (master_id);
--
-- Name: index_activity_log_player_contact_phones_on_player_contact_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phones_on_player_contact_id ON ml_app.activity_log_player_contact_phones USING btree (player_contact_id);
--
-- Name: index_activity_log_player_contact_phones_on_protocol_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phones_on_protocol_id ON ml_app.activity_log_player_contact_phones USING btree (protocol_id);
--
-- Name: index_activity_log_player_contact_phones_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_activity_log_player_contact_phones_on_user_id ON ml_app.activity_log_player_contact_phones USING btree (user_id);
--
-- Name: index_address_history_on_address_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_address_history_on_address_id ON ml_app.address_history USING btree (address_id);
--
-- Name: index_address_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_address_history_on_master_id ON ml_app.address_history USING btree (master_id);
--
-- Name: index_address_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_address_history_on_user_id ON ml_app.address_history USING btree (user_id);
--
-- Name: index_addresses_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_addresses_on_master_id ON ml_app.addresses USING btree (master_id);
--
-- Name: index_addresses_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_addresses_on_user_id ON ml_app.addresses USING btree (user_id);
--
-- Name: index_admin_action_logs_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_admin_action_logs_on_admin_id ON ml_app.admin_action_logs USING btree (admin_id);
--
-- Name: index_admin_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_admin_history_on_admin_id ON ml_app.admin_history USING btree (admin_id);
--
-- Name: index_admin_history_on_upd_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_admin_history_on_upd_admin_id ON ml_app.admin_history USING btree (updated_by_admin_id);
--
-- Name: index_admins_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_admins_on_admin_id ON ml_app.admins USING btree (admin_id);
--
-- Name: index_app_configuration_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_configuration_history_on_admin_id ON ml_app.app_configuration_history USING btree (admin_id);
--
-- Name: index_app_configuration_history_on_app_configuration_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_configuration_history_on_app_configuration_id ON ml_app.app_configuration_history USING btree (app_configuration_id);
--
-- Name: index_app_configurations_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_configurations_on_admin_id ON ml_app.app_configurations USING btree (admin_id);
--
-- Name: index_app_configurations_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_configurations_on_app_type_id ON ml_app.app_configurations USING btree (app_type_id);
--
-- Name: index_app_configurations_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_configurations_on_user_id ON ml_app.app_configurations USING btree (user_id);
--
-- Name: index_app_type_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_type_history_on_admin_id ON ml_app.app_type_history USING btree (admin_id);
--
-- Name: index_app_type_history_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_type_history_on_app_type_id ON ml_app.app_type_history USING btree (app_type_id);
--
-- Name: index_app_types_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_app_types_on_admin_id ON ml_app.app_types USING btree (admin_id);
--
-- Name: index_college_history_on_college_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_college_history_on_college_id ON ml_app.college_history USING btree (college_id);
--
-- Name: index_colleges_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_colleges_on_admin_id ON ml_app.colleges USING btree (admin_id);
--
-- Name: index_colleges_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_colleges_on_user_id ON ml_app.colleges USING btree (user_id);
--
-- Name: index_config_libraries_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_config_libraries_on_admin_id ON ml_app.config_libraries USING btree (admin_id);
--
-- Name: index_config_library_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_config_library_history_on_admin_id ON ml_app.config_library_history USING btree (admin_id);
--
-- Name: index_config_library_history_on_config_library_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_config_library_history_on_config_library_id ON ml_app.config_library_history USING btree (config_library_id);
--
-- Name: index_dynamic_model_history_on_dynamic_model_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_dynamic_model_history_on_dynamic_model_id ON ml_app.dynamic_model_history USING btree (dynamic_model_id);
--
-- Name: index_dynamic_models_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_dynamic_models_on_admin_id ON ml_app.dynamic_models USING btree (admin_id);
--
-- Name: index_exception_logs_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_exception_logs_on_admin_id ON ml_app.exception_logs USING btree (admin_id);
--
-- Name: index_exception_logs_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_exception_logs_on_user_id ON ml_app.exception_logs USING btree (user_id);
--
-- Name: index_external_identifier_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_external_identifier_history_on_admin_id ON ml_app.external_identifier_history USING btree (admin_id);
--
-- Name: index_external_identifier_history_on_external_identifier_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_external_identifier_history_on_external_identifier_id ON ml_app.external_identifier_history USING btree (external_identifier_id);
--
-- Name: index_external_identifiers_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_external_identifiers_on_admin_id ON ml_app.external_identifiers USING btree (admin_id);
--
-- Name: index_external_link_history_on_external_link_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_external_link_history_on_external_link_id ON ml_app.external_link_history USING btree (external_link_id);
--
-- Name: index_external_links_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_external_links_on_admin_id ON ml_app.external_links USING btree (admin_id);
--
-- Name: index_general_selection_history_on_general_selection_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_general_selection_history_on_general_selection_id ON ml_app.general_selection_history USING btree (general_selection_id);
--
-- Name: index_general_selections_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_general_selections_on_admin_id ON ml_app.general_selections USING btree (admin_id);
--
-- Name: index_imports_model_generators_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_imports_model_generators_on_admin_id ON ml_app.imports_model_generators USING btree (admin_id);
--
-- Name: index_imports_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_imports_on_user_id ON ml_app.imports USING btree (user_id);
--
-- Name: index_item_flag_history_on_item_flag_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_item_flag_history_on_item_flag_id ON ml_app.item_flag_history USING btree (item_flag_id);
--
-- Name: index_item_flag_name_history_on_item_flag_name_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_item_flag_name_history_on_item_flag_name_id ON ml_app.item_flag_name_history USING btree (item_flag_name_id);
--
-- Name: index_item_flag_names_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_item_flag_names_on_admin_id ON ml_app.item_flag_names USING btree (admin_id);
--
-- Name: index_item_flags_on_item_flag_name_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_item_flags_on_item_flag_name_id ON ml_app.item_flags USING btree (item_flag_name_id);
--
-- Name: index_item_flags_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_item_flags_on_user_id ON ml_app.item_flags USING btree (user_id);
--
-- Name: index_masters_on_msid; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_masters_on_msid ON ml_app.masters USING btree (msid);
--
-- Name: index_masters_on_pro_info_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_masters_on_pro_info_id ON ml_app.masters USING btree (pro_info_id);
--
-- Name: index_masters_on_proid; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_masters_on_proid ON ml_app.masters USING btree (pro_id);
--
-- Name: index_masters_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_masters_on_user_id ON ml_app.masters USING btree (user_id);
--
-- Name: index_message_notifications_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_notifications_on_app_type_id ON ml_app.message_notifications USING btree (app_type_id);
--
-- Name: index_message_notifications_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_notifications_on_master_id ON ml_app.message_notifications USING btree (master_id);
--
-- Name: index_message_notifications_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_notifications_on_user_id ON ml_app.message_notifications USING btree (user_id);
--
-- Name: index_message_notifications_status; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_notifications_status ON ml_app.message_notifications USING btree (status);
--
-- Name: index_message_template_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_template_history_on_admin_id ON ml_app.message_template_history USING btree (admin_id);
--
-- Name: index_message_template_history_on_message_template_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_template_history_on_message_template_id ON ml_app.message_template_history USING btree (message_template_id);
--
-- Name: index_message_templates_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_message_templates_on_admin_id ON ml_app.message_templates USING btree (admin_id);
--
-- Name: index_model_references_on_from_record_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_model_references_on_from_record_master_id ON ml_app.model_references USING btree (from_record_master_id);
--
-- Name: index_model_references_on_from_record_type_and_from_record_id; Type: INDEX; Schema: ml_app; Owner: -
--
-- Composite (type, id) indexes support polymorphic-association lookups.
CREATE INDEX index_model_references_on_from_record_type_and_from_record_id ON ml_app.model_references USING btree (from_record_type, from_record_id);
--
-- Name: index_model_references_on_to_record_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_model_references_on_to_record_master_id ON ml_app.model_references USING btree (to_record_master_id);
--
-- Name: index_model_references_on_to_record_type_and_to_record_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_model_references_on_to_record_type_and_to_record_id ON ml_app.model_references USING btree (to_record_type, to_record_id);
--
-- Name: index_model_references_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_model_references_on_user_id ON ml_app.model_references USING btree (user_id);
--
-- Name: index_nfs_store_archived_file_history_on_nfs_store_archived_fil; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_archived_file_history_on_nfs_store_archived_fil ON ml_app.nfs_store_archived_file_history USING btree (nfs_store_archived_file_id);
--
-- Name: index_nfs_store_archived_file_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_archived_file_history_on_user_id ON ml_app.nfs_store_archived_file_history USING btree (user_id);
--
-- Name: index_nfs_store_archived_files_on_nfs_store_container_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_archived_files_on_nfs_store_container_id ON ml_app.nfs_store_archived_files USING btree (nfs_store_container_id);
--
-- Name: index_nfs_store_archived_files_on_nfs_store_stored_file_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_archived_files_on_nfs_store_stored_file_id ON ml_app.nfs_store_archived_files USING btree (nfs_store_stored_file_id);
--
-- Name: index_nfs_store_container_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_container_history_on_master_id ON ml_app.nfs_store_container_history USING btree (master_id);
--
-- Name: index_nfs_store_container_history_on_nfs_store_container_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_container_history_on_nfs_store_container_id ON ml_app.nfs_store_container_history USING btree (nfs_store_container_id);
--
-- Name: index_nfs_store_container_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_container_history_on_user_id ON ml_app.nfs_store_container_history USING btree (user_id);
--
-- Name: index_nfs_store_containers_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_containers_on_master_id ON ml_app.nfs_store_containers USING btree (master_id);
--
-- Name: index_nfs_store_containers_on_nfs_store_container_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_containers_on_nfs_store_container_id ON ml_app.nfs_store_containers USING btree (nfs_store_container_id);
--
-- Name: index_nfs_store_filter_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_filter_history_on_admin_id ON ml_app.nfs_store_filter_history USING btree (admin_id);
--
-- Name: index_nfs_store_filter_history_on_nfs_store_filter_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_filter_history_on_nfs_store_filter_id ON ml_app.nfs_store_filter_history USING btree (nfs_store_filter_id);
--
-- Name: index_nfs_store_filters_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_filters_on_admin_id ON ml_app.nfs_store_filters USING btree (admin_id);
--
-- Name: index_nfs_store_filters_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_filters_on_app_type_id ON ml_app.nfs_store_filters USING btree (app_type_id);
--
-- Name: index_nfs_store_filters_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_filters_on_user_id ON ml_app.nfs_store_filters USING btree (user_id);
--
-- Name: index_nfs_store_stored_file_history_on_nfs_store_stored_file_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_stored_file_history_on_nfs_store_stored_file_id ON ml_app.nfs_store_stored_file_history USING btree (nfs_store_stored_file_id);
--
-- Name: index_nfs_store_stored_file_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_stored_file_history_on_user_id ON ml_app.nfs_store_stored_file_history USING btree (user_id);
--
-- Name: index_nfs_store_stored_files_on_nfs_store_container_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_stored_files_on_nfs_store_container_id ON ml_app.nfs_store_stored_files USING btree (nfs_store_container_id);
--
-- Name: index_nfs_store_uploads_on_nfs_store_stored_file_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_uploads_on_nfs_store_stored_file_id ON ml_app.nfs_store_uploads USING btree (nfs_store_stored_file_id);
--
-- Name: index_nfs_store_uploads_on_upload_set; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_nfs_store_uploads_on_upload_set ON ml_app.nfs_store_uploads USING btree (upload_set);
--
-- Name: index_page_layout_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_page_layout_history_on_admin_id ON ml_app.page_layout_history USING btree (admin_id);
--
-- Name: index_page_layout_history_on_page_layout_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_page_layout_history_on_page_layout_id ON ml_app.page_layout_history USING btree (page_layout_id);
--
-- Name: index_page_layouts_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_page_layouts_on_admin_id ON ml_app.page_layouts USING btree (admin_id);
--
-- Name: index_page_layouts_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_page_layouts_on_app_type_id ON ml_app.page_layouts USING btree (app_type_id);
--
-- Name: index_player_contact_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_contact_history_on_master_id ON ml_app.player_contact_history USING btree (master_id);
--
-- Name: index_player_contact_history_on_player_contact_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_contact_history_on_player_contact_id ON ml_app.player_contact_history USING btree (player_contact_id);
--
-- Name: index_player_contact_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_contact_history_on_user_id ON ml_app.player_contact_history USING btree (user_id);
--
-- Name: index_player_contacts_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_contacts_on_master_id ON ml_app.player_contacts USING btree (master_id);
--
-- Name: index_player_contacts_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_contacts_on_user_id ON ml_app.player_contacts USING btree (user_id);
--
-- Name: index_player_info_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_info_history_on_master_id ON ml_app.player_info_history USING btree (master_id);
--
-- Name: index_player_info_history_on_player_info_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_info_history_on_player_info_id ON ml_app.player_info_history USING btree (player_info_id);
--
-- Name: index_player_info_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_info_history_on_user_id ON ml_app.player_info_history USING btree (user_id);
--
-- Name: index_player_infos_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_infos_on_master_id ON ml_app.player_infos USING btree (master_id);
--
-- Name: index_player_infos_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_player_infos_on_user_id ON ml_app.player_infos USING btree (user_id);
--
-- Name: index_pro_infos_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_pro_infos_on_master_id ON ml_app.pro_infos USING btree (master_id);
--
-- Name: index_pro_infos_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_pro_infos_on_user_id ON ml_app.pro_infos USING btree (user_id);
--
-- Name: index_protocol_event_history_on_protocol_event_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_protocol_event_history_on_protocol_event_id ON ml_app.protocol_event_history USING btree (protocol_event_id);
--
-- Name: index_protocol_events_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_protocol_events_on_admin_id ON ml_app.protocol_events USING btree (admin_id);
--
-- Name: index_protocol_events_on_sub_process_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_protocol_events_on_sub_process_id ON ml_app.protocol_events USING btree (sub_process_id);
--
-- Name: index_protocol_history_on_protocol_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_protocol_history_on_protocol_id ON ml_app.protocol_history USING btree (protocol_id);
--
-- Name: index_protocols_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_protocols_on_admin_id ON ml_app.protocols USING btree (admin_id);
--
-- Name: index_protocols_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_protocols_on_app_type_id ON ml_app.protocols USING btree (app_type_id);
--
-- Name: index_report_history_on_report_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_report_history_on_report_id ON ml_app.report_history USING btree (report_id);
--
-- Name: index_reports_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_reports_on_admin_id ON ml_app.reports USING btree (admin_id);
--
-- Name: index_role_description_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_role_description_history_on_admin_id ON ml_app.role_description_history USING btree (admin_id);
--
-- Name: index_role_description_history_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_role_description_history_on_app_type_id ON ml_app.role_description_history USING btree (app_type_id);
--
-- Name: index_role_descriptions_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_role_descriptions_on_admin_id ON ml_app.role_descriptions USING btree (admin_id);
--
-- Name: index_role_descriptions_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_role_descriptions_on_app_type_id ON ml_app.role_descriptions USING btree (app_type_id);
--
-- Name: index_sage_assignments_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sage_assignments_on_admin_id ON ml_app.sage_assignments USING btree (admin_id);
--
-- Name: index_sage_assignments_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sage_assignments_on_master_id ON ml_app.sage_assignments USING btree (master_id);
--
-- Name: index_sage_assignments_on_sage_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX index_sage_assignments_on_sage_id ON ml_app.sage_assignments USING btree (sage_id);
--
-- Name: index_sage_assignments_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sage_assignments_on_user_id ON ml_app.sage_assignments USING btree (user_id);
--
-- Name: index_scantron_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_scantron_history_on_master_id ON ml_app.scantron_history USING btree (master_id);
--
-- Name: index_scantron_history_on_scantron_table_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_scantron_history_on_scantron_table_id ON ml_app.scantron_history USING btree (scantron_table_id);
--
-- Name: index_scantron_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_scantron_history_on_user_id ON ml_app.scantron_history USING btree (user_id);
--
-- Name: index_scantrons_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_scantrons_on_master_id ON ml_app.scantrons USING btree (master_id);
--
-- Name: index_scantrons_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_scantrons_on_user_id ON ml_app.scantrons USING btree (user_id);
--
-- Name: index_sessions_on_session_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX index_sessions_on_session_id ON ml_app.sessions USING btree (session_id);
--
-- Name: index_sessions_on_updated_at; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sessions_on_updated_at ON ml_app.sessions USING btree (updated_at);
--
-- Name: index_sub_process_history_on_sub_process_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sub_process_history_on_sub_process_id ON ml_app.sub_process_history USING btree (sub_process_id);
--
-- Name: index_sub_processes_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sub_processes_on_admin_id ON ml_app.sub_processes USING btree (admin_id);
--
-- Name: index_sub_processes_on_protocol_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_sub_processes_on_protocol_id ON ml_app.sub_processes USING btree (protocol_id);
--
-- Name: index_tracker_history_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_tracker_history_on_master_id ON ml_app.tracker_history USING btree (master_id);
--
-- Name: index_tracker_history_on_protocol_event_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_tracker_history_on_protocol_event_id ON ml_app.tracker_history USING btree (protocol_event_id);
--
-- Name: index_tracker_history_on_protocol_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_tracker_history_on_protocol_id ON ml_app.tracker_history USING btree (protocol_id);
--
-- Name: index_tracker_history_on_sub_process_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_tracker_history_on_sub_process_id ON ml_app.tracker_history USING btree (sub_process_id);
--
-- Name: index_tracker_history_on_tracker_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_tracker_history_on_tracker_id ON ml_app.tracker_history USING btree (tracker_id);
--
-- Name: index_tracker_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_tracker_history_on_user_id ON ml_app.tracker_history USING btree (user_id);
--
-- Name: index_trackers_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_trackers_on_master_id ON ml_app.trackers USING btree (master_id);
--
-- Name: index_trackers_on_protocol_event_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_trackers_on_protocol_event_id ON ml_app.trackers USING btree (protocol_event_id);
--
-- Name: index_trackers_on_protocol_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_trackers_on_protocol_id ON ml_app.trackers USING btree (protocol_id);
--
-- Name: index_trackers_on_sub_process_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_trackers_on_sub_process_id ON ml_app.trackers USING btree (sub_process_id);
--
-- Name: index_trackers_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_trackers_on_user_id ON ml_app.trackers USING btree (user_id);
--
-- Name: index_user_access_control_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_access_control_history_on_admin_id ON ml_app.user_access_control_history USING btree (admin_id);
--
-- Name: index_user_access_control_history_on_user_access_control_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_access_control_history_on_user_access_control_id ON ml_app.user_access_control_history USING btree (user_access_control_id);
--
-- Name: index_user_access_controls_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_access_controls_on_app_type_id ON ml_app.user_access_controls USING btree (app_type_id);
--
-- Name: index_user_action_logs_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_action_logs_on_app_type_id ON ml_app.user_action_logs USING btree (app_type_id);
--
-- Name: index_user_action_logs_on_master_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_action_logs_on_master_id ON ml_app.user_action_logs USING btree (master_id);
--
-- Name: index_user_action_logs_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_action_logs_on_user_id ON ml_app.user_action_logs USING btree (user_id);
--
-- Name: index_user_authorization_history_on_user_authorization_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_authorization_history_on_user_authorization_id ON ml_app.user_authorization_history USING btree (user_authorization_id);
--
-- Name: index_user_history_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_history_on_app_type_id ON ml_app.user_history USING btree (app_type_id);
--
-- Name: index_user_history_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_history_on_user_id ON ml_app.user_history USING btree (user_id);
--
-- Name: index_user_role_history_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_role_history_on_admin_id ON ml_app.user_role_history USING btree (admin_id);
--
-- Name: index_user_role_history_on_user_role_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_role_history_on_user_role_id ON ml_app.user_role_history USING btree (user_role_id);
--
-- Name: index_user_roles_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_roles_on_admin_id ON ml_app.user_roles USING btree (admin_id);
--
-- Name: index_user_roles_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_roles_on_app_type_id ON ml_app.user_roles USING btree (app_type_id);
--
-- Name: index_user_roles_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_user_roles_on_user_id ON ml_app.user_roles USING btree (user_id);
--
-- Name: index_users_contact_infos_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_users_contact_infos_on_admin_id ON ml_app.users_contact_infos USING btree (admin_id);
--
-- Name: index_users_contact_infos_on_user_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_users_contact_infos_on_user_id ON ml_app.users_contact_infos USING btree (user_id);
--
-- Name: index_users_on_admin_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_users_on_admin_id ON ml_app.users USING btree (admin_id);
--
-- Name: index_users_on_app_type_id; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE INDEX index_users_on_app_type_id ON ml_app.users USING btree (app_type_id);
--
-- Name: index_users_on_authentication_token; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX index_users_on_authentication_token ON ml_app.users USING btree (authentication_token);
--
-- Name: index_users_on_email; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX index_users_on_email ON ml_app.users USING btree (email);
--
-- Name: index_users_on_reset_password_token; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX index_users_on_reset_password_token ON ml_app.users USING btree (reset_password_token);
--
-- Name: index_users_on_unlock_token; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX index_users_on_unlock_token ON ml_app.users USING btree (unlock_token);
--
-- Name: nfs_store_stored_files_unique_file; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX nfs_store_stored_files_unique_file ON ml_app.nfs_store_stored_files USING btree (nfs_store_container_id, file_hash, file_name, path);
--
-- Name: unique_schema_migrations; Type: INDEX; Schema: ml_app; Owner: -
--
CREATE UNIQUE INDEX unique_schema_migrations ON ml_app.schema_migrations USING btree (version);
--
-- Name: idx_dch_on_redcap_dd_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_dch_on_redcap_dd_id ON ref_data.datadic_choice_history USING btree (redcap_data_dictionary_id);
--
-- Name: idx_dv_equiv; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_dv_equiv ON ref_data.datadic_variables USING btree (equivalent_to_id);
--
-- Name: idx_dvh_equiv; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_dvh_equiv ON ref_data.datadic_variable_history USING btree (equivalent_to_id);
--
-- Name: idx_dvh_on_redcap_dd_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_dvh_on_redcap_dd_id ON ref_data.datadic_variable_history USING btree (redcap_data_dictionary_id);
--
-- Name: idx_h_on_datadic_variable_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_h_on_datadic_variable_id ON ref_data.datadic_variable_history USING btree (datadic_variable_id);
--
-- Name: idx_h_on_proj_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_h_on_proj_admin_id ON ref_data.redcap_project_user_history USING btree (redcap_project_admin_id);
--
-- Name: idx_h_on_rdci_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_h_on_rdci_id ON ref_data.redcap_data_collection_instrument_history USING btree (redcap_data_collection_instrument_id);
--
-- Name: idx_h_on_redcap_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_h_on_redcap_admin_id ON ref_data.redcap_data_dictionary_history USING btree (redcap_project_admin_id);
--
-- Name: idx_h_on_redcap_project_user_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_h_on_redcap_project_user_id ON ref_data.redcap_project_user_history USING btree (redcap_project_user_id);
--
-- Name: idx_history_on_datadic_choice_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_history_on_datadic_choice_id ON ref_data.datadic_choice_history USING btree (datadic_choice_id);
--
-- Name: idx_history_on_redcap_data_dictionary_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_history_on_redcap_data_dictionary_id ON ref_data.redcap_data_dictionary_history USING btree (redcap_data_dictionary_id);
--
-- Name: idx_history_on_redcap_project_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_history_on_redcap_project_admin_id ON ref_data.redcap_project_admin_history USING btree (redcap_project_admin_id);
--
-- Name: idx_on_redcap_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_on_redcap_admin_id ON ref_data.redcap_data_dictionaries USING btree (redcap_project_admin_id);
--
-- Name: idx_rcr_on_redcap_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_rcr_on_redcap_admin_id ON ref_data.redcap_client_requests USING btree (redcap_project_admin_id);
--
-- Name: idx_rdci_pa; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_rdci_pa ON ref_data.redcap_data_collection_instruments USING btree (redcap_project_admin_id);
--
-- Name: idx_rdcih_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_rdcih_on_admin_id ON ref_data.redcap_data_collection_instrument_history USING btree (admin_id);
--
-- Name: idx_rdcih_on_proj_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX idx_rdcih_on_proj_admin_id ON ref_data.redcap_data_collection_instrument_history USING btree (redcap_project_admin_id);
--
-- Name: index_ref_data.datadic_choice_history_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.datadic_choice_history_on_admin_id" ON ref_data.datadic_choice_history USING btree (admin_id);
--
-- Name: index_ref_data.datadic_choices_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.datadic_choices_on_admin_id" ON ref_data.datadic_choices USING btree (admin_id);
--
-- Name: index_ref_data.datadic_choices_on_redcap_data_dictionary_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.datadic_choices_on_redcap_data_dictionary_id" ON ref_data.datadic_choices USING btree (redcap_data_dictionary_id);
--
-- Name: index_ref_data.datadic_variable_history_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.datadic_variable_history_on_admin_id" ON ref_data.datadic_variable_history USING btree (admin_id);
--
-- Name: index_ref_data.datadic_variables_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.datadic_variables_on_admin_id" ON ref_data.datadic_variables USING btree (admin_id);
--
-- Name: index_ref_data.datadic_variables_on_redcap_data_dictionary_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.datadic_variables_on_redcap_data_dictionary_id" ON ref_data.datadic_variables USING btree (redcap_data_dictionary_id);
--
-- Name: index_ref_data.redcap_client_requests_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_client_requests_on_admin_id" ON ref_data.redcap_client_requests USING btree (admin_id);
--
-- Name: index_ref_data.redcap_data_collection_instruments_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_data_collection_instruments_on_admin_id" ON ref_data.redcap_data_collection_instruments USING btree (admin_id);
--
-- Name: index_ref_data.redcap_data_dictionaries_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_data_dictionaries_on_admin_id" ON ref_data.redcap_data_dictionaries USING btree (admin_id);
--
-- Name: index_ref_data.redcap_data_dictionary_history_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_data_dictionary_history_on_admin_id" ON ref_data.redcap_data_dictionary_history USING btree (admin_id);
--
-- Name: index_ref_data.redcap_project_admin_history_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_project_admin_history_on_admin_id" ON ref_data.redcap_project_admin_history USING btree (admin_id);
--
-- Name: index_ref_data.redcap_project_admins_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_project_admins_on_admin_id" ON ref_data.redcap_project_admins USING btree (admin_id);
--
-- Name: index_ref_data.redcap_project_user_history_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_project_user_history_on_admin_id" ON ref_data.redcap_project_user_history USING btree (admin_id);
--
-- Name: index_ref_data.redcap_project_users_on_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_project_users_on_admin_id" ON ref_data.redcap_project_users USING btree (admin_id);
--
-- Name: index_ref_data.redcap_project_users_on_redcap_project_admin_id; Type: INDEX; Schema: ref_data; Owner: -
--
CREATE INDEX "index_ref_data.redcap_project_users_on_redcap_project_admin_id" ON ref_data.redcap_project_users USING btree (redcap_project_admin_id);
--
-- Name: accuracy_scores accuracy_score_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER accuracy_score_history_insert AFTER INSERT ON ml_app.accuracy_scores FOR EACH ROW EXECUTE PROCEDURE ml_app.log_accuracy_score_update();
--
-- Name: accuracy_scores accuracy_score_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER accuracy_score_history_update AFTER UPDATE ON ml_app.accuracy_scores FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_accuracy_score_update();
--
-- Name: activity_logs activity_log_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER activity_log_history_insert AFTER INSERT ON ml_app.activity_logs FOR EACH ROW EXECUTE PROCEDURE ml_app.log_activity_log_update();
--
-- Name: activity_logs activity_log_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER activity_log_history_update AFTER UPDATE ON ml_app.activity_logs FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_activity_log_update();
--
-- Name: activity_log_player_contact_phones activity_log_player_contact_phone_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER activity_log_player_contact_phone_history_insert AFTER INSERT ON ml_app.activity_log_player_contact_phones FOR EACH ROW EXECUTE PROCEDURE ml_app.log_activity_log_player_contact_phone_update();
--
-- Name: activity_log_player_contact_phones activity_log_player_contact_phone_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER activity_log_player_contact_phone_history_update AFTER UPDATE ON ml_app.activity_log_player_contact_phones FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_activity_log_player_contact_phone_update();
--
-- Name: addresses address_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER address_history_insert AFTER INSERT ON ml_app.addresses FOR EACH ROW EXECUTE PROCEDURE ml_app.log_address_update();
--
-- Name: addresses address_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER address_history_update AFTER UPDATE ON ml_app.addresses FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_address_update();
--
-- Name: addresses address_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER address_insert BEFORE INSERT ON ml_app.addresses FOR EACH ROW EXECUTE PROCEDURE ml_app.handle_address_update();
--
-- Name: addresses address_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER address_update BEFORE UPDATE ON ml_app.addresses FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.handle_address_update();
--
-- Name: admins admin_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER admin_history_insert AFTER INSERT ON ml_app.admins FOR EACH ROW EXECUTE PROCEDURE ml_app.log_admin_update();
--
-- Name: admins admin_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
-- NOTE(review): everything below is PostgreSQL-specific syntax (EXECUTE PROCEDURE,
-- whole-row old.*/new.* comparison, IS DISTINCT FROM, schema-qualified names).
-- The file header advertises phpMyAdmin / MariaDB 10.4, but this section cannot
-- run on MySQL/MariaDB -- confirm the intended target engine before importing.
--
-- Audit-trail triggers. The pattern throughout: each audited table gets
--   * an AFTER INSERT trigger that calls its log_<table>_update() procedure, and
--   * an AFTER UPDATE trigger guarded by WHEN ((old.* IS DISTINCT FROM new.*)),
--     so a history row is written only when at least one column actually changed.
--     IS DISTINCT FROM is used instead of <> because it treats NULL = NULL as
--     equal, avoiding spurious history rows for NULL-to-NULL "changes".
CREATE TRIGGER admin_history_update AFTER UPDATE ON ml_app.admins FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_admin_update();
--
-- Name: app_configurations app_configuration_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER app_configuration_history_insert AFTER INSERT ON ml_app.app_configurations FOR EACH ROW EXECUTE PROCEDURE ml_app.log_app_configuration_update();
--
-- Name: app_configurations app_configuration_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER app_configuration_history_update AFTER UPDATE ON ml_app.app_configurations FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_app_configuration_update();
--
-- Name: app_types app_type_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER app_type_history_insert AFTER INSERT ON ml_app.app_types FOR EACH ROW EXECUTE PROCEDURE ml_app.log_app_type_update();
--
-- Name: app_types app_type_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER app_type_history_update AFTER UPDATE ON ml_app.app_types FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_app_type_update();
--
-- Name: colleges college_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER college_history_insert AFTER INSERT ON ml_app.colleges FOR EACH ROW EXECUTE PROCEDURE ml_app.log_college_update();
--
-- Name: colleges college_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER college_history_update AFTER UPDATE ON ml_app.colleges FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_college_update();
--
-- Name: config_libraries config_library_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER config_library_history_insert AFTER INSERT ON ml_app.config_libraries FOR EACH ROW EXECUTE PROCEDURE ml_app.log_config_library_update();
--
-- Name: config_libraries config_library_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER config_library_history_update AFTER UPDATE ON ml_app.config_libraries FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_config_library_update();
--
-- Name: dynamic_models dynamic_model_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER dynamic_model_history_insert AFTER INSERT ON ml_app.dynamic_models FOR EACH ROW EXECUTE PROCEDURE ml_app.log_dynamic_model_update();
--
-- Name: dynamic_models dynamic_model_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER dynamic_model_history_update AFTER UPDATE ON ml_app.dynamic_models FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_dynamic_model_update();
--
-- Name: external_identifiers external_identifier_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER external_identifier_history_insert AFTER INSERT ON ml_app.external_identifiers FOR EACH ROW EXECUTE PROCEDURE ml_app.log_external_identifier_update();
--
-- Name: external_identifiers external_identifier_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER external_identifier_history_update AFTER UPDATE ON ml_app.external_identifiers FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_external_identifier_update();
--
-- Name: external_links external_link_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER external_link_history_insert AFTER INSERT ON ml_app.external_links FOR EACH ROW EXECUTE PROCEDURE ml_app.log_external_link_update();
--
-- Name: external_links external_link_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER external_link_history_update AFTER UPDATE ON ml_app.external_links FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_external_link_update();
--
-- Name: general_selections general_selection_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER general_selection_history_insert AFTER INSERT ON ml_app.general_selections FOR EACH ROW EXECUTE PROCEDURE ml_app.log_general_selection_update();
--
-- Name: general_selections general_selection_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER general_selection_history_update AFTER UPDATE ON ml_app.general_selections FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_general_selection_update();
--
-- Name: item_flags item_flag_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER item_flag_history_insert AFTER INSERT ON ml_app.item_flags FOR EACH ROW EXECUTE PROCEDURE ml_app.log_item_flag_update();
--
-- Name: item_flags item_flag_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER item_flag_history_update AFTER UPDATE ON ml_app.item_flags FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_item_flag_update();
--
-- Name: item_flag_names item_flag_name_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER item_flag_name_history_insert AFTER INSERT ON ml_app.item_flag_names FOR EACH ROW EXECUTE PROCEDURE ml_app.log_item_flag_name_update();
--
-- Name: item_flag_names item_flag_name_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER item_flag_name_history_update AFTER UPDATE ON ml_app.item_flag_names FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_item_flag_name_update();
--
-- Name: role_descriptions log_role_description_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER log_role_description_history_insert AFTER INSERT ON ml_app.role_descriptions FOR EACH ROW EXECUTE PROCEDURE ml_app.role_description_history_upd();
--
-- Name: role_descriptions log_role_description_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER log_role_description_history_update AFTER UPDATE ON ml_app.role_descriptions FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.role_description_history_upd();
--
-- Name: message_templates message_template_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER message_template_history_insert AFTER INSERT ON ml_app.message_templates FOR EACH ROW EXECUTE PROCEDURE ml_app.log_message_template_update();
--
-- Name: message_templates message_template_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER message_template_history_update AFTER UPDATE ON ml_app.message_templates FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_message_template_update();
--
-- Name: nfs_store_archived_files nfs_store_archived_file_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_archived_file_history_insert AFTER INSERT ON ml_app.nfs_store_archived_files FOR EACH ROW EXECUTE PROCEDURE ml_app.log_nfs_store_archived_file_update();
--
-- Name: nfs_store_archived_files nfs_store_archived_file_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_archived_file_history_update AFTER UPDATE ON ml_app.nfs_store_archived_files FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_nfs_store_archived_file_update();
--
-- Name: nfs_store_containers nfs_store_container_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_container_history_insert AFTER INSERT ON ml_app.nfs_store_containers FOR EACH ROW EXECUTE PROCEDURE ml_app.log_nfs_store_container_update();
--
-- Name: nfs_store_containers nfs_store_container_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_container_history_update AFTER UPDATE ON ml_app.nfs_store_containers FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_nfs_store_container_update();
--
-- Name: nfs_store_filters nfs_store_filter_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_filter_history_insert AFTER INSERT ON ml_app.nfs_store_filters FOR EACH ROW EXECUTE PROCEDURE ml_app.log_nfs_store_filter_update();
--
-- Name: nfs_store_filters nfs_store_filter_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_filter_history_update AFTER UPDATE ON ml_app.nfs_store_filters FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_nfs_store_filter_update();
--
-- Name: nfs_store_stored_files nfs_store_stored_file_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_stored_file_history_insert AFTER INSERT ON ml_app.nfs_store_stored_files FOR EACH ROW EXECUTE PROCEDURE ml_app.log_nfs_store_stored_file_update();
--
-- Name: nfs_store_stored_files nfs_store_stored_file_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER nfs_store_stored_file_history_update AFTER UPDATE ON ml_app.nfs_store_stored_files FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_nfs_store_stored_file_update();
--
-- Name: page_layouts page_layout_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER page_layout_history_insert AFTER INSERT ON ml_app.page_layouts FOR EACH ROW EXECUTE PROCEDURE ml_app.log_page_layout_update();
--
-- Name: page_layouts page_layout_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER page_layout_history_update AFTER UPDATE ON ml_app.page_layouts FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_page_layout_update();
--
-- Name: player_contacts player_contact_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_contact_history_insert AFTER INSERT ON ml_app.player_contacts FOR EACH ROW EXECUTE PROCEDURE ml_app.log_player_contact_update();
--
-- Name: player_contacts player_contact_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_contact_history_update AFTER UPDATE ON ml_app.player_contacts FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_player_contact_update();
--
-- Name: player_contacts player_contact_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
-- BEFORE triggers (unlike the AFTER history loggers above) can rewrite NEW before
-- it is stored; presumably handle_player_contact_update() normalizes the row --
-- confirm against the function definition, which is outside this section.
CREATE TRIGGER player_contact_insert BEFORE INSERT ON ml_app.player_contacts FOR EACH ROW EXECUTE PROCEDURE ml_app.handle_player_contact_update();
--
-- Name: player_contacts player_contact_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_contact_update BEFORE UPDATE ON ml_app.player_contacts FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.handle_player_contact_update();
--
-- Name: player_infos player_info_before_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_info_before_update BEFORE UPDATE ON ml_app.player_infos FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.handle_player_info_before_update();
--
-- Name: player_infos player_info_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_info_history_insert AFTER INSERT ON ml_app.player_infos FOR EACH ROW EXECUTE PROCEDURE ml_app.log_player_info_update();
--
-- Name: player_infos player_info_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_info_history_update AFTER UPDATE ON ml_app.player_infos FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_player_info_update();
--
-- Name: player_infos player_info_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_info_insert AFTER INSERT ON ml_app.player_infos FOR EACH ROW EXECUTE PROCEDURE ml_app.update_master_with_player_info();
--
-- Name: player_infos player_info_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER player_info_update AFTER UPDATE ON ml_app.player_infos FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.update_master_with_player_info();
--
-- Name: pro_infos pro_info_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER pro_info_insert AFTER INSERT ON ml_app.pro_infos FOR EACH ROW EXECUTE PROCEDURE ml_app.update_master_with_pro_info();
--
-- Name: pro_infos pro_info_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER pro_info_update AFTER UPDATE ON ml_app.pro_infos FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.update_master_with_pro_info();
--
-- Name: protocol_events protocol_event_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER protocol_event_history_insert AFTER INSERT ON ml_app.protocol_events FOR EACH ROW EXECUTE PROCEDURE ml_app.log_protocol_event_update();
--
-- Name: protocol_events protocol_event_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER protocol_event_history_update AFTER UPDATE ON ml_app.protocol_events FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_protocol_event_update();
--
-- Name: protocols protocol_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER protocol_history_insert AFTER INSERT ON ml_app.protocols FOR EACH ROW EXECUTE PROCEDURE ml_app.log_protocol_update();
--
-- Name: protocols protocol_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER protocol_history_update AFTER UPDATE ON ml_app.protocols FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_protocol_update();
--
-- Name: rc_stage_cif_copy rc_cis_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER rc_cis_update BEFORE UPDATE ON ml_app.rc_stage_cif_copy FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.handle_rc_cis_update();
--
-- Name: reports report_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER report_history_insert AFTER INSERT ON ml_app.reports FOR EACH ROW EXECUTE PROCEDURE ml_app.log_report_update();
--
-- Name: reports report_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER report_history_update AFTER UPDATE ON ml_app.reports FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_report_update();
--
-- Name: scantrons scantron_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER scantron_history_insert AFTER INSERT ON ml_app.scantrons FOR EACH ROW EXECUTE PROCEDURE ml_app.log_scantron_update();
--
-- Name: scantrons scantron_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER scantron_history_update AFTER UPDATE ON ml_app.scantrons FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_scantron_update();
--
-- Name: sub_processes sub_process_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER sub_process_history_insert AFTER INSERT ON ml_app.sub_processes FOR EACH ROW EXECUTE PROCEDURE ml_app.log_sub_process_update();
--
-- Name: sub_processes sub_process_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER sub_process_history_update AFTER UPDATE ON ml_app.sub_processes FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_sub_process_update();
--
-- Name: trackers tracker_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER tracker_history_insert AFTER INSERT ON ml_app.trackers FOR EACH ROW EXECUTE PROCEDURE ml_app.log_tracker_update();
--
-- Name: tracker_history tracker_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
-- Two triggers named tracker_history_update follow (here on tracker_history, next
-- on trackers). This is valid in PostgreSQL: trigger names are scoped per table.
CREATE TRIGGER tracker_history_update BEFORE UPDATE ON ml_app.tracker_history FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.handle_tracker_history_update();
--
-- Name: trackers tracker_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER tracker_history_update AFTER UPDATE ON ml_app.trackers FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_tracker_update();
--
-- Name: tracker_history tracker_record_delete; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER tracker_record_delete AFTER DELETE ON ml_app.tracker_history FOR EACH ROW EXECUTE PROCEDURE ml_app.handle_delete();
--
-- Name: trackers tracker_upsert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER tracker_upsert BEFORE INSERT ON ml_app.trackers FOR EACH ROW EXECUTE PROCEDURE ml_app.tracker_upsert();
--
-- Name: user_access_controls user_access_control_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_access_control_history_insert AFTER INSERT ON ml_app.user_access_controls FOR EACH ROW EXECUTE PROCEDURE ml_app.log_user_access_control_update();
--
-- Name: user_access_controls user_access_control_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_access_control_history_update AFTER UPDATE ON ml_app.user_access_controls FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_user_access_control_update();
--
-- Name: user_authorizations user_authorization_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_authorization_history_insert AFTER INSERT ON ml_app.user_authorizations FOR EACH ROW EXECUTE PROCEDURE ml_app.log_user_authorization_update();
--
-- Name: user_authorizations user_authorization_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_authorization_history_update AFTER UPDATE ON ml_app.user_authorizations FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_user_authorization_update();
--
-- Name: users user_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_history_insert AFTER INSERT ON ml_app.users FOR EACH ROW EXECUTE PROCEDURE ml_app.log_user_update();
--
-- Name: users user_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_history_update AFTER UPDATE ON ml_app.users FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_user_update();
--
-- Name: user_roles user_role_history_insert; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_role_history_insert AFTER INSERT ON ml_app.user_roles FOR EACH ROW EXECUTE PROCEDURE ml_app.log_user_role_update();
--
-- Name: user_roles user_role_history_update; Type: TRIGGER; Schema: ml_app; Owner: -
--
CREATE TRIGGER user_role_history_update AFTER UPDATE ON ml_app.user_roles FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ml_app.log_user_role_update();
--
-- Name: redcap_data_collection_instruments log_redcap_data_collection_instrument_history_insert; Type: TRIGGER; Schema: ref_data; Owner: -
--
CREATE TRIGGER log_redcap_data_collection_instrument_history_insert AFTER INSERT ON ref_data.redcap_data_collection_instruments FOR EACH ROW EXECUTE PROCEDURE ref_data.redcap_data_collection_instrument_history_upd();
--
-- Name: redcap_data_collection_instruments log_redcap_data_collection_instrument_history_update; Type: TRIGGER; Schema: ref_data; Owner: -
--
CREATE TRIGGER log_redcap_data_collection_instrument_history_update AFTER UPDATE ON ref_data.redcap_data_collection_instruments FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ref_data.redcap_data_collection_instrument_history_upd();
--
-- Name: redcap_project_users log_redcap_project_user_history_insert; Type: TRIGGER; Schema: ref_data; Owner: -
--
CREATE TRIGGER log_redcap_project_user_history_insert AFTER INSERT ON ref_data.redcap_project_users FOR EACH ROW EXECUTE PROCEDURE ref_data.redcap_project_user_history_upd();
--
-- Name: redcap_project_users log_redcap_project_user_history_update; Type: TRIGGER; Schema: ref_data; Owner: -
--
CREATE TRIGGER log_redcap_project_user_history_update AFTER UPDATE ON ref_data.redcap_project_users FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE ref_data.redcap_project_user_history_upd();
--
-- Foreign-key constraints. Each *_history table references its live counterpart
-- plus the masters/users/admins rows captured on the history entry. None of these
-- specify ON DELETE/ON UPDATE, so PostgreSQL's default NO ACTION applies:
-- referenced rows cannot be deleted while history rows still point at them.
-- ALTER TABLE ONLY restricts the change to the named table (not inheritance
-- children) -- standard pg_dump output.
--
-- Name: accuracy_score_history fk_accuracy_score_history_accuracy_scores; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.accuracy_score_history
    ADD CONSTRAINT fk_accuracy_score_history_accuracy_scores FOREIGN KEY (accuracy_score_id) REFERENCES ml_app.accuracy_scores(id);
--
-- Name: activity_log_player_contact_phone_history fk_activity_log_player_contact_phone_history_activity_log_playe; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phone_history
    ADD CONSTRAINT fk_activity_log_player_contact_phone_history_activity_log_playe FOREIGN KEY (activity_log_player_contact_phone_id) REFERENCES ml_app.activity_log_player_contact_phones(id);
--
-- Name: activity_log_player_contact_phone_history fk_activity_log_player_contact_phone_history_masters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phone_history
    ADD CONSTRAINT fk_activity_log_player_contact_phone_history_masters FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: activity_log_player_contact_phone_history fk_activity_log_player_contact_phone_history_player_contact_pho; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phone_history
    ADD CONSTRAINT fk_activity_log_player_contact_phone_history_player_contact_pho FOREIGN KEY (player_contact_id) REFERENCES ml_app.player_contacts(id);
--
-- Name: activity_log_player_contact_phone_history fk_activity_log_player_contact_phone_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phone_history
    ADD CONSTRAINT fk_activity_log_player_contact_phone_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: address_history fk_address_history_addresses; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.address_history
    ADD CONSTRAINT fk_address_history_addresses FOREIGN KEY (address_id) REFERENCES ml_app.addresses(id);
--
-- Name: address_history fk_address_history_masters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.address_history
    ADD CONSTRAINT fk_address_history_masters FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: address_history fk_address_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.address_history
    ADD CONSTRAINT fk_address_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: admin_history fk_admin_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_history
    ADD CONSTRAINT fk_admin_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: admin_history fk_admin_history_upd_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_history
    ADD CONSTRAINT fk_admin_history_upd_admins FOREIGN KEY (updated_by_admin_id) REFERENCES ml_app.admins(id);
--
-- Name: app_configuration_history fk_app_configuration_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configuration_history
    ADD CONSTRAINT fk_app_configuration_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: app_configuration_history fk_app_configuration_history_app_configurations; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configuration_history
    ADD CONSTRAINT fk_app_configuration_history_app_configurations FOREIGN KEY (app_configuration_id) REFERENCES ml_app.app_configurations(id);
--
-- Name: app_type_history fk_app_type_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_type_history
    ADD CONSTRAINT fk_app_type_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: app_type_history fk_app_type_history_app_types; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_type_history
    ADD CONSTRAINT fk_app_type_history_app_types FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: college_history fk_college_history_colleges; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.college_history
    ADD CONSTRAINT fk_college_history_colleges FOREIGN KEY (college_id) REFERENCES ml_app.colleges(id);
--
-- Name: dynamic_model_history fk_dynamic_model_history_dynamic_models; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.dynamic_model_history
    ADD CONSTRAINT fk_dynamic_model_history_dynamic_models FOREIGN KEY (dynamic_model_id) REFERENCES ml_app.dynamic_models(id);
--
-- Name: external_link_history fk_external_link_history_external_links; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_link_history
    ADD CONSTRAINT fk_external_link_history_external_links FOREIGN KEY (external_link_id) REFERENCES ml_app.external_links(id);
--
-- Name: general_selection_history fk_general_selection_history_general_selections; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.general_selection_history
    ADD CONSTRAINT fk_general_selection_history_general_selections FOREIGN KEY (general_selection_id) REFERENCES ml_app.general_selections(id);
--
-- Name: item_flag_history fk_item_flag_history_item_flags; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_history
    ADD CONSTRAINT fk_item_flag_history_item_flags FOREIGN KEY (item_flag_id) REFERENCES ml_app.item_flags(id);
--
-- Name: item_flag_name_history fk_item_flag_name_history_item_flag_names; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_name_history
    ADD CONSTRAINT fk_item_flag_name_history_item_flag_names FOREIGN KEY (item_flag_name_id) REFERENCES ml_app.item_flag_names(id);
--
-- Name: message_template_history fk_message_template_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_template_history
    ADD CONSTRAINT fk_message_template_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: message_template_history fk_message_template_history_message_templates; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_template_history
    ADD CONSTRAINT fk_message_template_history_message_templates FOREIGN KEY (message_template_id) REFERENCES ml_app.message_templates(id);
--
-- Name: nfs_store_archived_file_history fk_nfs_store_archived_file_history_nfs_store_archived_files; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_file_history
    ADD CONSTRAINT fk_nfs_store_archived_file_history_nfs_store_archived_files FOREIGN KEY (nfs_store_archived_file_id) REFERENCES ml_app.nfs_store_archived_files(id);
--
-- Name: nfs_store_archived_file_history fk_nfs_store_archived_file_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_file_history
    ADD CONSTRAINT fk_nfs_store_archived_file_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_container_history fk_nfs_store_container_history_masters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_container_history
    ADD CONSTRAINT fk_nfs_store_container_history_masters FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: nfs_store_container_history fk_nfs_store_container_history_nfs_store_containers; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_container_history
    ADD CONSTRAINT fk_nfs_store_container_history_nfs_store_containers FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: nfs_store_container_history fk_nfs_store_container_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_container_history
    ADD CONSTRAINT fk_nfs_store_container_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_filter_history fk_nfs_store_filter_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filter_history
    ADD CONSTRAINT fk_nfs_store_filter_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_filter_history fk_nfs_store_filter_history_nfs_store_filters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filter_history
    ADD CONSTRAINT fk_nfs_store_filter_history_nfs_store_filters FOREIGN KEY (nfs_store_filter_id) REFERENCES ml_app.nfs_store_filters(id);
--
-- Name: nfs_store_stored_file_history fk_nfs_store_stored_file_history_nfs_store_stored_files; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_file_history
    ADD CONSTRAINT fk_nfs_store_stored_file_history_nfs_store_stored_files FOREIGN KEY (nfs_store_stored_file_id) REFERENCES ml_app.nfs_store_stored_files(id);
--
-- Name: nfs_store_stored_file_history fk_nfs_store_stored_file_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_file_history
    ADD CONSTRAINT fk_nfs_store_stored_file_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: page_layout_history fk_page_layout_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layout_history
    ADD CONSTRAINT fk_page_layout_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: page_layout_history fk_page_layout_history_page_layouts; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layout_history
    ADD CONSTRAINT fk_page_layout_history_page_layouts FOREIGN KEY (page_layout_id) REFERENCES ml_app.page_layouts(id);
--
-- Name: player_contact_history fk_player_contact_history_masters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contact_history
    ADD CONSTRAINT fk_player_contact_history_masters FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: player_contact_history fk_player_contact_history_player_contacts; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contact_history
    ADD CONSTRAINT fk_player_contact_history_player_contacts FOREIGN KEY (player_contact_id) REFERENCES ml_app.player_contacts(id);
--
-- Name: player_contact_history fk_player_contact_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contact_history
    ADD CONSTRAINT fk_player_contact_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: player_info_history fk_player_info_history_masters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_info_history
    ADD CONSTRAINT fk_player_info_history_masters FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: player_info_history fk_player_info_history_player_infos; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_info_history
    ADD CONSTRAINT fk_player_info_history_player_infos FOREIGN KEY (player_info_id) REFERENCES ml_app.player_infos(id);
--
-- Name: player_info_history fk_player_info_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_info_history
    ADD CONSTRAINT fk_player_info_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: protocol_event_history fk_protocol_event_history_protocol_events; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_event_history
    ADD CONSTRAINT fk_protocol_event_history_protocol_events FOREIGN KEY (protocol_event_id) REFERENCES ml_app.protocol_events(id);
--
-- Name: protocol_history fk_protocol_history_protocols; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_history
    ADD CONSTRAINT fk_protocol_history_protocols FOREIGN KEY (protocol_id) REFERENCES ml_app.protocols(id);
--
-- Name: masters fk_rails_00b234154d; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
-- The fk_rails_* names below are auto-generated by Rails migrations (hash of
-- table + column), unlike the descriptive hand-named constraints above.
ALTER TABLE ONLY ml_app.masters
    ADD CONSTRAINT fk_rails_00b234154d FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: app_configurations fk_rails_00f31a00c4; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configurations
    ADD CONSTRAINT fk_rails_00f31a00c4 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_filters fk_rails_0208c3b54d; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filters
    ADD CONSTRAINT fk_rails_0208c3b54d FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: external_identifier_history fk_rails_0210618434; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifier_history
    ADD CONSTRAINT fk_rails_0210618434 FOREIGN KEY (external_identifier_id) REFERENCES ml_app.external_identifiers(id);
--
-- Name: player_infos fk_rails_08e7f66647; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_infos
    ADD CONSTRAINT fk_rails_08e7f66647 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: user_action_logs fk_rails_08eec3f089; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_action_logs
    ADD CONSTRAINT fk_rails_08eec3f089 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: role_description_history fk_rails_0910ca20ea; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_description_history
    ADD CONSTRAINT fk_rails_0910ca20ea FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: protocol_events fk_rails_0a64e1160a; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_events
    ADD CONSTRAINT fk_rails_0a64e1160a FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_imports fk_rails_0ad81c489c; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_imports
    ADD CONSTRAINT fk_rails_0ad81c489c FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_containers fk_rails_0c84487284; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
-- Self-referencing FK: a container row may point at another container row
-- (presumably parent/child nesting -- confirm against application model).
ALTER TABLE ONLY ml_app.nfs_store_containers
    ADD CONSTRAINT fk_rails_0c84487284 FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: nfs_store_imports fk_rails_0d30944d1b; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_imports
    ADD CONSTRAINT fk_rails_0d30944d1b FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: nfs_store_stored_files fk_rails_0de144234e; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_files
    ADD CONSTRAINT fk_rails_0de144234e FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: nfs_store_trash_actions fk_rails_0e2ecd8d43; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_trash_actions
    ADD CONSTRAINT fk_rails_0e2ecd8d43 FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: users fk_rails_1694bfe639; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users
    ADD CONSTRAINT fk_rails_1694bfe639 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: activity_log_history fk_rails_16d57266f7; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_history
    ADD CONSTRAINT fk_rails_16d57266f7 FOREIGN KEY (activity_log_id) REFERENCES ml_app.activity_logs(id);
--
-- Name: user_roles fk_rails_174e058eb3; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_roles
    ADD CONSTRAINT fk_rails_174e058eb3 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: scantrons fk_rails_1a7e2b01e0; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantrons
    ADD CONSTRAINT fk_rails_1a7e2b01e0 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_stored_files fk_rails_1cc4562569; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_stored_files
    ADD CONSTRAINT fk_rails_1cc4562569 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
-- Name: activity_log_player_contact_phones fk_rails_1d67a3e7f2; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phones
ADD CONSTRAINT fk_rails_1d67a3e7f2 FOREIGN KEY (protocol_id) REFERENCES ml_app.protocols(id);
--
-- Name: config_library_history fk_rails_1ec40f248c; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_library_history
ADD CONSTRAINT fk_rails_1ec40f248c FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: sub_processes fk_rails_1fc7475261; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_processes
ADD CONSTRAINT fk_rails_1fc7475261 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: pro_infos fk_rails_20667815e3; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.pro_infos
ADD CONSTRAINT fk_rails_20667815e3 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: item_flag_names fk_rails_22ccfd95e1; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flag_names
ADD CONSTRAINT fk_rails_22ccfd95e1 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: player_infos fk_rails_23cd255bc6; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_infos
ADD CONSTRAINT fk_rails_23cd255bc6 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_containers fk_rails_2708bd6a94; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_containers
ADD CONSTRAINT fk_rails_2708bd6a94 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: nfs_store_downloads fk_rails_272f69e6af; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_downloads
ADD CONSTRAINT fk_rails_272f69e6af FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: role_descriptions fk_rails_291bbea3bc; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_descriptions
ADD CONSTRAINT fk_rails_291bbea3bc FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: nfs_store_archived_files fk_rails_2b59e23148; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_files
ADD CONSTRAINT fk_rails_2b59e23148 FOREIGN KEY (nfs_store_stored_file_id) REFERENCES ml_app.nfs_store_stored_files(id);
--
-- Name: model_references fk_rails_2d8072edea; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.model_references
ADD CONSTRAINT fk_rails_2d8072edea FOREIGN KEY (to_record_master_id) REFERENCES ml_app.masters(id);
--
-- Name: activity_log_player_contact_phones fk_rails_2de1cadfad; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phones
ADD CONSTRAINT fk_rails_2de1cadfad FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: nfs_store_archived_files fk_rails_2eab578259; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_files
ADD CONSTRAINT fk_rails_2eab578259 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: user_roles fk_rails_318345354e; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_roles
ADD CONSTRAINT fk_rails_318345354e FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: admin_action_logs fk_rails_3389f178f6; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admin_action_logs
ADD CONSTRAINT fk_rails_3389f178f6 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: page_layouts fk_rails_37a2f11066; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layouts
ADD CONSTRAINT fk_rails_37a2f11066 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: message_notifications fk_rails_3a3553e146; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_notifications
ADD CONSTRAINT fk_rails_3a3553e146 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: nfs_store_uploads fk_rails_3f5167a964; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_uploads
ADD CONSTRAINT fk_rails_3f5167a964 FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: trackers fk_rails_447d125f63; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT fk_rails_447d125f63 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: scantrons fk_rails_45205ed085; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantrons
ADD CONSTRAINT fk_rails_45205ed085 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: role_description_history fk_rails_47581bba71; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_description_history
ADD CONSTRAINT fk_rails_47581bba71 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: trackers fk_rails_47b051d356; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT fk_rails_47b051d356 FOREIGN KEY (sub_process_id) REFERENCES ml_app.sub_processes(id);
--
-- Name: addresses fk_rails_48c9e0c5a2; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.addresses
ADD CONSTRAINT fk_rails_48c9e0c5a2 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: colleges fk_rails_49306e4f49; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.colleges
ADD CONSTRAINT fk_rails_49306e4f49 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: model_references fk_rails_4bbf83b940; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.model_references
ADD CONSTRAINT fk_rails_4bbf83b940 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: users_contact_infos fk_rails_4decdf690b; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users_contact_infos
ADD CONSTRAINT fk_rails_4decdf690b FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: message_templates fk_rails_4fe5122ed4; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_templates
ADD CONSTRAINT fk_rails_4fe5122ed4 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_uploads fk_rails_4ff6d28f98; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_uploads
ADD CONSTRAINT fk_rails_4ff6d28f98 FOREIGN KEY (nfs_store_stored_file_id) REFERENCES ml_app.nfs_store_stored_files(id);
--
-- Name: exception_logs fk_rails_51ae125c4f; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.exception_logs
ADD CONSTRAINT fk_rails_51ae125c4f FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: protocol_events fk_rails_564af80fb6; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocol_events
ADD CONSTRAINT fk_rails_564af80fb6 FOREIGN KEY (sub_process_id) REFERENCES ml_app.sub_processes(id);
--
-- Name: external_identifier_history fk_rails_5b0628cf42; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifier_history
ADD CONSTRAINT fk_rails_5b0628cf42 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: activity_log_player_contact_phones fk_rails_5ce1857310; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phones
ADD CONSTRAINT fk_rails_5ce1857310 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: trackers fk_rails_623e0ca5ac; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT fk_rails_623e0ca5ac FOREIGN KEY (protocol_id) REFERENCES ml_app.protocols(id);
--
-- Name: nfs_store_user_file_actions fk_rails_639da31037; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_user_file_actions
ADD CONSTRAINT fk_rails_639da31037 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: app_configurations fk_rails_647c63b069; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configurations
ADD CONSTRAINT fk_rails_647c63b069 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: nfs_store_containers fk_rails_6a3d7bf39f; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_containers
ADD CONSTRAINT fk_rails_6a3d7bf39f FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: users fk_rails_6a971dc818; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users
ADD CONSTRAINT fk_rails_6a971dc818 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: protocols fk_rails_6de4fd560d; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocols
ADD CONSTRAINT fk_rails_6de4fd560d FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: tracker_history fk_rails_6e050927c2; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT fk_rails_6e050927c2 FOREIGN KEY (tracker_id) REFERENCES ml_app.trackers(id);
--
-- Name: accuracy_scores fk_rails_70c17e88fd; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.accuracy_scores
ADD CONSTRAINT fk_rails_70c17e88fd FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: external_identifiers fk_rails_7218113eac; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_identifiers
ADD CONSTRAINT fk_rails_7218113eac FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: player_contacts fk_rails_72b1afe72f; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contacts
ADD CONSTRAINT fk_rails_72b1afe72f FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_move_actions fk_rails_75138f1972; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_move_actions
ADD CONSTRAINT fk_rails_75138f1972 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_filters fk_rails_776e17eafd; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filters
ADD CONSTRAINT fk_rails_776e17eafd FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: users_contact_infos fk_rails_7808f5fdb3; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.users_contact_infos
ADD CONSTRAINT fk_rails_7808f5fdb3 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: sub_processes fk_rails_7c10a99849; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_processes
ADD CONSTRAINT fk_rails_7c10a99849 FOREIGN KEY (protocol_id) REFERENCES ml_app.protocols(id);
--
-- Name: user_access_controls fk_rails_8108e25f83; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_controls
ADD CONSTRAINT fk_rails_8108e25f83 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: tracker_history fk_rails_83aa075398; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT fk_rails_83aa075398 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: pro_infos fk_rails_86cecb1e36; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.pro_infos
ADD CONSTRAINT fk_rails_86cecb1e36 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: config_library_history fk_rails_88664b466b; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_library_history
ADD CONSTRAINT fk_rails_88664b466b FOREIGN KEY (config_library_id) REFERENCES ml_app.config_libraries(id);
--
-- Name: app_types fk_rails_8be93bcf4b; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_types
ADD CONSTRAINT fk_rails_8be93bcf4b FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: tracker_history fk_rails_9513fd1c35; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT fk_rails_9513fd1c35 FOREIGN KEY (sub_process_id) REFERENCES ml_app.sub_processes(id);
--
-- Name: sage_assignments fk_rails_971255ec2c; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sage_assignments
ADD CONSTRAINT fk_rails_971255ec2c FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: protocols fk_rails_990daa5f76; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.protocols
ADD CONSTRAINT fk_rails_990daa5f76 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: role_description_history fk_rails_9d88430088; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_description_history
ADD CONSTRAINT fk_rails_9d88430088 FOREIGN KEY (role_description_id) REFERENCES ml_app.role_descriptions(id);
--
-- Name: tracker_history fk_rails_9e92bdfe65; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT fk_rails_9e92bdfe65 FOREIGN KEY (protocol_event_id) REFERENCES ml_app.protocol_events(id);
--
-- Name: tracker_history fk_rails_9f5797d684; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT fk_rails_9f5797d684 FOREIGN KEY (protocol_id) REFERENCES ml_app.protocols(id);
--
-- Name: addresses fk_rails_a44670b00a; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.addresses
ADD CONSTRAINT fk_rails_a44670b00a FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: model_references fk_rails_a4eb981c4a; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.model_references
ADD CONSTRAINT fk_rails_a4eb981c4a FOREIGN KEY (from_record_master_id) REFERENCES ml_app.masters(id);
--
-- Name: user_history fk_rails_af2f6ffc55; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_history
ADD CONSTRAINT fk_rails_af2f6ffc55 FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: activity_log_player_contact_phones fk_rails_b071294797; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.activity_log_player_contact_phones
ADD CONSTRAINT fk_rails_b071294797 FOREIGN KEY (player_contact_id) REFERENCES ml_app.player_contacts(id);
--
-- Name: colleges fk_rails_b0a6220067; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.colleges
ADD CONSTRAINT fk_rails_b0a6220067 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: reports fk_rails_b138baacff; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.reports
ADD CONSTRAINT fk_rails_b138baacff FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: imports fk_rails_b1e2154c26; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.imports
ADD CONSTRAINT fk_rails_b1e2154c26 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: user_roles fk_rails_b345649dfe; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_roles
ADD CONSTRAINT fk_rails_b345649dfe FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: trackers fk_rails_b822840dc1; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT fk_rails_b822840dc1 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: trackers fk_rails_bb6af37155; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT fk_rails_bb6af37155 FOREIGN KEY (protocol_event_id) REFERENCES ml_app.protocol_events(id);
--
-- Name: imports_model_generators fk_rails_bd9f10d2c7; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.imports_model_generators
ADD CONSTRAINT fk_rails_bd9f10d2c7 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_uploads fk_rails_bdb308087e; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_uploads
ADD CONSTRAINT fk_rails_bdb308087e FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: admins fk_rails_c05d151591; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.admins
ADD CONSTRAINT fk_rails_c05d151591 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_move_actions fk_rails_c1ea9a5fd9; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_move_actions
ADD CONSTRAINT fk_rails_c1ea9a5fd9 FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: item_flags fk_rails_c2d5bb8930; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flags
ADD CONSTRAINT fk_rails_c2d5bb8930 FOREIGN KEY (item_flag_name_id) REFERENCES ml_app.item_flag_names(id);
--
-- Name: nfs_store_user_file_actions fk_rails_c423dc1802; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_user_file_actions
ADD CONSTRAINT fk_rails_c423dc1802 FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: tracker_history fk_rails_c55341c576; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT fk_rails_c55341c576 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: exception_logs fk_rails_c720bf523c; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.exception_logs
ADD CONSTRAINT fk_rails_c720bf523c FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: user_action_logs fk_rails_c94bae872a; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_action_logs
ADD CONSTRAINT fk_rails_c94bae872a FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: nfs_store_downloads fk_rails_cd756b42dd; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_downloads
ADD CONSTRAINT fk_rails_cd756b42dd FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: user_action_logs fk_rails_cfc9dc539f; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_action_logs
ADD CONSTRAINT fk_rails_cfc9dc539f FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: message_notifications fk_rails_d3566ee56d; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_notifications
ADD CONSTRAINT fk_rails_d3566ee56d FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: player_contacts fk_rails_d3c0ddde90; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.player_contacts
ADD CONSTRAINT fk_rails_d3c0ddde90 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: config_libraries fk_rails_da3ba4f850; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.config_libraries
ADD CONSTRAINT fk_rails_da3ba4f850 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: item_flags fk_rails_dce5169cfd; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.item_flags
ADD CONSTRAINT fk_rails_dce5169cfd FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: nfs_store_trash_actions fk_rails_de41d50f67; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_trash_actions
ADD CONSTRAINT fk_rails_de41d50f67 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: dynamic_models fk_rails_deec8fcb38; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.dynamic_models
ADD CONSTRAINT fk_rails_deec8fcb38 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_containers fk_rails_e01d928507; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_containers
ADD CONSTRAINT fk_rails_e01d928507 FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: sage_assignments fk_rails_e3c559b547; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sage_assignments
ADD CONSTRAINT fk_rails_e3c559b547 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: page_layouts fk_rails_e410af4010; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.page_layouts
ADD CONSTRAINT fk_rails_e410af4010 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: sage_assignments fk_rails_ebab73db27; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sage_assignments
ADD CONSTRAINT fk_rails_ebab73db27 FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: external_links fk_rails_ebf3863277; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.external_links
ADD CONSTRAINT fk_rails_ebf3863277 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_archived_files fk_rails_ecfa3cb151; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_archived_files
ADD CONSTRAINT fk_rails_ecfa3cb151 FOREIGN KEY (nfs_store_container_id) REFERENCES ml_app.nfs_store_containers(id);
--
-- Name: app_configurations fk_rails_f0ac516fff; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.app_configurations
ADD CONSTRAINT fk_rails_f0ac516fff FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: nfs_store_filters fk_rails_f547361daa; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.nfs_store_filters
ADD CONSTRAINT fk_rails_f547361daa FOREIGN KEY (app_type_id) REFERENCES ml_app.app_types(id);
--
-- Name: general_selections fk_rails_f62500107f; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.general_selections
ADD CONSTRAINT fk_rails_f62500107f FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: role_descriptions fk_rails_f646dbe30d; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.role_descriptions
ADD CONSTRAINT fk_rails_f646dbe30d FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: message_notifications fk_rails_fa6dbd15de; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.message_notifications
ADD CONSTRAINT fk_rails_fa6dbd15de FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: report_history fk_report_history_reports; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.report_history
ADD CONSTRAINT fk_report_history_reports FOREIGN KEY (report_id) REFERENCES ml_app.reports(id);
--
-- Name: scantron_history fk_scantron_history_masters; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantron_history
ADD CONSTRAINT fk_scantron_history_masters FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: scantron_history fk_scantron_history_scantrons; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantron_history
ADD CONSTRAINT fk_scantron_history_scantrons FOREIGN KEY (scantron_table_id) REFERENCES ml_app.scantrons(id);
--
-- Name: scantron_history fk_scantron_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.scantron_history
ADD CONSTRAINT fk_scantron_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: sub_process_history fk_sub_process_history_sub_processes; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.sub_process_history
ADD CONSTRAINT fk_sub_process_history_sub_processes FOREIGN KEY (sub_process_id) REFERENCES ml_app.sub_processes(id);
--
-- Name: user_access_control_history fk_user_access_control_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_control_history
ADD CONSTRAINT fk_user_access_control_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: user_access_control_history fk_user_access_control_history_user_access_controls; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_access_control_history
ADD CONSTRAINT fk_user_access_control_history_user_access_controls FOREIGN KEY (user_access_control_id) REFERENCES ml_app.user_access_controls(id);
--
-- Name: user_authorization_history fk_user_authorization_history_user_authorizations; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_authorization_history
ADD CONSTRAINT fk_user_authorization_history_user_authorizations FOREIGN KEY (user_authorization_id) REFERENCES ml_app.user_authorizations(id);
--
-- Name: user_history fk_user_history_users; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_history
ADD CONSTRAINT fk_user_history_users FOREIGN KEY (user_id) REFERENCES ml_app.users(id);
--
-- Name: user_role_history fk_user_role_history_admins; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_role_history
ADD CONSTRAINT fk_user_role_history_admins FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: user_role_history fk_user_role_history_user_roles; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.user_role_history
ADD CONSTRAINT fk_user_role_history_user_roles FOREIGN KEY (user_role_id) REFERENCES ml_app.user_roles(id);
--
-- Name: rc_cis rc_cis_master_id_fkey; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.rc_cis
ADD CONSTRAINT rc_cis_master_id_fkey FOREIGN KEY (master_id) REFERENCES ml_app.masters(id);
--
-- Name: tracker_history unique_master_protocol_tracker_id; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT unique_master_protocol_tracker_id FOREIGN KEY (master_id, protocol_id, tracker_id) REFERENCES ml_app.trackers(master_id, protocol_id, id);
--
-- Name: trackers valid_protocol_sub_process; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT valid_protocol_sub_process FOREIGN KEY (protocol_id, sub_process_id) REFERENCES ml_app.sub_processes(protocol_id, id) MATCH FULL;
--
-- Name: tracker_history valid_protocol_sub_process; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT valid_protocol_sub_process FOREIGN KEY (protocol_id, sub_process_id) REFERENCES ml_app.sub_processes(protocol_id, id) MATCH FULL;
--
-- Name: trackers valid_sub_process_event; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.trackers
ADD CONSTRAINT valid_sub_process_event FOREIGN KEY (sub_process_id, protocol_event_id) REFERENCES ml_app.protocol_events(sub_process_id, id);
--
-- Name: tracker_history valid_sub_process_event; Type: FK CONSTRAINT; Schema: ml_app; Owner: -
--
ALTER TABLE ONLY ml_app.tracker_history
ADD CONSTRAINT valid_sub_process_event FOREIGN KEY (sub_process_id, protocol_event_id) REFERENCES ml_app.protocol_events(sub_process_id, id);
--
-- Foreign key constraints, schema ref_data (pg_dump "FK CONSTRAINT" section).
-- One statement per line; dump order preserved. Note the cross-schema FKs:
-- every admin_id column here references ml_app.admins(id).
--
ALTER TABLE ONLY ref_data.datadic_variables ADD CONSTRAINT fk_rails_029902d3e3 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.datadic_variable_history ADD CONSTRAINT fk_rails_143e8a7c25 FOREIGN KEY (equivalent_to_id) REFERENCES ref_data.datadic_variables(id);
ALTER TABLE ONLY ref_data.redcap_data_dictionaries ADD CONSTRAINT fk_rails_16cfa46407 FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
ALTER TABLE ONLY ref_data.redcap_data_dictionary_history ADD CONSTRAINT fk_rails_25f366a78c FOREIGN KEY (redcap_data_dictionary_id) REFERENCES ref_data.redcap_data_dictionaries(id);
ALTER TABLE ONLY ref_data.redcap_data_collection_instruments ADD CONSTRAINT fk_rails_2aa7bf926a FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.redcap_client_requests ADD CONSTRAINT fk_rails_32285f308d FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
-- Self-referencing FK: a variable may point at the variable it is equivalent to.
ALTER TABLE ONLY ref_data.datadic_variables ADD CONSTRAINT fk_rails_34eadb0aee FOREIGN KEY (equivalent_to_id) REFERENCES ref_data.datadic_variables(id);
ALTER TABLE ONLY ref_data.redcap_project_users ADD CONSTRAINT fk_rails_38d0954914 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.datadic_choice_history ADD CONSTRAINT fk_rails_42389740a0 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.redcap_data_dictionaries ADD CONSTRAINT fk_rails_4766ebe50f FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.datadic_variable_history ADD CONSTRAINT fk_rails_5302a77293 FOREIGN KEY (datadic_variable_id) REFERENCES ref_data.datadic_variables(id);
ALTER TABLE ONLY ref_data.datadic_variables ADD CONSTRAINT fk_rails_5578e37430 FOREIGN KEY (redcap_data_dictionary_id) REFERENCES ref_data.redcap_data_dictionaries(id);
ALTER TABLE ONLY ref_data.datadic_choice_history ADD CONSTRAINT fk_rails_63103b7cf7 FOREIGN KEY (datadic_choice_id) REFERENCES ref_data.datadic_choices(id);
ALTER TABLE ONLY ref_data.datadic_choices ADD CONSTRAINT fk_rails_67ca4d7e1f FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.datadic_variable_history ADD CONSTRAINT fk_rails_6ba6ab1e1f FOREIGN KEY (redcap_data_dictionary_id) REFERENCES ref_data.redcap_data_dictionaries(id);
ALTER TABLE ONLY ref_data.redcap_data_collection_instrument_history ADD CONSTRAINT fk_rails_6c93846f69 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
ALTER TABLE ONLY ref_data.redcap_project_user_history ADD CONSTRAINT fk_rails_7ba2e90d7d FOREIGN KEY (redcap_project_user_id) REFERENCES ref_data.redcap_project_users(id);
ALTER TABLE ONLY ref_data.redcap_project_user_history ADD CONSTRAINT fk_rails_89af917107 FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
ALTER TABLE ONLY ref_data.redcap_data_dictionary_history ADD CONSTRAINT fk_rails_9a6eca0fe7 FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
ALTER TABLE ONLY ref_data.redcap_project_user_history ADD CONSTRAINT fk_rails_a0bf0fdddb FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: redcap_project_users fk_rails_a6952cc0e8; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_users
ADD CONSTRAINT fk_rails_a6952cc0e8 FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
--
-- Name: redcap_project_admin_history fk_rails_a7610f4fec; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_project_admin_history
ADD CONSTRAINT fk_rails_a7610f4fec FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
--
-- Name: redcap_data_collection_instrument_history fk_rails_cb0b57b6c1; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_collection_instrument_history
ADD CONSTRAINT fk_rails_cb0b57b6c1 FOREIGN KEY (redcap_project_admin_id) REFERENCES ref_data.redcap_project_admins(id);
--
-- Name: datadic_choice_history fk_rails_cb8a1e9d10; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_choice_history
ADD CONSTRAINT fk_rails_cb8a1e9d10 FOREIGN KEY (redcap_data_dictionary_id) REFERENCES ref_data.redcap_data_dictionaries(id);
--
-- Name: redcap_data_collection_instrument_history fk_rails_ce6075441d; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_collection_instrument_history
ADD CONSTRAINT fk_rails_ce6075441d FOREIGN KEY (redcap_data_collection_instrument_id) REFERENCES ref_data.redcap_data_collection_instruments(id);
--
-- Name: datadic_variable_history fk_rails_d7e89fcbde; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_variable_history
ADD CONSTRAINT fk_rails_d7e89fcbde FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- Name: datadic_choices fk_rails_f5497a3583; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.datadic_choices
ADD CONSTRAINT fk_rails_f5497a3583 FOREIGN KEY (redcap_data_dictionary_id) REFERENCES ref_data.redcap_data_dictionaries(id);
--
-- Name: redcap_data_dictionary_history fk_rails_fffede9aa7; Type: FK CONSTRAINT; Schema: ref_data; Owner: -
--
ALTER TABLE ONLY ref_data.redcap_data_dictionary_history
ADD CONSTRAINT fk_rails_fffede9aa7 FOREIGN KEY (admin_id) REFERENCES ml_app.admins(id);
--
-- PostgreSQL database dump complete
--
SET search_path TO ml_app,ref_data;
INSERT INTO "schema_migrations" (version) VALUES
('20150602181200'),
('20150602181229'),
('20150602181400'),
('20150602181925'),
('20150602205642'),
('20150603135202'),
('20150603153758'),
('20150603170429'),
('20150604160659'),
('20150609140033'),
('20150609150931'),
('20150609160545'),
('20150609161656'),
('20150609185229'),
('20150609185749'),
('20150609190556'),
('20150610142403'),
('20150610143629'),
('20150610155810'),
('20150610160257'),
('20150610183502'),
('20150610220253'),
('20150610220320'),
('20150610220451'),
('20150611144834'),
('20150611145259'),
('20150611180303'),
('20150611202453'),
('20150616202753'),
('20150616202829'),
('20150618143506'),
('20150618161857'),
('20150618161945'),
('20150619165405'),
('20150622144725'),
('20150623191520'),
('20150623194212'),
('20150625213040'),
('20150626190344'),
('20150629210656'),
('20150630202829'),
('20150702200308'),
('20150707142702'),
('20150707143233'),
('20150707150524'),
('20150707150615'),
('20150707150921'),
('20150707151004'),
('20150707151010'),
('20150707151032'),
('20150707151129'),
('20150707153720'),
('20150707222630'),
('20150710135307'),
('20150710135959'),
('20150710160209'),
('20150710160215'),
('20150715181110'),
('20150720141845'),
('20150720173900'),
('20150720175827'),
('20150721204937'),
('20150724165441'),
('20150727164955'),
('20150728133359'),
('20150728203820'),
('20150728213254'),
('20150728213551'),
('20150729182424'),
('20150730174055'),
('20150730181206'),
('20150730202422'),
('20150803181029'),
('20150803194546'),
('20150803194551'),
('20150804160523'),
('20150804203710'),
('20150805132950'),
('20150805161302'),
('20150805200932'),
('20150811174323'),
('20150812194032'),
('20150820151214'),
('20150820151728'),
('20150820152721'),
('20150820155555'),
('20150826145029'),
('20150826145125'),
('20150924163412'),
('20150924183936'),
('20151005143945'),
('20151009191559'),
('20151013191910'),
('20151015142035'),
('20151015150733'),
('20151015183136'),
('20151016160248'),
('20151019203248'),
('20151019204910'),
('20151020145339'),
('20151021162145'),
('20151021171534'),
('20151022142507'),
('20151022191658'),
('20151023171217'),
('20151026181305'),
('20151028145802'),
('20151028155426'),
('20151109223309'),
('20151120150828'),
('20151120151912'),
('20151123203524'),
('20151124151501'),
('20151125192206'),
('20151202180745'),
('20151208144918'),
('20151208200918'),
('20151208200919'),
('20151208200920'),
('20151208244916'),
('20151208244917'),
('20151208244918'),
('20151215165127'),
('20151215170733'),
('20151216102328'),
('20151218203119'),
('20160203120436'),
('20160203121701'),
('20160203130714'),
('20160203151737'),
('20160203211330'),
('20160204120512'),
('20160210200918'),
('20160210200919'),
('20170823145313'),
('20170830100037'),
('20170830105123'),
('20170901152707'),
('20170908074038'),
('20170922182052'),
('20170926144234'),
('20171002120537'),
('20171013141835'),
('20171013141837'),
('20171025095942'),
('20171031145807'),
('20171207163040'),
('20171207170748'),
('20180119173411'),
('20180123111956'),
('20180123154108'),
('20180126120818'),
('20180206173516'),
('20180209145336'),
('20180209152723'),
('20180209152747'),
('20180209171641'),
('20180228145731'),
('20180301114206'),
('20180302144109'),
('20180313091440'),
('20180319133539'),
('20180319133540'),
('20180319175721'),
('20180320105954'),
('20180320113757'),
('20180320154951'),
('20180320183512'),
('20180321082612'),
('20180321095805'),
('20180404150536'),
('20180405141059'),
('20180416145033'),
('20180426091838'),
('20180502082334'),
('20180504080300'),
('20180531091440'),
('20180723165621'),
('20180725140502'),
('20180814142112'),
('20180814142559'),
('20180814142560'),
('20180814142561'),
('20180814142562'),
('20180814142924'),
('20180814180843'),
('20180815104221'),
('20180817114138'),
('20180817114157'),
('20180818133205'),
('20180821123717'),
('20180822085118'),
('20180822093147'),
('20180830144523'),
('20180831132605'),
('20180911153518'),
('20180913142103'),
('20180924153547'),
('20181002142656'),
('20181002165822'),
('20181003182428'),
('20181004113953'),
('20181008104204'),
('20181030185123'),
('20181108115216'),
('20181113143210'),
('20181113143327'),
('20181113150331'),
('20181113150713'),
('20181113152652'),
('20181113154525'),
('20181113154855'),
('20181113154920'),
('20181113154942'),
('20181113165948'),
('20181113170144'),
('20181113172429'),
('20181113175031'),
('20181113180608'),
('20181113183446'),
('20181113184022'),
('20181113184516'),
('20181113184920'),
('20181113185315'),
('20181205103333'),
('20181206123849'),
('20181220131156'),
('20181220160047'),
('20190130152053'),
('20190130152208'),
('20190131130024'),
('20190201160559'),
('20190201160606'),
('20190225094021'),
('20190226165932'),
('20190226165938'),
('20190226173917'),
('20190312160404'),
('20190312163119'),
('20190416181222'),
('20190502142561'),
('20190517135351'),
('20190523115611'),
('20190528152006'),
('20190612140618'),
('20190614162317'),
('20190624082535'),
('20190625142421'),
('20190628131713'),
('20190709174613'),
('20190709174638'),
('20190711074003'),
('20190711084434'),
('20190902123518'),
('20190906172361'),
('20191115124723'),
('20191115124732'),
('20200313160640'),
('20200403172361'),
('20200611123849'),
('20200720100000'),
('20200720110000'),
('20200720121356'),
('20200720161000'),
('20200720161100'),
('20200723104100'),
('20200723153130'),
('20200724153400'),
('20200727081305'),
('20200727081306'),
('20200727122116'),
('20200727122117'),
('20200731121100'),
('20200731121144'),
('20200731122147'),
('20200731124515'),
('20200731124908'),
('20200731130750'),
('20200803161100'),
('20200803162444'),
('20200813162728'),
('20200821114133'),
('20200924100402'),
('20200924121742'),
('20200924125253'),
('20200924125350'),
('20201001120642'),
('20201109114833'),
('20201111160935'),
('20201111161035'),
('20201112163129'),
('20210107151553'),
('20210107151556'),
('20210107152851'),
('20210107152900'),
('20210107152911'),
('20210107165838'),
('20210107165840'),
('20210107165841'),
('20210107165842'),
('20210107165843'),
('20210107165844'),
('20210108085826'),
('20210128180947'),
('20210129150044'),
('20210129154600'),
('20210201124324'),
('20210204205746'),
('20210209095546'),
('20210209154901'),
('20210215153201'),
('20210216132458'),
('20210216133011'),
('20210303164631'),
('20210303164632'),
('20210305113828'),
('20210308143952'),
('20210312143952'),
('20210318150132'),
('20210318150446'),
('20210330085617'),
('20210406154800'),
('20210428102016'),
('20210526183942'),
('20210712152134'),
('20210809151207'),
('20210816170804');
| 27.989884 | 391 | 0.718881 |
1647a0db5018ed097b180e33898df7470f3d624d | 813 | ts | TypeScript | edm/src/app/commonmodule/accordion/accordion-group.component.ts | pagumakwana/repo_edm | 231c10d0fff1bc90076a799ec5f66473978c4950 | [
"MIT"
] | 1 | 2020-04-10T12:09:19.000Z | 2020-04-10T12:09:19.000Z | edm/src/app/commonmodule/accordion/accordion-group.component.ts | pagumakwana/repo_edm | 231c10d0fff1bc90076a799ec5f66473978c4950 | [
"MIT"
] | 130 | 2020-04-09T20:27:57.000Z | 2022-03-02T05:14:18.000Z | edm/src/app/commonmodule/accordion/accordion-group.component.ts | pagumakwana/repo_edm | 231c10d0fff1bc90076a799ec5f66473978c4950 | [
"MIT"
] | 1 | 2020-05-18T12:15:20.000Z | 2020-05-18T12:15:20.000Z | import { ChangeDetectionStrategy, Component, Input, Output, EventEmitter } from '@angular/core';
@Component({
selector: 'group',
template: `
<div class="mypanel">
<div class="title" (click)="toggle.emit()">
{{title}}
</div>
<div class="body" [ngClass]="{'hidden': !opened}">
<ng-content></ng-content>
</div>
<div>
`,
styleUrls: ['accordion.component.css'],
// changeDetection: ChangeDetectionStrategy.OnPush
})
export class AccordionGroupComponent {
/**
* If the panel is opened or closed
*/
@Input() opened = false;
/**
* Text to display in the group title bar
*/
@Input() title: string;
/**
* Emitted when user clicks on group titlebar
* @type {EventEmitter<any>}
*/
@Output() toggle: EventEmitter<any> = new EventEmitter<any>();
}
| 22.583333 | 96 | 0.626076 |
9676708f3c9030dfb946a08185465688f8a6d19e | 2,413 | php | PHP | application/models/Category_model.php | anjipavuluri21/ambition | 23760136bf0c3544172a7d77877179c0fbbebdfb | [
"MIT"
] | null | null | null | application/models/Category_model.php | anjipavuluri21/ambition | 23760136bf0c3544172a7d77877179c0fbbebdfb | [
"MIT"
] | 2 | 2021-09-29T17:33:29.000Z | 2022-02-10T20:11:38.000Z | application/models/Category_model.php | anjipavuluri21/ambition | 23760136bf0c3544172a7d77877179c0fbbebdfb | [
"MIT"
] | null | null | null | <?php
/**
 * Category_model
 *
 * CRUD operations for course categories (`course_category` table),
 * joined with `courses` to show the parent course name in listings.
 */
class Category_model extends CI_Model {

    public function __construct() {
        parent::__construct();
    }

    /**
     * Insert a new course category.
     *
     * @param array $insert_data keys: 'course_id', 'course_category'
     * @return string "success" when a row was written, otherwise "fail"
     */
    public function insertCourseCategory($insert_data) {
        $category_list = array(
            'course_id' => $insert_data['course_id'],
            'course_category_name' => $insert_data['course_category'],
            // NOTE(review): 'y-m-d h:i:s' yields a 2-digit year and a
            // 12-hour clock; 'Y-m-d H:i:s' is the usual DATETIME format.
            // Existing rows already use this format — confirm before changing.
            'created_at' => date('y-m-d h:i:s'),
            'created_by' => $this->session->userdata['user_data']['user_id'],
        );
        $this->db->insert('course_category', $category_list);
        if ($this->db->affected_rows() > 0) {
            $response = "success";
        } else {
            $response = "fail";
        }
        return $response;
    }

    /**
     * List all categories with their parent course name.
     *
     * @return array|null result row objects, or null when the table is empty
     */
    public function categoryList() {
        $query = $this->db->query('SELECT course_category.*,courses.course_name
            FROM course_category
            LEFT JOIN courses
            ON course_category.course_id=courses.course_id');
        if ($query->num_rows() > 0) {
            return $query->result();
        }
    }

    /**
     * Delete a course category by id.
     *
     * @param int $id course_category primary key
     * @return string "success" or "fail"
     */
    public function delete_course_category($id) {
        // Security fix: use a bound query parameter instead of string
        // concatenation so a crafted $id cannot inject SQL.
        $this->db->query("DELETE FROM course_category WHERE course_category_id = ?", array($id));
        if ($this->db->affected_rows() > 0) {
            $response = "success";
        } else {
            $response = "fail";
        }
        return $response;
    }

    /**
     * Fetch one category row (for the edit form).
     *
     * @param int $course_category_id
     * @return object|null the row object, or null when not found
     */
    public function editCourseCategory($course_category_id) {
        $query = $this->db->select('*')
            ->from('course_category')
            ->where('course_category_id', $course_category_id)
            ->get();
        return $query->row();
    }

    /**
     * Update an existing category.
     *
     * @param array $update_category keys: 'course_category_id', 'course_id', 'course_category'
     * @return string "success" when a row changed, otherwise "fail"
     */
    public function updateCategory($update_category) {
        $category_data = array(
            'course_id' => $update_category['course_id'],
            'course_category_name' => $update_category['course_category'],
            // Same 2-digit-year format caveat as insertCourseCategory().
            'updated_at' => date('y-m-d h:i:s'),
            'updated_by' => $this->session->userdata['user_data']['user_id'],
        );
        $this->db->where('course_category_id', $update_category['course_category_id']);
        $this->db->update('course_category', $category_data);
        if ($this->db->affected_rows() > 0) {
            $response = "success";
        } else {
            $response = "fail";
        }
        return $response;
    }
}
0bb0d606495610a6af3e36a6f278f11886862d99 | 731 | js | JavaScript | src/test-runner/runner.js | nerdbeere/tower-defense | 95cafabc1bb2fa9bba1e1f288ab347360a658e78 | [
"MIT"
] | null | null | null | src/test-runner/runner.js | nerdbeere/tower-defense | 95cafabc1bb2fa9bba1e1f288ab347360a658e78 | [
"MIT"
] | null | null | null | src/test-runner/runner.js | nerdbeere/tower-defense | 95cafabc1bb2fa9bba1e1f288ab347360a658e78 | [
"MIT"
] | null | null | null | import React from 'react';
import Iframe from './iframe';
/**
 * Bridges the host page and a sandboxed test iframe.
 *
 * Renders an <Iframe> component into `domNode`, pushes source code into
 * it via postMessage, and forwards the stats messages the iframe posts
 * back to a registered callback.
 */
export default class TestRunner {

    /**
     * @param {Element} domNode - mount point for the iframe component.
     * @param {EventTarget} [eventReceiver] - target to listen on for
     *   'message' events; defaults to `window`.
     */
    constructor(domNode, eventReceiver) {
        // Callback registered via onStats(); null until set.
        this._onStats = null;
        this._domNode = domNode;
        (eventReceiver || window).addEventListener('message', this.handleDataReceived.bind(this), false);
    }

    /** Mount the iframe pointing at `iframeSrc` and keep a ref to it. */
    render(iframeSrc) {
        // NOTE(review): React.render is the pre-0.14 top-level API —
        // presumably this project pins an old React version; confirm.
        var iframe = React.render(<Iframe iframeSrc={iframeSrc}/>, this._domNode);
        this._iframeRef = iframe.getIframeRef();
    }

    /** Post `sourceCode` into the iframe window for execution. */
    send(sourceCode) {
        var iframe = this._iframeRef.contentWindow;
        iframe.postMessage(sourceCode, '*');
    }

    /** Register the callback invoked with stats received from the iframe. */
    onStats(fn) {
        this._onStats = fn;
    }

    /** 'message' event handler: unwrap the event payload and forward it. */
    handleDataReceived(data) {
        if (this._onStats) {
            var stats = data.data;
            this._onStats(stats);
        }
    }
}
b9185570e623b7a1a18f9649b467fd9791c0c3f1 | 2,163 | go | Go | cmd/internal/install/install.go | nian8/gop | b7ebd802fb834bddfd016cf8e886788d33c73589 | [
"Apache-2.0"
] | null | null | null | cmd/internal/install/install.go | nian8/gop | b7ebd802fb834bddfd016cf8e886788d33c73589 | [
"Apache-2.0"
] | 1 | 2022-03-31T13:41:27.000Z | 2022-03-31T13:41:27.000Z | cmd/internal/install/install.go | nian8/gop | b7ebd802fb834bddfd016cf8e886788d33c73589 | [
"Apache-2.0"
] | null | null | null | /*
Copyright 2021 The GoPlus Authors (goplus.org)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package install implements the ``gop install'' command.
package install
import (
"fmt"
"os"
"os/exec"
"strings"
"github.com/goplus/gop/cl"
"github.com/goplus/gop/cmd/gengo"
"github.com/goplus/gop/cmd/internal/base"
)
// Cmd - gop install
//
// Command descriptor registered with the gop CLI; its Run hook is
// attached in init below.
var Cmd = &base.Command{
	UsageLine: "gop install [-v] <gopSrcDir|gopSrcFile>",
	Short:     "Build Go+ files and install target to GOBIN",
}

var (
	// flag is this command's dedicated flag set.
	flag = &Cmd.Flag
	// flagVerbose mirrors `go install -v`: print package names as they compile.
	flagVerbose = flag.Bool("v", false, "print the names of packages as they are compiled.")
)

// init wires runCmd as the command entry point.
func init() {
	Cmd.Run = runCmd
}
// runCmd implements ``gop install``. It generates Go code for the Go+
// sources under the given directory (recursively when the argument ends
// with "/...") and then runs ``go install`` there.
//
// Fix: exitCode was declared but never assigned and goCmd's error was
// discarded, so the process always exited 0 even when `go install` failed.
func runCmd(cmd *base.Command, args []string) {
	flag.Parse(args)
	if flag.NArg() < 1 {
		cmd.Usage(os.Stderr)
		return
	}
	var exitCode int
	var recursive bool
	var dir = flag.Arg(0)
	// A trailing "/..." selects the whole directory tree, not one package.
	if strings.HasSuffix(dir, "/...") {
		dir = dir[:len(dir)-4]
		recursive = true
	}
	runner := new(gengo.Runner)
	runner.SetAfter(func(p *gengo.Runner, dir string, flags int) error {
		// Flush any errors accumulated while generating this package.
		errs := p.ResetErrors()
		if errs != nil {
			for _, err := range errs {
				fmt.Fprintln(os.Stderr, err)
			}
			fmt.Fprintln(os.Stderr)
		}
		return nil
	})
	runner.GenGo(dir, recursive, &cl.Config{CacheLoadPkgs: *flagVerbose})
	// Propagate the result of `go install` into the process exit status.
	if err := goCmd(dir, "install", args...); err != nil {
		exitCode = 1
	}
	os.Exit(exitCode)
}
// goCmd runs `go <op> <args...>` in dir, wiring the child process to the
// current process's stdout, stderr and environment. It returns whatever
// error the `go` tool exits with (nil on success).
func goCmd(dir string, op string, args ...string) error {
	argv := append([]string{op}, args...)
	c := exec.Command("go", argv...)
	c.Dir = dir
	c.Stdout = os.Stdout
	c.Stderr = os.Stderr
	c.Env = os.Environ()
	return c.Run()
}
// -----------------------------------------------------------------------------
| 24.303371 | 89 | 0.661119 |
565d7c08d5cdc87d4424ae313dc8edcb85a59455 | 1,031 | sql | SQL | openGaussBase/testcase/KEYWORDS/Current/Opengauss_Function_Keyword_Current_Case0028.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Current/Opengauss_Function_Keyword_Current_Case0028.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Current/Opengauss_Function_Keyword_Current_Case0028.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | -- @testpoint: opengauss关键字current(非保留),作为同义词对象名,部分测试点合理报错
--前置条件
drop table if exists current_test;
create table current_test(id int,name varchar(10));
--关键字不带引号-成功
drop synonym if exists current;
create synonym current for current_test;
insert into current values (1,'ada'),(2, 'bob');
update current set current.name='cici' where current.id=2;
select * from current;
drop synonym if exists current;
--关键字带双引号-成功
drop synonym if exists "current";
create synonym "current" for current_test;
drop synonym if exists "current";
--关键字带单引号-合理报错
drop synonym if exists 'current';
create synonym 'current' for current_test;
insert into 'current' values (1,'ada'),(2, 'bob');
update 'current' set 'current'.name='cici' where 'current'.id=2;
select * from 'current';
--关键字带反引号-合理报错
drop synonym if exists `current`;
create synonym `current` for current_test;
insert into `current` values (1,'ada'),(2, 'bob');
update `current` set `current`.name='cici' where `current`.id=2;
select * from `current`;
drop table if exists current_test; | 31.242424 | 64 | 0.745878 |
569b4da6ba64cb65ff002c58724eeb7793c3c652 | 343 | ts | TypeScript | src/Tips/TipsItem/index.ts | wyj580231/ant-design-mini | 049dc7cf6b79d2618fd889db6ea43799c84bdcbf | [
"MIT"
] | 105 | 2022-02-15T09:01:53.000Z | 2022-03-30T08:39:56.000Z | src/Tips/TipsItem/index.ts | wyj580231/ant-design-mini | 049dc7cf6b79d2618fd889db6ea43799c84bdcbf | [
"MIT"
] | 5 | 2022-02-25T09:39:02.000Z | 2022-03-29T01:54:05.000Z | src/Tips/TipsItem/index.ts | wyj580231/ant-design-mini | 049dc7cf6b79d2618fd889db6ea43799c84bdcbf | [
"MIT"
] | 17 | 2022-02-15T09:12:22.000Z | 2022-03-28T09:21:23.000Z | Component({
methods: {
onButtonTap() {
const { onButtonTap } = this.props;
if (typeof onButtonTap === 'function') {
return onButtonTap();
}
},
onHideTips() {
const { onHideTips } = this.props;
if (typeof onHideTips === 'function') {
return onHideTips(false);
}
},
},
});
| 20.176471 | 46 | 0.516035 |
a8f6b5dc2f433046a747b41d877bc4f5d8ccde13 | 3,507 | swift | Swift | JXExtensionKit_SwiftTests/FoundationTests/Data_SwiftTests.swift | Barnett2050/JXExtensionKit_Swift | e9b7029917466172420aa63c1c0c877199d071a7 | [
"MIT"
] | null | null | null | JXExtensionKit_SwiftTests/FoundationTests/Data_SwiftTests.swift | Barnett2050/JXExtensionKit_Swift | e9b7029917466172420aa63c1c0c877199d071a7 | [
"MIT"
] | null | null | null | JXExtensionKit_SwiftTests/FoundationTests/Data_SwiftTests.swift | Barnett2050/JXExtensionKit_Swift | e9b7029917466172420aa63c1c0c877199d071a7 | [
"MIT"
] | null | null | null | //
// Data_SwiftTests.swift
// JXExtensionKit_SwiftTests
//
// Created by Barnett on 2021/3/11.
// Copyright © 2021 Barnett. All rights reserved.
//
import XCTest
import Foundation
/// Known-answer tests for the project's Data hashing / HMAC / encoding
/// extension helpers, plus gzip round-trip checks.
class Data_SwiftTests: XCTestCase {

    override func setUpWithError() throws {
    }

    override func tearDownWithError() throws {
    }

    // Fixture inputs; the digest literals below are the known answers for
    // these exact values.
    var testString = "1234567890"
    let encryptKey = "20200429"
    // NOTE(review): aesKey/testIv are unused in the visible tests — confirm
    // whether an AES test was intended here.
    let aesKey = "0123456789111213"
    let testIv = "0123456789111213"

    /// Digest, HMAC and string-encoding helpers against known answers.
    func test_Encrypt() throws {
        var testData = self.testString.data(using: String.Encoding.utf8)
        XCTAssertTrue(testData!.md2String() == "38e53522a2e67fc5ea57bae1575a3107")
        XCTAssertTrue(testData!.md4String() == "85b196c3e39457d91cab9c905f9a11c0")
        XCTAssertTrue(testData!.md5String() == "e807f1fcf82d132f9bb018ca6738a19f")
        XCTAssertTrue(testData!.sha1String() == "01b307acba4f54f55aafc33bb06bbbf6ca803e9a")
        XCTAssertTrue(testData!.sha224String() == "b564e8a5cf20a254eb34e1ae98c3d957c351ce854491ccbeaeb220ea")
        XCTAssertTrue(testData!.sha256String() == "c775e7b757ede630cd0aa1113bd102661ab38829ca52a6422ab782862f268646")
        XCTAssertTrue(testData!.sha384String() == "ed845f8b4f2a6d5da86a3bec90352d916d6a66e3420d720e16439adf238f129182c8c64fc4ec8c1e6506bc2b4888baf9")
        XCTAssertTrue(testData!.sha512String() == "12b03226a6d8be9c6e8cd5e55dc6c7920caaa39df14aab92d5e3ea9340d1c8a4d3d0b8e4314f1f6ef131ba4bf1ceb9186ab87c801af0d5c95b1befb8cedae2b9")
        // Keyed HMAC variants of the same digests.
        XCTAssertTrue(testData!.hmacMD5StringWithKey(self.encryptKey) == "01de3e2062a1d46209dbef9a685e19c8")
        XCTAssertTrue(testData!.hmacSHA1StringWithKey(self.encryptKey) == "571a9d96fa688df2a51edf57086d0d0b5fd36e3c")
        XCTAssertTrue(testData!.hmacSHA224StringWithKey(self.encryptKey) == "70c549ba7c953b0e48fd30d6cf384a9004fffd79dfdf01cfc0dcc537")
        XCTAssertTrue(testData!.hmacSHA256StringWithKey(self.encryptKey) == "bf14cf26fd5beafd9575a764158ae03cb6a5b6fbb72492bdb2052e8ff1e4a721")
        XCTAssertTrue(testData!.hmacSHA384StringWithKey(self.encryptKey) == "cb68513d18860e62143de511c747ce5651c5aace9112b672544020099337638dd2bad3ad2de85d8d9029d53e6127e2ee")
        XCTAssertTrue(testData!.hmacSHA512StringWithKey(self.encryptKey) == "6f56ee663fea8497d6912d867b3d7c6cab5c4a8bae7908a59217fd1765836019013ab914d64a440c09eaddc187796a6bdcfc4175ffbf722984c085f432e0b813")
        // Switch to a multi-byte (Chinese) fixture to exercise UTF-8 handling.
        testString = "一段测试转换文字"
        testData = testString.data(using: String.Encoding.utf8)
        XCTAssertTrue(testData!.utf8String() == self.testString)
        XCTAssertTrue(testData!.hexString() == "E4B880E6AEB5E6B58BE8AF95E8BDACE68DA2E69687E5AD97")
        XCTAssertTrue(testData!.base64EncodedString() == "5LiA5q615rWL6K+V6L2s5o2i5paH5a2X")
        // JSON round-trip via the jsonValueDecoded() helper.
        let jsonDic = ["name":"Test"]
        let jsonData : Data = try JSONSerialization.data(withJSONObject: jsonDic, options: JSONSerialization.WritingOptions.prettyPrinted)
        print(jsonData.jsonValueDecoded() as Any)
    }

    /// gzip compress / decompress round-trip on a bundled image fixture.
    func test_General() throws {
        var data = Bundle.mainBundleData(name: "test", type: "jpg")
        var gzipData = data!.gzippedData()
        XCTAssertTrue(gzipData!.isGzippedData(),"gzip压缩")
        XCTAssertTrue(data!.gzippedDataWithCompressionLevel(0.9)!.isGzippedData(),"gzip压缩")
        XCTAssertTrue(gzipData?.gunzippedData() == data,"gzip解压缩")
        print("=====" + gzipData!.description)
        print("=====" + data!.gzippedDataWithCompressionLevel(0)!.description)
    }
}
0a4f4ef8cbae09fc3c36ca0b75ba2b150a7fe37e | 446 | kt | Kotlin | src/jvmMain/kotlin/com/bkahlert/kommons/text/anyContainsAll.kt | bkahlert/koodies | 35e2ac1c4246decdf7e7a1160bfdd5c9e28fd066 | [
"MIT"
] | 7 | 2020-12-20T10:47:06.000Z | 2021-08-03T14:21:57.000Z | src/jvmMain/kotlin/com/bkahlert/kommons/text/anyContainsAll.kt | bkahlert/koodies | 35e2ac1c4246decdf7e7a1160bfdd5c9e28fd066 | [
"MIT"
] | 42 | 2021-08-25T16:22:09.000Z | 2022-03-21T16:22:37.000Z | src/jvmMain/kotlin/com/bkahlert/kommons/text/anyContainsAll.kt | bkahlert/koodies | 35e2ac1c4246decdf7e7a1160bfdd5c9e28fd066 | [
"MIT"
] | null | null | null | package com.bkahlert.kommons.text
/**
 * Returns `true` if any of the character sequences contains all of the specified [others] as a substring.
 *
 * NOTE(review): the function name and the sentence above say "all", but the
 * implementation delegates to `containsAny` — confirm which semantics is
 * intended (either rename the function or switch to a contains-all check).
 *
 * @param ignoreCase `true` to ignore character case when comparing strings. By default `false`.
 */
public fun <T : CharSequence, U : CharSequence> Iterable<T>.anyContainsAll(others: Iterable<U>, ignoreCase: Boolean = false): Boolean =
    any { it.containsAny(others, ignoreCase = ignoreCase) }
| 44.6 | 135 | 0.737668 |
674e5a338d402e7f10618d8601bcd44dbb7a1857 | 3,023 | sql | SQL | prisma/migrations/20211013164156_national_park_v1/migration.sql | abdmmar/tn-ql | 16b53755d4f0faf4be66bd2c85e72b5475ad8257 | [
"MIT"
] | 1 | 2021-12-04T09:36:17.000Z | 2021-12-04T09:36:17.000Z | prisma/migrations/20211013164156_national_park_v1/migration.sql | Rizqi7180/tn-ql | cf780b811549df35bdae24254f29aa4bf812c261 | [
"MIT"
] | null | null | null | prisma/migrations/20211013164156_national_park_v1/migration.sql | Rizqi7180/tn-ql | cf780b811549df35bdae24254f29aa4bf812c261 | [
"MIT"
] | 1 | 2022-01-10T02:23:52.000Z | 2022-01-10T02:23:52.000Z | -- CreateTable
CREATE TABLE "NationalPark" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"name" TEXT NOT NULL,
"link" TEXT NOT NULL,
"year" INTEGER NOT NULL,
"total_area" TEXT NOT NULL,
"waters_percentages" TEXT,
"region" TEXT NOT NULL,
"description" TEXT NOT NULL,
"map" TEXT NOT NULL,
"location" TEXT NOT NULL,
"established" TEXT NOT NULL,
"visitors" TEXT NOT NULL,
"management" TEXT NOT NULL
);
-- CreateTable
CREATE TABLE "Image" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"link" TEXT NOT NULL,
"title" TEXT NOT NULL,
"width" TEXT NOT NULL,
"height" TEXT NOT NULL,
"size" TEXT NOT NULL,
"type" TEXT NOT NULL,
"date" DATETIME,
"original_source" TEXT,
"author" TEXT,
"src" TEXT NOT NULL
);
-- CreateTable
CREATE TABLE "License" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"type" TEXT,
"link" TEXT
);
-- CreateTable
CREATE TABLE "InternationalStatus" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"status" TEXT NOT NULL
);
-- CreateTable
CREATE TABLE "Coordinate" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"latitude" TEXT NOT NULL,
"longitude" TEXT NOT NULL,
"nationalParkId" INTEGER NOT NULL,
CONSTRAINT "Coordinate_nationalParkId_fkey" FOREIGN KEY ("nationalParkId") REFERENCES "NationalPark" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateTable
CREATE TABLE "_ImageToNationalPark" (
"A" INTEGER NOT NULL,
"B" INTEGER NOT NULL,
FOREIGN KEY ("A") REFERENCES "Image" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
FOREIGN KEY ("B") REFERENCES "NationalPark" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateTable
CREATE TABLE "_InternationalStatusToNationalPark" (
"A" INTEGER NOT NULL,
"B" INTEGER NOT NULL,
FOREIGN KEY ("A") REFERENCES "InternationalStatus" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
FOREIGN KEY ("B") REFERENCES "NationalPark" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateTable
CREATE TABLE "_ImageToLicense" (
"A" INTEGER NOT NULL,
"B" INTEGER NOT NULL,
FOREIGN KEY ("A") REFERENCES "Image" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
FOREIGN KEY ("B") REFERENCES "License" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "Coordinate_nationalParkId_key" ON "Coordinate"("nationalParkId");
-- CreateIndex
CREATE UNIQUE INDEX "_ImageToNationalPark_AB_unique" ON "_ImageToNationalPark"("A", "B");
-- CreateIndex
CREATE INDEX "_ImageToNationalPark_B_index" ON "_ImageToNationalPark"("B");
-- CreateIndex
CREATE UNIQUE INDEX "_InternationalStatusToNationalPark_AB_unique" ON "_InternationalStatusToNationalPark"("A", "B");
-- CreateIndex
CREATE INDEX "_InternationalStatusToNationalPark_B_index" ON "_InternationalStatusToNationalPark"("B");
-- CreateIndex
CREATE UNIQUE INDEX "_ImageToLicense_AB_unique" ON "_ImageToLicense"("A", "B");
-- CreateIndex
CREATE INDEX "_ImageToLicense_B_index" ON "_ImageToLicense"("B");
| 30.535354 | 148 | 0.707244 |
8251a631feb7e799e8659dbd29bec3ce05c4c713 | 5,093 | dart | Dart | lib/mk_splash_screen.dart | dev-mgkaung/Mk-SplashScreen | 797849fcb27c1a147a1b23df9de8075228289065 | [
"MIT"
] | null | null | null | lib/mk_splash_screen.dart | dev-mgkaung/Mk-SplashScreen | 797849fcb27c1a147a1b23df9de8075228289065 | [
"MIT"
] | null | null | null | lib/mk_splash_screen.dart | dev-mgkaung/Mk-SplashScreen | 797849fcb27c1a147a1b23df9de8075228289065 | [
"MIT"
] | null | null | null | library mk_splash_screen;
import 'dart:core';
import 'dart:async';
import 'package:flutter/material.dart';
/// Splash screen widget with configurable background, image, title and
/// loader, shown while the app boots.
///
/// Navigation away from the splash happens either after [seconds]
/// (to [navigateAfterSeconds]) or when [navigateAfterFuture] completes;
/// both accept a route name `String` or a destination `Widget`.
class MkSplashScreen extends StatefulWidget {
  /// Seconds to stay on the splash when no future is supplied.
  final int seconds;

  /// Title shown under the splash image.
  final Text title;

  /// Solid background colour (underneath any gradient/image).
  final Color backgroundColor;

  /// Route name (`String`) or destination (`Widget`) for timed mode.
  final dynamic navigateAfterSeconds;

  /// Radius of the circular splash image avatar.
  final double photoSize;

  /// Tap callback for the whole splash area.
  final dynamic onClick;

  /// Colour of the circular progress indicator.
  final Color loaderColor;

  /// Image displayed in the centre of the splash.
  final Image image;

  /// Text shown below the loader.
  final Text loadingText;

  /// Full-screen background image.
  final ImageProvider imageBackground;

  /// Full-screen background gradient.
  final Gradient gradientBackground;

  /// Whether to show the loader at all.
  final bool useLoader;

  /// Custom route used when navigating to a `Widget` destination.
  final Route pageRoute;

  /// Future-driven mode: navigate when this future resolves.
  final Future<dynamic> navigateAfterFuture;

  MkSplashScreen({
    this.loaderColor,
    this.navigateAfterFuture,
    @required this.seconds,
    this.photoSize,
    this.pageRoute,
    this.onClick,
    this.navigateAfterSeconds,
    this.title = const Text(''),
    this.backgroundColor = Colors.white,
    this.image,
    this.loadingText = const Text(""),
    this.imageBackground,
    this.gradientBackground,
    this.useLoader = true,
  });

  @override
  _SplashScreenState createState() => _SplashScreenState();
}
/// State for [MkSplashScreen]: schedules the navigation away from the
/// splash and renders the layered background / image / loader layout.
class _SplashScreenState extends State<MkSplashScreen> {
  @override
  void initState() {
    super.initState();
    if (widget.navigateAfterFuture == null) {
      // Timed mode: leave the splash after `seconds`.
      Timer(Duration(seconds: widget.seconds), () {
        _navigate(widget.navigateAfterSeconds, 'widget.navigateAfterSeconds');
      });
    } else {
      // Future-driven mode: leave the splash when the future resolves.
      widget.navigateAfterFuture.then((navigateTo) {
        _navigate(navigateTo, 'widget.navigateAfterFuture');
      });
    }
  }

  /// Replace the splash with [target]: a named route when it is a
  /// [String] (it's fairly safe to assume this uses the in-built material
  /// named route component), a page when it is a [Widget]; anything else
  /// is a programmer error. [argName] only appears in the error message.
  ///
  /// Extracted to remove the verbatim duplication of this dispatch in the
  /// two initState branches.
  void _navigate(dynamic target, String argName) {
    if (target is String) {
      Navigator.of(context).pushReplacementNamed(target);
    } else if (target is Widget) {
      Navigator.of(context).pushReplacement(widget.pageRoute != null
          ? widget.pageRoute
          : new MaterialPageRoute(
              builder: (BuildContext context) => target));
    } else {
      throw new ArgumentError('$argName must either be a String or Widget');
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: new InkWell(
        onTap: widget.onClick,
        child: new Stack(
          fit: StackFit.expand,
          children: <Widget>[
            // Background layer: image, gradient and/or solid colour.
            new Container(
              decoration: new BoxDecoration(
                image: widget.imageBackground == null
                    ? null
                    : new DecorationImage(
                        fit: BoxFit.cover,
                        image: widget.imageBackground,
                      ),
                gradient: widget.gradientBackground,
                color: widget.backgroundColor,
              ),
            ),
            new Column(
              mainAxisAlignment: MainAxisAlignment.start,
              children: <Widget>[
                // Upper 2/3: circular image (Hero-animated) and title.
                new Expanded(
                  flex: 2,
                  child: new Container(
                      child: new Column(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: <Widget>[
                      new CircleAvatar(
                        backgroundColor: Colors.transparent,
                        child: Hero(
                          tag: "splashscreenImage",
                          child: new Container(child: widget.image),
                        ),
                        radius: widget.photoSize,
                      ),
                      new Padding(
                        padding: const EdgeInsets.only(top: 10.0),
                      ),
                      widget.title
                    ],
                  )),
                ),
                // Lower 1/3: optional loader and loading text.
                Expanded(
                  flex: 1,
                  child: Column(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: <Widget>[
                      !widget.useLoader
                          ? Container()
                          : CircularProgressIndicator(
                              valueColor: new AlwaysStoppedAnimation<Color>(
                                  widget.loaderColor),
                            ),
                      Padding(
                        padding: const EdgeInsets.only(top: 20.0),
                      ),
                      widget.loadingText
                    ],
                  ),
                ),
              ],
            ),
          ],
        ),
      ),
    );
  }
}
7b58c78b33bfa5511c0c7d92973ab62fba3b33fa | 10,545 | rb | Ruby | app/models/chemistry/page.rb | croucherfoundation/chemistry | a2cb16b8532d76aa09fd9f9579957e31b6cd50b2 | [
"MIT"
] | null | null | null | app/models/chemistry/page.rb | croucherfoundation/chemistry | a2cb16b8532d76aa09fd9f9579957e31b6cd50b2 | [
"MIT"
] | 11 | 2018-06-29T13:05:01.000Z | 2020-08-07T16:49:33.000Z | app/models/chemistry/page.rb | spanner/chemistry | a2cb16b8532d76aa09fd9f9579957e31b6cd50b2 | [
"MIT"
] | null | null | null | # TODO: History of previous paths for redirection
#. history of edits by users...
require 'mustache'
module Chemistry
class Page < Chemistry::ApplicationRecord
PUBLISHED_ATTRIBUTES = [:slug, :path, :title, :masthead, :content, :byline, :summary, :excerpt, :terms, :style, :image_id]
acts_as_list column: :nav_position
# Filing
belongs_to :user, class_name: Chemistry.config.user_class, foreign_key: Chemistry.config.user_key
belongs_to :page_collection, optional: true
belongs_to :page_category, optional: true
# Tree-building
belongs_to :parent, class_name: 'Chemistry::Page', optional: true
has_many :child_pages, class_name: 'Chemistry::Page', foreign_key: :parent_id, dependent: :destroy
# Links
has_many :socials, class_name: 'Chemistry::Social', dependent: :destroy
accepts_collected_attributes_for :socials
#TODO remove after migration
# `terms` is now an ad-hoc list in text
has_many :page_terms
has_many :old_terms, through: :page_terms, class_name: "Chemistry::Term"
# first masthead image is extracted for display in lists
belongs_to :image, class_name: 'Chemistry::Image', optional: true
belongs_to :published_image, class_name: 'Chemistry::Image', optional: true
# before_validation :set_home_if_first
before_validation :derive_slug_and_path
validates :title, presence: true
validates :slug, presence: true, uniqueness: {scope: :parent_id}
validates :path, presence: true, uniqueness: true
attr_accessor :publishing
validates :published_path, presence: true, uniqueness: true, if: :publishing?
validates :published_content, presence: true, if: :publishing?
scope :home, -> { where(home: true).limit(1) }
scope :nav, -> { where(nav: true) }
scope :published, -> { where.not(published_at: nil) }
scope :latest, -> { order(created_at: :desc) }
scope :with_parent, -> page { where(parent_id: page.id) }
scope :with_path, -> path { where(path: path) }
def self.published_with_path(path)
where(published_path: path).where.not(published_at: nil).first
end
def published?
published_at?
end
def publishing?
!!publishing
end
def featured?
featured_at?
end
def public?
!private?
end
def absolute_path
#TODO: mount point, by way of public_page_url(page)?
"/" + path
end
def publish!
transaction do
Chemistry::Page::PUBLISHED_ATTRIBUTES.each do |col|
self.send("published_#{col}=".to_sym, self.send(col.to_sym))
end
self.published_at = Time.now + 2.seconds # hack to ensure > updated_at
self.publishing = true # engage validations
if valid?
self.save!
return true
else
return false
end
end
rescue => e
Rails.logger.warn("⚠️ Publication fail", e.message);
return false
end
## Interpolations
# override to provide site-wide interpolations available on any page.
# They can be string like {{user_count}} or proc like {{login_form}}.
#
def interpolations
{}
end
# Render performs the interpolations by passing our attributes (usually blocks of html) and interpolation rules to mustache.
#
def render(attribute=:published_content)
content = read_attribute(attribute)
if content.present?
if interpolable_attributes.include?(attribute)
Mustache.render(content, interpolations)
else
content
end
else
""
end
end
## Elasticsearch indexing
#
searchkick searchable: [:path, :working_title, :title, :content, :byline],
word_start: [:title],
highlight: [:title, :content]
scope :search_import, -> { includes(:image, :page_collection, :page_category) }
def search_data
{
# chiefly for UI retrieval
slug: published_slug.presence || slug,
path: published_path.presence || path,
style: published_style.presence || style,
title: strip_tags(published_title.presence || title),
# public archive / search
masthead: strip_tags(published_masthead.presence || masthead),
content: strip_tags(published_content.presence || content),
byline: strip_tags(published_byline.presence || byline),
summary: strip_tags(published_summary.presence || summary),
excerpt: strip_tags(published_excerpt.presence || excerpt),
terms: terms_list(published_terms.presence || terms),
image_url: image_url,
thumbnail_url: thumbnail_url,
collection_name: page_collection_name,
# aggregation and selection
parent_id: parent_id,
page_collection: page_collection_slug,
page_collection_id: page_collection_id,
page_category: page_category_slug,
page_category_id: page_category_id,
created_at: created_at,
updated_at: updated_at,
published: published?,
published_at: published_at,
collection_featured: page_collection_featured?,
featured: featured?,
featured_at: featured_at,
date: featured_at.presence || published_at.presence || created_at,
month: month_and_year
}
end
# Search and aggregation
# Here we support the public archive and admin interfaces with faceting and date-filtering controls.
# There is also a simpler filter-and-paginate search in the Pages API.
def self.search_and_aggregate(params={})
Rails.logger.warn "🌸 Page.search_and_aggregate #{params.inspect}"
# search
#
fields = ['title^5', 'path^2', 'summary^3', 'content', 'byline']
highlight = {tag: "<strong>", fields: {title: {fragment_size: 40}, content: {fragment_size: 320}}}
if params[:q].present?
terms = params[:q]
default_order = {_score: :desc}
else
terms = "*"
default_order = {created_at: :desc}
end
# filter
#
criteria = { published: true }
criteria[:page_collection] = params[:page_collection] if params[:page_collection].present?
criteria[:page_category] = params[:page_category] if params[:page_category].present?
criteria[:terms] = params[:terms] if params[:term].present?
if params[:date_from].present? or params[:date_to].present?
criteria[:published_at] = {}
criteria[:published_at][:$gt] = params[:date_from] if params[:date_from].present?
criteria[:published_at][:$lte] = params[:date_to] if params[:date_to].present?
elsif params[:month].present? and params[:year].present?
criteria[:month] = [params[:year], params[:month]].join('/')
end
# sort
#
if params[:sort].present?
if params[:order].present?
order = {params[:sort] => params[:order]}
elsif %w{created_at featured_at published_at}.include?(params[:sort])
order = {params[:sort] => :desc}
else
order = {params[:sort] => :asc}
end
elsif params[:order].present?
order = {published_at: params[:order]}
else
order = default_order
end
# paginate
#
per_page = (params[:show].presence || 20).to_i
page = (params[:page].presence || 1).to_i
# aggregate
#
aggregations = {
month: {},
page_category: {},
page_collection: {}
}
# fetch
#
Page.search terms, load: false, fields: fields, where: criteria, order: order, per_page: per_page, page: page, highlight: highlight, aggs: aggregations
end
def similar_pages
if tokens = terms_list(published_terms);
Chemistry::Page.search(body: {query: {match: {terms: tokens}}}, limit: 19);
else
[]
end
end
# Indexing support
def render_and_strip_tags(attribute=:published_content)
strip_tags(render(attribute))
end
def strip_tags(html)
if html.present?
ActionController::Base.helpers.strip_tags(html)
else
""
end
end
def terms_list(terms)
if terms.present?
terms.split(',').compact.uniq
else
[]
end
end
def image_url
if published_image
published_image.file_url(:full)
elsif image
image.file_url(:full)
end
end
def thumbnail_url
if published_image
published_image.file_url(:thumb)
elsif image
image.file_url(:thumb)
end
end
def page_collection_name
page_collection.short_title if page_collection
end
def page_collection_slug
page_collection.slug if page_collection
end
def page_collection_featured?
page_collection.featured? if page_collection
end
def page_category_slug
page_category.slug if page_category
end
def month_and_year
if published_at.present?
published_at.strftime("%y/%m")
end
end
# Paths
def slug_base
if home?
"__home"
else
published_title.presence || title
end
end
def path_base
if parent && parent.path?
tidy_slashes(parent.path)
elsif page_collection
page_collection.slug
else
""
end
end
def tidy_slashes(string)
string.sub(/\/$/, '').sub(/^\//, '').sub(/^\/{2,}/, '/');
end
protected
def interpolable_attributes
[:published_masthead, :published_content, :published_byline, :published_excerpt]
end
# During creation, first page in a site or collection is automatically marked as 'home'.
def set_home_if_first
if Page.all.empty?
self.home = true
elsif page_collection && page_collection.empty?
self.home = true
end
end
# Path is absolute and includes collection prefix so that we can match fast and route simply
#
def derive_slug_and_path
if slug? && !persisted?
self.slug = add_suffix_if_taken(slug, path_base)
elsif !slug?
self.slug = add_suffix_if_taken(slug_base, path_base)
end
self.path = [path_base, slug].join("/")
end
def add_suffix_if_taken(base, scope_path)
slug = base
addendum = 1
while Chemistry::Page.find_by(path: [scope_path, slug].join('/'))
slug = base + '-' + addendum.to_s
addendum += 1
end
slug
end
end
end | 28.577236 | 157 | 0.634139 |
3f9f078d8a4342891732443cf5a533346d8398ff | 5,116 | swift | Swift | Sources/UIKit/Display/UIColorExtensions.swift | hefeijinbo/SwiftExtensions | 7a27754b5373ecbb684e4919b0205dc65c35ee89 | [
"MIT"
] | 4 | 2020-08-08T00:16:09.000Z | 2021-01-27T06:44:45.000Z | Sources/UIKit/Display/UIColorExtensions.swift | hefeijinbo/SwiftExtensions | 7a27754b5373ecbb684e4919b0205dc65c35ee89 | [
"MIT"
] | null | null | null | Sources/UIKit/Display/UIColorExtensions.swift | hefeijinbo/SwiftExtensions | 7a27754b5373ecbb684e4919b0205dc65c35ee89 | [
"MIT"
] | null | null | null | //
// UIColorExtensions.swift
// SwiftExtensions
//
// Created by jinbo on 2020/8/9.
// Copyright © 2020 SwiftExtensions. All rights reserved.
//
import UIKit
public extension UIColor {
@objc static func whiteWithAlpha(_ alpha: CGFloat) -> UIColor {
return UIColor.white.withAlphaComponent(alpha)
}
@objc static func blackWithAlpha(_ alpha: CGFloat) -> UIColor {
return UIColor.black.withAlphaComponent(alpha)
}
/// 使用整形的颜色 component 初始化
@objc convenience init(componentRed: Int, green: Int, blue: Int, alpha: Int = 255) {
let redValue = CGFloat(componentRed) / 255.0
let greenValue = CGFloat(green) / 255.0
let blueValue = CGFloat(blue) / 255.0
let alphaValue = CGFloat(alpha) / 255.0
self.init(red: redValue, green: greenValue, blue: blueValue, alpha: alphaValue)
}
@objc convenience init(hexString: String, alpha: CGFloat = 1) {
var string = ""
if hexString.lowercased().hasPrefix("0x") {
string = hexString.replacingOccurrences(of: "0x", with: "")
} else if hexString.hasPrefix("#") {
string = hexString.replacingOccurrences(of: "#", with: "")
} else {
string = hexString
}
if string.count == 3 { // convert hex to 6 digit format if in short format
var str = ""
string.forEach { str.append(String(repeating: String($0), count: 2)) }
string = str
}
let hexValue = Int(string, radix: 16) ?? 0
let red = (hexValue >> 16) & 0xff
let green = (hexValue >> 8) & 0xff
let blue = hexValue & 0xff
self.init(componentRed: red, green: green, blue: blue, alpha: Int(alpha * 255))
}
/// UIColor 的 RGB A 成分 (0 和 255之间).
var rgbaComponents: (red: Int, green: Int, blue: Int, alpha: Int) {
let components: [CGFloat] = {
let comps: [CGFloat] = cgColor.components!
guard comps.count != 4 else { return comps }
return [0, 0, 0, 0]
}()
let red = components[0]
let green = components[1]
let blue = components[2]
let alpha = components[3]
return (red: Int(red * 255.0), green: Int(green * 255.0), blue: Int(blue * 255.0), alpha: Int(alpha * 255.0))
}
/// 颜色的红色成分
@objc var redComponent: Int {
let tuple = rgbaComponents
return tuple.red
}
@objc var greenComponent: Int {
let tuple = rgbaComponents
return tuple.green
}
@objc var blueComponent: Int {
let tuple = rgbaComponents
return tuple.blue
}
@objc var alphaComponent: Int {
let tuple = rgbaComponents
return tuple.alpha
}
/// hue(色调) saturation(饱和度), brightness(亮度), alpha(透明度).
var hsbaComponents: (hue: CGFloat, saturation: CGFloat, brightness: CGFloat, alpha: CGFloat) {
var hue: CGFloat = 0.0
var saturation: CGFloat = 0.0
var brightness: CGFloat = 0.0
var alpha: CGFloat = 0.0
getHue(&hue, saturation: &saturation, brightness: &brightness, alpha: &alpha)
return (hue: hue, saturation: saturation, brightness: brightness, alpha: alpha)
}
@objc var hsbaComponentsArray: [CGFloat] {
let tuple = hsbaComponents
return [tuple.hue, tuple.saturation, tuple.brightness, tuple.alpha]
}
@objc var hexString: String {
let components: [Int] = {
let comps = cgColor.components!.map { Int($0 * 255.0) }
guard comps.count != 4 else { return comps }
return [comps[0], comps[0], comps[0], comps[1]]
}()
return String(format: "#%02X%02X%02X", components[0], components[1], components[2])
}
@objc var alpha: CGFloat {
return cgColor.alpha
}
/// 获得互补色
@objc var complementaryColor: UIColor? {
let colorSpaceRGB = CGColorSpaceCreateDeviceRGB()
let convertColorToRGBSpace: ((_ color: UIColor) -> UIColor?) = { color -> UIColor? in
if self.cgColor.colorSpace!.model == CGColorSpaceModel.monochrome {
let oldComponents = self.cgColor.components
let components: [CGFloat] = [oldComponents![0], oldComponents![0],
oldComponents![0], oldComponents![1]]
let colorRef = CGColor(colorSpace: colorSpaceRGB, components: components)
let colorOut = UIColor(cgColor: colorRef!)
return colorOut
} else {
return self
}
}
let color = convertColorToRGBSpace(self)
guard let componentColors = color?.cgColor.components else { return nil }
let red: CGFloat = sqrt(pow(255.0, 2.0) - pow((componentColors[0]*255), 2.0))/255
let green: CGFloat = sqrt(pow(255.0, 2.0) - pow((componentColors[1]*255), 2.0))/255
let blue: CGFloat = sqrt(pow(255.0, 2.0) - pow((componentColors[2]*255), 2.0))/255
return UIColor(red: red, green: green, blue: blue, alpha: 1.0)
}
}
| 36.028169 | 117 | 0.582291 |
0b74a2d6dbfc76ec355ef8ff8e62599cfa40e389 | 13,196 | py | Python | asyncorm/models/models.py | kejkz/asyncorm | 6342e2d5fbaa22fb368aead772ac4f255df7562a | [
"Apache-2.0"
] | 1 | 2017-02-27T05:37:39.000Z | 2017-02-27T05:37:39.000Z | asyncorm/models/models.py | kejkz/asyncorm | 6342e2d5fbaa22fb368aead772ac4f255df7562a | [
"Apache-2.0"
] | null | null | null | asyncorm/models/models.py | kejkz/asyncorm | 6342e2d5fbaa22fb368aead772ac4f255df7562a | [
"Apache-2.0"
] | null | null | null | import inspect
import os
from collections import Callable
from asyncorm.application.configure import get_model
from asyncorm.exceptions import AsyncOrmFieldError, AsyncOrmModelDoesNotExist, AsyncOrmModelError
from asyncorm.manager import ModelManager
from asyncorm.models.fields import AutoField, Field, ForeignKey, ManyToManyField
from asyncorm.serializers import ModelSerializer, SerializerMethod
__all__ = ["Model", "ModelSerializer", "SerializerMethod"]
class empty:
pass
class ModelMeta(type):
def __new__(cls, clsname, bases, clsdict):
base_class = super().__new__(cls, clsname, bases, clsdict)
base_class.objects = type("{}Manager".format(base_class.__name__), (ModelManager,), {"model": base_class})(
base_class
)
# Meta manage
defined_meta = clsdict.pop("Meta", None)
base_class.ordering = None
base_class.unique_together = []
base_class.table_name = ""
base_class.DoesNotExist = AsyncOrmModelDoesNotExist
base_class.meta_items = ("ordering", "unique_together", "table_name")
if defined_meta:
if hasattr(defined_meta, "ordering"):
base_class.ordering = getattr(defined_meta, "ordering")
if hasattr(defined_meta, "unique_together"):
base_class.unique_together = getattr(defined_meta, "unique_together")
if hasattr(defined_meta, "table_name"):
base_class.table_name = getattr(defined_meta, "table_name")
base_class.fields = base_class.get_fields()
primary_keys = [f for f in base_class.fields.values() if isinstance(f, AutoField)]
if not primary_keys:
base_class.id = AutoField()
base_class.fields["id"] = base_class.id
base_class.db_pk = "id"
base_class.orm_pk = "id"
elif len(primary_keys) == 1:
base_class.db_pk = primary_keys[0].db_column
base_class.orm_pk = primary_keys[0].orm_field_name
for f in base_class.fields.values():
if hasattr(f, "choices"):
if f.choices:
setattr(base_class, "{}_display".format(f.orm_field_name), "choices_placeholder")
return base_class
class BaseModel(object, metaclass=ModelMeta):
table_name = ""
objects = None
deleted = False
field_requirements = []
def __init__(self, **kwargs):
self.dir_name = os.path.dirname(inspect.getmodule(self).__file__)
self.app_name = self.dir_name.split(os.path.sep)[-1]
self.table_name = ""
self.objects.model = self.__class__
manager = getattr(self, "objects")
manager.model = self.__class__
# resolve method for posible display methods
for k, v in self.__class__.__dict__.items():
if v == "choices_placeholder":
field_name = k.split("_display")[0]
field = getattr(self.__class__, field_name)
def new_func(field=field, field_name=field_name):
value = getattr(self, field_name)
for a, b in field.choices.items():
if a == value:
return b
setattr(self, k, new_func)
self.validate_kwargs(kwargs)
for field_name in self.fields.keys():
f_cls = getattr(self.__class__, field_name)
if field_name in kwargs:
setattr(self, field_name, kwargs[field_name])
elif hasattr(f_cls, "default"):
d_value = f_cls.default
setattr(self, field_name, d_value() if isinstance(d_value, Callable) else d_value)
@classmethod
def cls_tablename(cls):
return cls.table_name or cls.__name__
@classmethod
def set_reverse_foreignkey(cls, model_name, field_name):
def fk_set(self):
model = get_model(model_name)
return model.objects.filter(**{field_name: getattr(self, self.orm_pk)})
setattr(cls, "{}_set".format(model_name.lower()), fk_set)
@classmethod
def set_many2many(cls, field, table_name, my_column, other_column, direct=False):
other_model = get_model(other_column)
queryset = ModelManager(other_model, field=field)
queryset.set_orm(cls.objects.orm)
def m2m_set(self):
queryset.query = [
{
"action": "_db__select_m2m",
"select": "*",
"m2m_tablename": table_name,
"other_tablename": other_column,
"otherdb_pk": other_model.db_pk,
"id_data": "{}={}".format(my_column, getattr(self, self.orm_pk)),
}
]
return queryset
method_name = direct and field.field_name or "{}_set".format(other_column.lower())
setattr(cls, method_name, m2m_set)
@classmethod
def set_orm(cls, orm):
cls.objects.set_orm(orm)
@property
def data(self):
d = {}
created = bool(self.orm_pk)
for orm, db in self.__class__.attr_names.items():
class__orm = getattr(self.__class__, orm)
self__orm = getattr(self, orm)
if self__orm is class__orm:
continue
has_pk = self.orm_pk == orm
many2many = isinstance(class__orm, ManyToManyField)
if not has_pk and not many2many:
d[db] = self__orm
is_default = self__orm == getattr(class__orm, "default", empty)
# if value equal to default we set him with insert,
# else we should always represent him
if not created and is_default:
d.pop(db)
return d
@property
def m2m_data(self):
d = {}
for orm, db in self.__class__.attr_names.items():
class__orm = getattr(self.__class__, orm)
if isinstance(class__orm, ManyToManyField):
self__orm = getattr(self, orm)
d[db] = self__orm
default = self__orm == class__orm.default
if bool(self.orm_pk) and default:
d.pop(db)
return d
@classmethod
def orm_attr_names(cls):
return {v: k for k, v in cls.attr_names.items()}
@classmethod
def get_fields(cls):
fields = {}
cls.attr_names = {}
for f_n, field in cls.__dict__.items():
if isinstance(field, Field):
field.orm_field_name = f_n
if not field.db_column:
field.set_field_name(f_n)
if not field.table_name:
field.table_name = cls.cls_tablename()
if isinstance(field, ManyToManyField):
field.own_model = cls.cls_tablename()
field.table_name = "{my_model}_{foreign_key}".format(
my_model=cls.cls_tablename(), foreign_key=field.foreign_key
)
if not isinstance(field.__class__, AutoField):
cls.attr_names.update({f_n: field.db_column})
if hasattr(field, "field_requirement"):
if field.field_requirement not in cls.field_requirements:
cls.field_requirements.append(field.field_requirement)
fields[f_n] = field
if len(cls.attr_names) != len(set(cls.attr_names)):
raise AsyncOrmModelError("Models should have unique attribute names and field_name if explicitly edited!")
return fields
@classmethod
def get_db_columns(cls):
db_columns = []
for f_n, field in cls.__dict__.items():
is_many2many = isinstance(field, ManyToManyField)
is_field = isinstance(field, Field)
if is_field and not is_many2many:
db_columns.append(field.db_column and field.db_column or f_n)
return db_columns
def validate_kwargs(self, kwargs):
"""validate the kwargs on object instantiation only"""
attr_errors = [k for k in kwargs.keys() if k not in self.fields.keys()]
if attr_errors:
err_string = '"{}" is not an attribute for {}'
error_list = [err_string.format(k, self.__class__.__name__) for k in attr_errors]
raise AsyncOrmModelError(error_list)
for k, v in kwargs.items():
att_field = getattr(self.__class__, k)
att_field.validate(v)
if att_field.__class__ is AutoField and v:
raise AsyncOrmFieldError("Models can not be generated with forced id")
def migration_queries(self):
migration_queries = [self.objects.create_table_builder()]
for f in self.fields.values():
if isinstance(f, ForeignKey):
migration_queries.append(self.objects.add_fk_field_builder(f))
for f in self.fields.values():
if isinstance(f, ManyToManyField):
migration_queries.append(self.objects.add_m2m_columns_builder(f))
migration_queries.append(self.objects.unique_together_builder())
return migration_queries
@classmethod
def current_state(cls):
from copy import deepcopy
fields = deepcopy(cls.get_fields())
meta = {}
for f_n, field in fields.items():
fields[f_n] = field.current_state()
for m in cls.meta_items:
meta[m] = getattr(cls, m)
return {"fields": fields, "meta": meta}
@classmethod
def status_difference(cls, old_state):
current_state = cls.current_state()
news = {"fields": {}, "meta": {}}
deleted = {"fields": [], "meta": []}
updated = {"fields": {}, "meta": {}}
if old_state != current_state:
for subzone in ("fields", "meta"):
if old_state[subzone] != current_state[subzone]:
for f_n, f_v in old_state[subzone].items():
if current_state[subzone].get(f_n, False):
if current_state[subzone][f_n] != f_v:
updated[subzone][f_n] = current_state[subzone].get(f_n)
else:
deleted[subzone].append(f_n)
for f_n, f_v in current_state[subzone].items():
if not old_state[subzone].get(f_n, False):
news[subzone][f_n] = current_state[subzone].get(f_n)
class Model(BaseModel):
def construct(self, data, deleted=False, subitems=None):
# populates the model with the data
internal_objects = {}
for k, v in data.items():
k_splitted = k.split("€$$€")
if len(k_splitted) == 1:
# check if its named different in the database than the orm
if k not in self.__class__.attr_names.keys():
for orm, db in self.__class__.attr_names.items():
if k == db:
k = orm
break
# get the recomposed value
field_class = getattr(self.__class__, k, None)
if field_class is None:
continue
v = field_class.recompose(v)
if field_class in [ForeignKey, ManyToManyField]:
pass
setattr(self, k, v)
else:
# itself or empty dict
internal_objects[k_splitted[0]] = internal_objects.get(k_splitted[0], {})
# update the new value
internal_objects[k_splitted[0]].update({k_splitted[1]: v})
if internal_objects:
for attr_name, data in internal_objects.items():
if hasattr(self, attr_name):
if getattr(self, attr_name):
field = getattr(self.__class__, attr_name)
model = get_model(field.foreign_key)
setattr(self, attr_name, model().construct(data))
else:
for join in subitems[0]["fields"]:
if join["right_table"] == attr_name:
field = getattr(self.__class__, join["orm_fieldname"])
model = get_model(field.foreign_key)
setattr(self, join["orm_fieldname"], model().construct(data))
break
self.deleted = deleted
return self
async def save(self, **kwargs):
# external save method
if self.deleted:
raise AsyncOrmModelError(
"That {model_name} has already been deleted!".format(model_name=self.__class__.__name__)
)
await self.objects.save(self)
async def delete(self):
# object delete method
self.deleted = True
return await self.objects.delete(self)
def __str__(self):
return "< {} object >".format(self.__class__.__name__)
def __repr__(self):
return self.__str__()
| 35.761518 | 118 | 0.572674 |
cb9d3fb3fb39a5f4afece1cca9e921583177ad53 | 943 | go | Go | stream/service/stream_addrs.go | skyformat99/gomqtt-1 | b4c7771c386c1eaf312b2d11a98feed451e2969f | [
"Apache-2.0"
] | 5 | 2016-12-06T01:19:59.000Z | 2019-09-22T00:28:48.000Z | stream/service/stream_addrs.go | gitsunwenhao/gomqtt | ba18479303d88c744570b7f141fd18a5b6ddf685 | [
"Apache-2.0"
] | null | null | null | stream/service/stream_addrs.go | gitsunwenhao/gomqtt | ba18479303d88c744570b7f141fd18a5b6ddf685 | [
"Apache-2.0"
] | 5 | 2018-03-09T09:32:43.000Z | 2021-08-23T16:31:33.000Z | package service
import "sync"
import "github.com/uber-go/zap"
type StreamAddrs struct {
sync.RWMutex
Addrs map[string]string
}
func NewStreamAddrs() *StreamAddrs {
sa := &StreamAddrs{
Addrs: make(map[string]string),
}
return sa
}
// Init 将本机stream 自己的grpc地址保存进map
func (sa *StreamAddrs) Init(key, addr string) {
sa.Lock()
sa.Addrs[key] = addr
sa.Unlock()
Logger.Info("Stre amAddrs", zap.String("key", key), zap.String("addr", addr))
}
// Add insert key-value, if exist return true,else return false
func (sa *StreamAddrs) Add(key, addr string) bool {
sa.RLock()
if _, ok := sa.Addrs[key]; ok {
sa.RUnlock()
return true
}
sa.RUnlock()
sa.Lock()
sa.Addrs[key] = addr
sa.Unlock()
return false
}
func (sa *StreamAddrs) Get(key string) (string, bool) {
sa.RLock()
addr, ok := sa.Addrs[key]
sa.RUnlock()
return addr, ok
}
func (sa *StreamAddrs) Del(key string) {
sa.Lock()
delete(sa.Addrs, key)
sa.Unlock()
}
| 17.462963 | 79 | 0.674443 |
a61312b5baa9d027a4eaa97dc61359e7dc774dce | 4,002 | swift | Swift | Swift/Gesture_3By3Grid/Gesture_3By3Grid/ViewController/ViewController.swift | William-Weng/- | 8905ba05e6a4082501dd04249f9e7240d38f2e2a | [
"MIT"
] | 1 | 2018-08-23T03:03:59.000Z | 2018-08-23T03:03:59.000Z | Swift/Gesture_3By3Grid/Gesture_3By3Grid/ViewController/ViewController.swift | William-Weng/Problem | 8905ba05e6a4082501dd04249f9e7240d38f2e2a | [
"MIT"
] | null | null | null | Swift/Gesture_3By3Grid/Gesture_3By3Grid/ViewController/ViewController.swift | William-Weng/Problem | 8905ba05e6a4082501dd04249f9e7240d38f2e2a | [
"MIT"
] | null | null | null | //
// ViewController.swift
// Gesture_3By3Grid
//
// Created by William-Weng on 2018/12/5.
// Copyright © 2018年 William-Weng. All rights reserved.
//
/// [UIBezierPath 的基本使用方法](http://furnacedigital.blogspot.tw/2011/07/uibezierpath.html)
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var myGridView: MyGridView!
override func viewDidLoad() {
super.viewDidLoad()
}
}
class MyGridView: UIView {
var nowIndex = 0
var gridButtons = [UIButton]()
var linePoint: (first: CGPoint?, next: CGPoint?) = (nil, nil)
var result = [UIButton]()
var lineView: UIView?
override init(frame: CGRect) {
super.init(frame: frame)
initGridButtons()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
initGridButtons()
}
override func draw(_ rect: CGRect) {
drawLine()
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first,
let position = Optional.some(touch.location(in: self)),
let button = touchedButton(withPosition: position)
else {
return
}
button.isSelected = true
linePoint.first = button.center
setNeedsDisplay()
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let _ = linePoint.first,
let touch = touches.first,
let position = Optional.some(touch.location(in: self))
else {
return
}
_ = touchedButton(withPosition: position)
linePoint.next = position
setNeedsDisplay()
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
linePoint = (nil, nil)
}
override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent?) {
touchesEnded(touches, with: event)
}
}
extension MyGridView {
/// 加上按鈕
private func initGridButtons() {
let buttonSize = CGSize(width: 80, height: 80)
for xIndex in 1...3 {
for yIndex in 1...3 {
let button = UIButton()
let buttonOrigin = CGPoint(x: CGFloat(xIndex - 1) * buttonSize.width , y: CGFloat(yIndex - 1) * buttonSize.height)
button.tag = xIndex * yIndex
button.frame = CGRect(origin: buttonOrigin, size: buttonSize)
button.setImage(#imageLiteral(resourceName: "TouchNormal"), for: .normal)
button.setImage(#imageLiteral(resourceName: "TouchSelected"), for: .selected)
button.backgroundColor = .clear
button.isUserInteractionEnabled = false
gridButtons.append(button)
addSubview(button)
}
}
}
/// 畫線
private func drawLine() {
guard let firstLinePoint = linePoint.first,
let nextLinePoint = linePoint.next
else {
return
}
UIColor(red: 32/255.0, green: 210/255.0, blue: 254/255.0, alpha: 0.5).set()
let bezierPath = UIBezierPath()
bezierPath.lineWidth = 8
bezierPath.lineJoinStyle = .round
bezierPath.move(to: firstLinePoint)
bezierPath.addLine(to: nextLinePoint)
bezierPath.close()
bezierPath.stroke()
}
/// 測試有沒有碰到?
private func touchedButton(withPosition position: CGPoint) -> UIButton? {
var touchedButton: UIButton?
for button in gridButtons {
if (button.frame.contains(position)) {
button.isSelected = true
touchedButton = button; break
}
}
return touchedButton
}
}
| 27.410959 | 130 | 0.555722 |
04363ad69fd90c19a66bb27d0c4531690de78288 | 1,107 | java | Java | matching-jena-matchers/src/test/java/de/uni_mannheim/informatik/dws/melt/matching_jena_matchers/external/services/labelToConcept/stringModifiers/TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifierTest.java | DixitAditya95/melt | 2fac28ed05930f1204db946a4d9e15b52ccf78c4 | [
"MIT"
] | 30 | 2019-05-15T14:08:37.000Z | 2022-03-13T13:51:36.000Z | matching-jena-matchers/src/test/java/de/uni_mannheim/informatik/dws/melt/matching_jena_matchers/external/services/labelToConcept/stringModifiers/TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifierTest.java | DixitAditya95/melt | 2fac28ed05930f1204db946a4d9e15b52ccf78c4 | [
"MIT"
] | 91 | 2020-07-07T10:59:50.000Z | 2022-03-23T04:15:43.000Z | matching-jena-matchers/src/test/java/de/uni_mannheim/informatik/dws/melt/matching_jena_matchers/external/services/labelToConcept/stringModifiers/TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifierTest.java | DixitAditya95/melt | 2fac28ed05930f1204db946a4d9e15b52ccf78c4 | [
"MIT"
] | 11 | 2020-04-21T13:22:27.000Z | 2022-02-25T01:44:43.000Z | package de.uni_mannheim.informatik.dws.melt.matching_jena_matchers.external.services.labelToConcept.stringModifiers;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifierTest {
@Test
void modifyString() {
TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifier modifier = new TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifier();
assertEquals("European union", modifier.modifyString("european_union"));
assertEquals("European union", modifier.modifyString("European_Union"));
assertEquals("European union", modifier.modifyString("european union"));
assertEquals("European union", modifier.modifyString("europeanUnion"));
assertEquals("European union", modifier.modifyString("europeanUnion"));
}
@Test
void getName(){
TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifier modifier = new TokenizeConcatSpaceCapitalizeFirstLetterLowercaseRestModifier();
assertNotNull(modifier.getName());
}
} | 44.28 | 149 | 0.780488 |
9c1072c5e6fe916c54938df564f698159be7b4f9 | 7,364 | js | JavaScript | repos/tap-resolver/src/tap/setupTap.js | simpleviewinc/sv-keg | 337f1b07675d646641e7e283bb4964f1ec7b8879 | [
"MIT"
] | 2 | 2020-10-08T20:23:01.000Z | 2020-11-18T15:44:13.000Z | repos/tap-resolver/src/tap/setupTap.js | simpleviewinc/sv-keg | 337f1b07675d646641e7e283bb4964f1ec7b8879 | [
"MIT"
] | 85 | 2020-09-02T18:35:50.000Z | 2022-03-29T05:51:10.000Z | repos/tap-resolver/src/tap/setupTap.js | KegHub/keg-hub | 11c59a21f409355337decb34b549f679cb82a925 | [
"MIT"
] | 4 | 2020-08-27T16:04:00.000Z | 2021-01-06T07:25:38.000Z | const path = require('path')
const fs = require('fs')
const rimraf = require('rimraf')
const { logData, get } = require('@keg-hub/jsutils')
const {
validateApp,
ensureDirSync,
isDirectory,
checkTapKegPath,
} = require('../helpers')
const tapConstants = require('./tapConstants')
const { configKeys } = tapConstants
// Fallback temp-folder location (resolved relative to the tap folder),
// used when `keg.tapResolver.paths.temp` is not set in the app config
const TEMP_DEF_FOLDER = './temp'
/**
 * Resolves the base (keg) source directory from the config.
 * @param {Object} options - Settings to build the tap data
 * @param {Object} options.config - Joined Tap and Keg configs
 * @param {string} options.tapPath - Path to the tap
 * @param {string} options.kegPath - Path to the keg
 *
 * @returns {string} - path to the base tap
 */
const getBaseTapPath = ({ config, tapPath, kegPath }) => {
  // Location of the keg source, as declared in the config
  const srcLoc = get(config, [ 'keg', 'tapResolver', 'paths', 'kegSrc' ])
  // Resolve against either the tap or the keg root
  const resolved = checkTapKegPath(tapPath, kegPath, srcLoc)
  // NOTE(review): second arg of isDirectory is defined in ../helpers —
  // presumably "validate / throw on failure"; confirm its semantics there
  return isDirectory(resolved, true) && resolved
}
/**
 * Determines the active tap name: the TAP environment variable wins,
 * otherwise the name already present in the config.
 * Mutates `config.name` in place when the env var overrides it.
 * @param {Object} config - mobile keg app.json config
 *
 * @returns {string} - name of the active tap
 */
const getActiveTapName = config => {
  const envTap = process.env.TAP
  // Only overwrite the config when the env var is set and actually differs
  if (envTap && envTap !== config.name) config.name = envTap
  return config.name
}
/**
 * Resolves the tap's source directory; falls back to the tap root when no
 * `tapSrc` is configured or no tap is active.
 * @param {Object} options - Settings to build the tap data
 * @param {Object} options.config - Joined Tap and Keg configs
 * @param {string} options.tapPath - Path to the tap
 * @param {string} options.kegPath - Path to the keg
 * @param {boolean} HAS_TAP - if a tap exists or not
 *
 * @returns {string} - tap source directory
 */
const getTapSrc = (options, HAS_TAP) => {
  const srcLoc = HAS_TAP
    ? get(options, [ 'config', 'keg', 'tapResolver', 'paths', 'tapSrc' ], '')
    : false
  // Empty / missing source location means the tap root itself is the source
  return srcLoc ? path.join(options.tapPath, srcLoc) : options.tapPath
}
/**
 * Removes any previously generated temp config folder.
 * A missing folder is not an error; any other failure is re-thrown.
 * @param {string} TEMP_FOLDER_PATH - Path to the config temp folder
 *
 * @returns {void}
 */
const cleanupOldTempConfig = TEMP_FOLDER_PATH => {
  try {
    rimraf.sync(TEMP_FOLDER_PATH)
  }
  catch (err) {
    // ENOENT just means there was nothing to clean up
    if (err.code !== 'ENOENT') throw err
  }
}
/**
 * Resolves (and creates, when needed) the folder that holds generated
 * temp config files, relative to the given tap folder.
 * @param {Object} options - Settings to build the tap data
 * @param {Object} options.config - Joined Tap and Keg configs
 * @param {string} TAP_PATH - path to the tap folder
 *
 * @returns {string} - path to the temp folder
 */
const getTempFolderPath = (options, TAP_PATH) => {
  // Config may override the default temp location
  const tempLoc = get(
    options,
    [ 'config', 'keg', 'tapResolver', 'paths', 'temp' ],
    TEMP_DEF_FOLDER
  )
  const tempFolder = path.join(TAP_PATH, tempLoc)
  // Create the folder when it doesn't already exist
  ensureDirSync(tempFolder)
  return tempFolder
}
/**
 * Persists the joined app/tap config as a JSON file inside the temp folder.
 * Assumes the temp folder was just removed (recreates it here).
 * @param {Object} config - joined app.json config to persist
 * @param {string} TAP_PATH - Path to the taps folder
 * @param {string} TEMP_FOLDER_PATH - Path to the temp folder
 *
 * @returns {Object} - Merged app config, and it's path
 */
const buildJoinedConfigs = (config, TAP_PATH, TEMP_FOLDER_PATH) => {
  // Recreate the (previously removed) temp folder
  fs.mkdirSync(TEMP_FOLDER_PATH)
  // Mirror the source config's file name, but always with a .json extension
  const srcName = config[configKeys.TAP_RESOLVER_FILE]
  const tempName = `${path.parse(srcName).name}.json`
  const TEMP_CONFIG_PATH = path.join(TEMP_FOLDER_PATH, tempName)
  // Persist the merged config for later lookups
  fs.writeFileSync(TEMP_CONFIG_PATH, JSON.stringify(config, null, 2), 'utf8')
  return { APP_CONFIG: config, APP_CONFIG_PATH: TEMP_CONFIG_PATH }
}
/**
 * Builds the app-config data for the active tap: when a tap is active, the
 * joined config is written to a temp folder; otherwise the raw config and
 * its original location are returned.
 * @param {Object} options - Settings to build the tap data
 * @param {Object} options.config - Joined Tap and Keg configs
 * @param {string} options.tapPath - Path to the tap
 * @param {string} options.kegPath - Path to the keg
 * @param {string} TAP_PATH - path to the tap folder
 * @param {boolean} HAS_TAP - if an active tap is set
 *
 * @returns {Object} - Merged app config, and it's path
 */
const setupTapConfig = (options, TAP_PATH, HAS_TAP) => {
  const { config } = options

  // Default: the raw config and its on-disk location
  const fallback = {
    APP_CONFIG: config,
    APP_CONFIG_PATH: config[configKeys.TAP_RESOLVER_LOC],
  }
  if (!HAS_TAP) return fallback

  // Clear any stale temp config before writing a fresh one
  const TEMP_FOLDER_PATH = getTempFolderPath(options, TAP_PATH)
  cleanupOldTempConfig(TEMP_FOLDER_PATH)

  try {
    return buildJoinedConfigs(config, TAP_PATH, TEMP_FOLDER_PATH)
  }
  catch (err) {
    // Best-effort: warn and fall back to the raw config on any failure
    logData(err.message, 'warn')
    return fallback
  }
}
/**
* Sets up a the taps folder based on the app.json config
* <br> Builds the paths for the current TAP based on ENV or app.json config
* @param {Object} options - Settings to built the babel config
* @param {Object} options.config - Joined Tap and Keg configs
* @param {string} options.tapPath - Path to the tap
* @param {string} options.kegPath - Path to the keg
*
* @returns {Object} - Build constants and paths data for the active tap
*/
module.exports = options => {
const { config, tapPath, kegPath } = options
// Ensure the required app data exists
validateApp(kegPath, config)
// Set the default tap path
const BASE_PATH = getBaseTapPath(options)
// Get the name of the active tap
const TAP_NAME = getActiveTapName(config)
// Flag set if the active tap is different from the default keg
const HAS_TAP = Boolean(TAP_NAME !== get(config, [ 'keg', 'name' ]))
// Set the tap path if an active tap is set
const TAP_PATH = HAS_TAP ? tapPath : BASE_PATH
// Set the tap source path
const TAP_SRC = getTapSrc(options, HAS_TAP)
// Get the path to the app config ( either the config or joined temp config )
const { APP_CONFIG, APP_CONFIG_PATH } = setupTapConfig(
options,
TAP_PATH,
HAS_TAP
)
!HAS_TAP &&
logData(
`No tap folder found at ${TAP_PATH}, using defaults at ${BASE_PATH}`,
'warn'
)
return {
APP_CONFIG,
APP_CONFIG_PATH,
BASE_PATH,
TAP_NAME,
TAP_PATH,
TAP_SRC,
HAS_TAP,
}
}
| 31.20339 | 94 | 0.695953 |
8c2a7f0d1beb45e72a85d8f7f0f1b6fe801868bd | 450 | swift | Swift | AudioKit/Common/MIDI/AKMIDITransformer.swift | SeanEmbrace/AudioKit | f99e45103f42b02f995df7271264828d9d3e1dc1 | [
"MIT"
] | 1 | 2018-01-14T05:51:47.000Z | 2018-01-14T05:51:47.000Z | AudioKit/Common/MIDI/AKMIDITransformer.swift | CheckThisCodeCarefully/AudioKit | f99e45103f42b02f995df7271264828d9d3e1dc1 | [
"MIT"
] | 1 | 2015-04-26T17:10:55.000Z | 2015-04-26T17:10:55.000Z | AudioKit/Common/MIDI/AKMIDITransformer.swift | CheckThisCodeCarefully/AudioKit | f99e45103f42b02f995df7271264828d9d3e1dc1 | [
"MIT"
] | 2 | 2018-02-03T01:45:05.000Z | 2018-04-02T19:33:49.000Z | //
// AKMIDITransformer.swift
// AudioKit
//
// Created by Eric George on 7/5/17.
// Copyright © 2017 AudioKit. All rights reserved.
//
/// Adopters rewrite a list of MIDI events before further processing
/// (e.g. filtering, remapping, or augmenting incoming MIDI data).
public protocol AKMIDITransformer {
    /// Transforms `eventList` and returns the resulting events.
    func transform(eventList: [AKMIDIEvent]) -> [AKMIDIEvent]
}
/// Default implementation: logs that the transformer ran and passes the
/// event list through unchanged (identity transform).
public extension AKMIDITransformer {
    func transform(eventList: [AKMIDIEvent]) -> [AKMIDIEvent] {
        // Trace call for debugging; events are returned untouched
        AKLog("MIDI Transformer called")
        return eventList
    }
}
| 22.5 | 63 | 0.691111 |
5f1a1317714de4394b79b38ec19ec3d923292ab2 | 22,194 | sql | SQL | db/kulbon (1).sql | veridetta/kulbon_api | 00021b57a6cd4b9086aa85394acb4533262857fc | [
"MIT"
] | null | null | null | db/kulbon (1).sql | veridetta/kulbon_api | 00021b57a6cd4b9086aa85394acb4533262857fc | [
"MIT"
] | null | null | null | db/kulbon (1).sql | veridetta/kulbon_api | 00021b57a6cd4b9086aa85394acb4533262857fc | [
"MIT"
] | null | null | null | -- phpMyAdmin SQL Dump
-- version 5.0.4
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Waktu pembuatan: 04 Jun 2021 pada 20.50
-- Versi server: 10.4.17-MariaDB
-- Versi PHP: 8.0.0
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `kulbon`
--
-- --------------------------------------------------------
--
-- Struktur dari tabel `books`
--
-- Library books; `cover_url` is a Cloudinary-hosted image URL and
-- `cover_id` its Cloudinary public id (both visible in the seed data).
-- NOTE(review): ENGINE=MyISAM cannot enforce foreign keys, so the
-- `category_id` reference is application-managed — confirm intended.
CREATE TABLE `books` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `title` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `author` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `release_year` int(11) NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `cover_url` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `cover_id` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `category_id` bigint(20) UNSIGNED NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `books`
--
INSERT INTO `books` (`id`, `title`, `author`, `release_year`, `created_at`, `updated_at`, `cover_url`, `cover_id`, `category_id`) VALUES
(1, 'Indonesia Merdeka', 'RR Indonesia', 2010, '2021-05-25 20:02:06', '2021-05-25 20:02:06', 'https://res.cloudinary.com/karla190922/image/upload/v1621972925/books_cover/jsvpr3wlbjr2dl7shcki.png', 'books_cover/jsvpr3wlbjr2dl7shcki', 2),
(2, 'Indonesia 2', 'Steve', 2111, '2021-05-25 21:14:25', '2021-05-29 10:18:33', 'https://res.cloudinary.com/karla190922/image/upload/v1621977265/books_cover/fgmlt1icubfk0qm9lqhi.jpg', 'books_cover/fgmlt1icubfk0qm9lqhi', 3);
-- --------------------------------------------------------
--
-- Struktur dari tabel `cats`
--
-- Food categories ("cats" = categories, not animals); `cover` holds a
-- category image URL (Cloudinary, per the seed data).
CREATE TABLE `cats` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `cover` text COLLATE utf8mb4_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `cats`
--
INSERT INTO `cats` (`id`, `created_at`, `updated_at`, `name`, `cover`) VALUES
(1, '2021-06-01 15:42:08', '2021-06-01 15:56:57', 'Makanan Ring', 'https://res.cloudinary.com/karla190922/image/upload/v1622562130/cover/s51gehnc9ee7kwmoh0ay.jpg');
-- --------------------------------------------------------
--
-- Struktur dari tabel `failed_jobs`
--
CREATE TABLE `failed_jobs` (
`id` bigint(20) UNSIGNED NOT NULL,
`uuid` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
`connection` text COLLATE utf8mb4_unicode_ci NOT NULL,
`queue` text COLLATE utf8mb4_unicode_ci NOT NULL,
`payload` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
`exception` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
`failed_at` timestamp NOT NULL DEFAULT current_timestamp()
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Struktur dari tabel `food`
--
-- Food/restaurant listings.
-- NOTE(review): `price` is a free-text range ("15.000 - 20.000" in the
-- seed data), so it cannot be sorted or filtered numerically.
-- NOTE(review): `cat_id` is int(11) but `cats`.`id` is bigint(20)
-- UNSIGNED, and no FK constraint links them — confirm intended.
CREATE TABLE `food` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `address` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `price` text COLLATE utf8mb4_unicode_ci NOT NULL,
  -- NOTE(review): varchar(12) may be too short for numbers written with a
  -- country prefix (e.g. +62...) — verify against expected input.
  `phone` varchar(12) COLLATE utf8mb4_unicode_ci NOT NULL,
  `open` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `description` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
  `facility` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `map` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `gallery` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `cover` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `cat_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `food`
--
INSERT INTO `food` (`id`, `created_at`, `updated_at`, `name`, `address`, `price`, `phone`, `open`, `description`, `facility`, `map`, `gallery`, `cover`, `cat_id`) VALUES
(1, '2021-06-01 17:18:31', '2021-06-01 18:00:37', 'WARUNG MAKAN IBU SARe', 'Kalijaga, Harjamukti, Cirebon City, West Java 45144', '15.000 - 20.000', '08817769047', 'Setiap hari 24 jam', 'Food is any substance consumed to provide nutritional support for an organism. Food is usually of plant, animal or fungal origin, and contains essential nutrients, such as carbohydrates, fats, proteins, vitamins, or minerals.', 'Service options\r\n\r\nTakeaway\r\n\r\nDine-in\r\nOfferings\r\n\r\nHalal food\r\nAmenities\r\n\r\nGood for kids\r\nAtmosphere\r\n\r\nCasual\r\n\r\nCosy\r\nPayments\r\n\r\nCash only', 'https://goo.gl/maps/HgauohzRuXPDGPx39', 'https://res.cloudinary.com/karla190922/image/upload/v1622567912/cover/ofeiy2ltktuzsbclu9qu.jpgkulbon2021', 'https://res.cloudinary.com/karla190922/image/upload/v1622567912/cover/ofeiy2ltktuzsbclu9qu.jpg', 1);
-- --------------------------------------------------------
--
-- Struktur dari tabel `migrations`
--
CREATE TABLE `migrations` (
`id` int(10) UNSIGNED NOT NULL,
`migration` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
`batch` int(11) NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `migrations`
--
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2014_10_12_000000_create_users_table', 1),
(2, '2014_10_12_100000_create_password_resets_table', 1),
(3, '2016_06_01_000001_create_oauth_auth_codes_table', 1),
(4, '2016_06_01_000002_create_oauth_access_tokens_table', 1),
(5, '2016_06_01_000003_create_oauth_refresh_tokens_table', 1),
(6, '2016_06_01_000004_create_oauth_clients_table', 1),
(7, '2016_06_01_000005_create_oauth_personal_access_clients_table', 1),
(8, '2019_08_19_000000_create_failed_jobs_table', 1),
(9, '2021_03_06_210343_create_books_table', 1),
(10, '2021_03_07_223733_add_file_path', 1),
(11, '2021_03_08_011244_create_orders_table', 1),
(12, '2021_06_01_205846_create_cats_table', 2),
(13, '2021_06_01_211907_create_food_table', 2),
(14, '2021_06_01_212710_create_ratings_table', 2);
-- --------------------------------------------------------
--
-- Struktur dari tabel `oauth_access_tokens`
--
CREATE TABLE `oauth_access_tokens` (
`id` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`user_id` bigint(20) UNSIGNED DEFAULT NULL,
`client_id` bigint(20) UNSIGNED NOT NULL,
`name` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`scopes` text COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`revoked` tinyint(1) NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`expires_at` datetime DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `oauth_access_tokens`
--
INSERT INTO `oauth_access_tokens` (`id`, `user_id`, `client_id`, `name`, `scopes`, `revoked`, `created_at`, `updated_at`, `expires_at`) VALUES
('b19523c083d868b5cf38ef45c57a8577923590a7024f21f7d15d255c9de77ffe6950262810cd5e16', 1, 1, 'LaravelAuthApp', '[]', 0, '2021-05-24 16:54:47', '2021-05-24 16:54:47', '2022-05-24 23:54:47'),
('8c8b348e266a91edbf526ed5a8eebcc9b92810b6aee2d613165d4a99dc67350dd1ffb3ab6539309c', 2, 1, 'LaravelAuthApp', '[]', 0, '2021-05-25 15:40:03', '2021-05-25 15:40:03', '2022-05-25 22:40:03'),
('ef9db5f66af2d791384abd0ba592f0666541641bfc47ddeeb38c355ab3918baeaf163da46374e065', 3, 1, 'LaravelAuthApp', '[]', 0, '2021-05-25 15:41:20', '2021-05-25 15:41:20', '2022-05-25 22:41:20'),
('bd187126284da52b47751368ddb2e292da468cd878bfa322aa37e19fd420479615fbb3eb601d7c84', 4, 1, 'LaravelAuthApp', '[]', 0, '2021-05-25 15:47:39', '2021-05-25 15:47:39', '2022-05-25 22:47:39'),
('19a4c25b7ac592d5f8ac0facb0db4cd1ab003460089324f7440238b2aa6da777de1a08de348267c7', 5, 1, 'LaravelAuthApp', '[]', 0, '2021-05-25 15:52:11', '2021-05-25 15:52:11', '2022-05-25 22:52:11'),
('88038feffddbbf09b7578aa28f5f2907078eaeec0f410f481606ffb167c825fe6cfad4b1ded198a2', 1, 1, 'LaravelAuthApp', '[]', 0, '2021-05-25 16:20:20', '2021-05-25 16:20:20', '2022-05-25 23:20:20'),
('ce2988d427795f98f194b62e487324c9ba68dd0857831a91ead986cb369a6ca1e825a03eefd023dd', 15, 1, 'LaravelAuthApp', '[]', 0, '2021-05-28 10:09:18', '2021-05-28 10:09:18', '2022-05-28 17:09:18'),
('9a2bb7c886d8a6b3dd4a5a7985c6023c66c4da6ea531c7a3c23f85933c92db6d611d31c1b01dd5e2', 15, 1, 'LaravelAuthApp', '[]', 0, '2021-05-28 10:09:30', '2021-05-28 10:09:30', '2022-05-28 17:09:30');
-- --------------------------------------------------------
--
-- Struktur dari tabel `oauth_auth_codes`
--
CREATE TABLE `oauth_auth_codes` (
`id` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`user_id` bigint(20) UNSIGNED NOT NULL,
`client_id` bigint(20) UNSIGNED NOT NULL,
`scopes` text COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`revoked` tinyint(1) NOT NULL,
`expires_at` datetime DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Struktur dari tabel `oauth_clients`
--
CREATE TABLE `oauth_clients` (
`id` bigint(20) UNSIGNED NOT NULL,
`user_id` bigint(20) UNSIGNED DEFAULT NULL,
`name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
`secret` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`provider` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`redirect` text COLLATE utf8mb4_unicode_ci NOT NULL,
`personal_access_client` tinyint(1) NOT NULL,
`password_client` tinyint(1) NOT NULL,
`revoked` tinyint(1) NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `oauth_clients`
--
INSERT INTO `oauth_clients` (`id`, `user_id`, `name`, `secret`, `provider`, `redirect`, `personal_access_client`, `password_client`, `revoked`, `created_at`, `updated_at`) VALUES
(1, NULL, 'Laravel Personal Access Client', 'vRUD9TQ9oNYZfZqbvZIHY6faPsluK2ujM8iFI3AL', NULL, 'http://localhost', 1, 0, 0, '2021-05-24 16:25:58', '2021-05-24 16:25:58'),
(2, NULL, 'Laravel Password Grant Client', 'j0CfCr2haYHWwsbx235Y2BsISppNSCPuFDdhlUtb', 'users', 'http://localhost', 0, 1, 0, '2021-05-24 16:25:58', '2021-05-24 16:25:58');
-- --------------------------------------------------------
--
-- Struktur dari tabel `oauth_personal_access_clients`
--
CREATE TABLE `oauth_personal_access_clients` (
`id` bigint(20) UNSIGNED NOT NULL,
`client_id` bigint(20) UNSIGNED NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `oauth_personal_access_clients`
--
INSERT INTO `oauth_personal_access_clients` (`id`, `client_id`, `created_at`, `updated_at`) VALUES
(1, 1, '2021-05-24 16:25:58', '2021-05-24 16:25:58');
-- --------------------------------------------------------
--
-- Struktur dari tabel `oauth_refresh_tokens`
--
CREATE TABLE `oauth_refresh_tokens` (
`id` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`access_token_id` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`revoked` tinyint(1) NOT NULL,
`expires_at` datetime DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Struktur dari tabel `orders`
--
-- Book loan orders. `tanggal`/`jam` are Indonesian for date/time.
-- NOTE(review): `status` is free text; the seed data mixes English and
-- Indonesian states (Submitted/Approved/Rejected/Selesai) — consider a
-- lookup table or CHECK constraint. MyISAM also means the secondary
-- user_id/book_id keys (added below) are indexes only, not enforced FKs.
CREATE TABLE `orders` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `user_id` bigint(20) UNSIGNED NOT NULL,
  `book_id` bigint(20) UNSIGNED NOT NULL,
  `status` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `tanggal` date DEFAULT NULL,
  -- Time-of-day stored as text ("22:42" in the seed data)
  `jam` text COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `orders`
--
INSERT INTO `orders` (`id`, `user_id`, `book_id`, `status`, `tanggal`, `jam`, `created_at`, `updated_at`) VALUES
(1, 13, 2, 'Approved', '2021-05-27', '22:42', '2021-05-27 15:42:58', '2021-05-29 13:01:21'),
(2, 13, 1, 'Submitted', '2021-05-28', '10:20', '2021-05-27 16:55:31', '2021-05-27 16:55:31'),
(3, 13, 1, 'Rejected', '2021-05-29', '08:21', '2021-05-28 02:13:07', '2021-05-29 20:10:49'),
(4, 13, 1, 'Selesai', '2021-05-30', '09:00', '2021-05-28 14:39:03', '2021-05-29 15:08:37'),
(5, 3, 2, 'Submitted', '2021-05-29', '21:20', '2021-05-29 14:19:39', '2021-05-29 14:19:39'),
(23, 13, 1, 'Approved', '2021-05-31', '03:15', '2021-05-29 20:10:06', '2021-05-29 20:10:31'),
(22, 13, 1, 'Approved', '2021-05-31', '05:15', '2021-05-29 20:07:18', '2021-05-29 20:09:42');
-- --------------------------------------------------------
--
-- Struktur dari tabel `password_resets`
--
CREATE TABLE `password_resets` (
`email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Struktur dari tabel `ratings`
--
-- User ratings/reviews of food listings (1-5 stars per the seed data).
-- NOTE(review): `food_id`/`user_id` are int(11) while the referenced PKs
-- are bigint(20) UNSIGNED, and no FK constraints exist — confirm intended.
CREATE TABLE `ratings` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `comment` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `rating` int(11) NOT NULL,
  `food_id` int(11) NOT NULL,
  `user_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `ratings`
--
INSERT INTO `ratings` (`id`, `created_at`, `updated_at`, `comment`, `rating`, `food_id`, `user_id`) VALUES
(2, '2021-06-02 15:26:31', '2021-06-02 15:26:31', 'Rasa Mantap nih', 4, 1, 15),
(5, '2021-06-04 18:17:47', '2021-06-04 18:17:47', 'lumayan enak makanan nya, hara terjangkau', 5, 1, 20);
-- --------------------------------------------------------
--
-- Struktur dari tabel `users`
--
-- Application accounts. `password` values are bcrypt hashes ($2y$... in
-- the seed data). `kelas` is Indonesian for class/grade.
CREATE TABLE `users` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email_verified_at` timestamp NULL DEFAULT NULL,
  `password` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL,
  -- Free-text role; seed data uses only 'admin' and 'user'
  `role` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `kelas` text COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  -- NOTE(review): seed values look like FCM push-notification device
  -- tokens — confirm against the client app before relying on this.
  `token` text COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data untuk tabel `users`
--
INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `role`, `kelas`, `token`, `remember_token`, `created_at`, `updated_at`) VALUES
(1, 'Dede', 'dede55@gmail.com', NULL, '$2y$10$oAyfBe3jBsNzkMy4hS3xSueL4UR9QN7rNOIzzWhtNoIyAMi2Bz/w6', 'admin', NULL, NULL, NULL, '2021-05-24 16:24:39', '2021-05-29 17:26:42'),
(2, 'dede', 'dede57@gmail.com', NULL, '$2y$10$91iYILQ7N0g7YvB7WY16m.pWqGlT8P1uA9ov9B9nFqZkEk24Pl6N6', 'admin', NULL, NULL, NULL, '2021-05-25 15:40:03', '2021-05-25 15:40:03'),
(3, 'Alex', 'alex@gmail.com', NULL, '$2y$10$keVa44REodOG.KrdtZHfyu.tTiQ2UHggLH2aqmMgB3YwaxoTnc1aC', 'user', NULL, NULL, NULL, '2021-05-25 15:41:20', '2021-05-25 15:41:20'),
(4, 'Alex', 'alex22@gmail.com', NULL, '$2y$10$oiSJDohKgVgy9XajVmfEU.8.gEWPLpEsZ0tTeZtqkj.KSlXTf18VO', 'user', NULL, NULL, NULL, '2021-05-25 15:47:39', '2021-05-25 15:47:39'),
(5, 'Alex', 'alex223@gmail.com', NULL, '$2y$10$OiW0Coh/wErG4Pm1r3NtgeX8Q4cQNfhAwj1KeGW3q/I0Esg/oAkGq', 'user', NULL, NULL, NULL, '2021-05-25 15:52:11', '2021-05-25 15:52:11'),
(6, 'Alex', 'alex23@gmail.com', NULL, '$2y$10$jd5wHjFl7.Ddr/c5bqFXEuDipU3vlxjNusQs4iupJGq5R46dTRaOO', 'user', NULL, NULL, NULL, '2021-05-25 15:54:57', '2021-05-25 15:54:57'),
(7, 'dede', 'dede570@gmail.com', NULL, '$2y$10$gR0IpApCuabd2P./Ydhr.elkmjbokjbwsTZbSgk83ySmBxueFUx0W', 'admin', NULL, NULL, NULL, '2021-05-25 15:56:35', '2021-05-25 15:56:35'),
(8, 'dede', 'dede50@gmail.com', NULL, '$2y$10$CTAtIpMGlmwKFtN9TpCsDuSKVPTxg6Y6V5YFnsLTuD3r1wlqIP.M6', 'admin', NULL, NULL, NULL, '2021-05-25 15:57:31', '2021-05-25 15:57:31'),
(9, 'dede', 'dede52@gmail.com', NULL, '$2y$10$J6/6FXINdWgXplru8vNXMeQTDf7e9YkAusxznlx1EvmAKi272Ty4u', 'admin', NULL, NULL, NULL, '2021-05-25 16:01:02', '2021-05-25 16:01:02'),
(10, 'Alex', 'alex2s@gmail.com', NULL, '$2y$10$/WENx151sFg7bmJqAif.Ku2BKvHQcVTx2VQJMhQP1LSRwW1RiOllS', 'user', NULL, NULL, NULL, '2021-05-25 16:01:46', '2021-05-25 16:01:46'),
(11, 'Alex', 'alex2n4@gmail.com', NULL, '$2y$10$KiGeScs0Kpqi8popYtQETOxNi4YQDSF1ObJ4PZi1FG/v995UKaUMO', 'user', NULL, NULL, NULL, '2021-05-25 16:04:27', '2021-05-25 16:04:27'),
(12, 'Alex', 'alex2n@gmail.com', NULL, '$2y$10$V3hTcUyJ6vG9vOl8c6Od5.TyXT9UdHkWZVmTDwRKh738kIHdjaz42', 'user', NULL, NULL, NULL, '2021-05-25 16:05:25', '2021-05-25 16:05:25'),
(13, 'fery tes', 'fery@gmail.com', NULL, '$2y$10$W4yDNT.xF1qQ7f15wEBnW.w1xotaD2KGbVMIO0IE5D.U6JuIZk4Ne', 'user', 'XII', 'e07g2zpKQGi3GIYeen9mw5:APA91bFTAF-zO6OJvN6Js7XiTCd0244sKdS1X3eVbIuFAwS348L0IT4KCnrSX9JM_tpg12n1UfnYkjphp2ryQIA_qmoMDwxjUBVbzze5irJZn7VvEUwC6FOD-cFWVx4NB3Ols7_wfNNx', NULL, '2021-05-25 19:34:45', '2021-05-29 18:40:41'),
(14, 'Admin', 'perpus.skadala@gmail.com', NULL, '$2y$10$IotzWVz7ui6qdw0Ry01VwOZUwTYNX5D.l.d1D8u1jO4.E.HB6cQ7G', 'admin', NULL, NULL, NULL, '2021-05-25 20:01:15', '2021-05-25 20:01:15'),
(15, 'Fery', 'fery2@gmail.com', NULL, '$2y$10$yH5oRQ4qQfiIxRl4Ag.ik.ixEFlo8E5mMMYWdVW.QT2Z0SWvTrwn.', 'user', NULL, NULL, NULL, '2021-05-28 10:03:22', '2021-05-28 10:03:22'),
(16, 'dede', 'dede27@gmail.com', NULL, '$2y$10$yiLGg1LeqgjJVtvpwbrTMuZ4xAwhz3E14ahVkqe5b1t.3YPeJq5.u', 'admin', NULL, NULL, NULL, '2021-05-28 10:12:34', '2021-05-28 10:12:34'),
(17, 'dede', 'dede270@gmail.com', NULL, '$2y$10$.EG68xLj1UN0g4T6ydZqdOo0o9XlsZY/0dEFTgAUXB5ZpjkHCeT7y', 'admin', 'XII', NULL, NULL, '2021-05-28 10:13:46', '2021-05-28 10:13:46'),
(18, 'dede123', 'dede@gmail.com', NULL, '$2y$10$QP.zjKlgY9AbH9DK33VCheP1Vi.Snjv63E4P7.X0djBoTs5ARsZiy', 'user', 'dede@gmail.com', NULL, NULL, '2021-05-28 14:36:35', '2021-05-28 14:36:35'),
(19, 'admin test', 'admin@gmail.com', NULL, '$2y$10$5rXDqM1tRWXVRJFUdpQlb.Zo/ESASqqac4L4QgUUpIfXJq5vTAyxe', 'admin', NULL, NULL, NULL, '2021-05-28 14:53:15', '2021-05-28 14:53:15');
--
-- Indexes for dumped tables
--
--
-- Indeks untuk tabel `books`
--
ALTER TABLE `books`
ADD PRIMARY KEY (`id`);
--
-- Indeks untuk tabel `cats`
--
ALTER TABLE `cats`
ADD PRIMARY KEY (`id`);
--
-- Indeks untuk tabel `failed_jobs`
--
ALTER TABLE `failed_jobs`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `failed_jobs_uuid_unique` (`uuid`);
--
-- Indeks untuk tabel `food`
--
ALTER TABLE `food`
ADD PRIMARY KEY (`id`);
--
-- Indeks untuk tabel `migrations`
--
ALTER TABLE `migrations`
ADD PRIMARY KEY (`id`);
--
-- Indeks untuk tabel `oauth_access_tokens`
--
ALTER TABLE `oauth_access_tokens`
ADD PRIMARY KEY (`id`),
ADD KEY `oauth_access_tokens_user_id_index` (`user_id`);
--
-- Indeks untuk tabel `oauth_auth_codes`
--
ALTER TABLE `oauth_auth_codes`
ADD PRIMARY KEY (`id`),
ADD KEY `oauth_auth_codes_user_id_index` (`user_id`);
--
-- Indeks untuk tabel `oauth_clients`
--
ALTER TABLE `oauth_clients`
ADD PRIMARY KEY (`id`),
ADD KEY `oauth_clients_user_id_index` (`user_id`);
--
-- Indeks untuk tabel `oauth_personal_access_clients`
--
ALTER TABLE `oauth_personal_access_clients`
ADD PRIMARY KEY (`id`);
--
-- Indeks untuk tabel `oauth_refresh_tokens`
--
ALTER TABLE `oauth_refresh_tokens`
ADD PRIMARY KEY (`id`),
ADD KEY `oauth_refresh_tokens_access_token_id_index` (`access_token_id`);
--
-- Indeks untuk tabel `orders`
--
ALTER TABLE `orders`
ADD PRIMARY KEY (`id`),
ADD KEY `orders_user_id_foreign` (`user_id`),
ADD KEY `orders_book_id_foreign` (`book_id`);
--
-- Indeks untuk tabel `password_resets`
--
ALTER TABLE `password_resets`
ADD KEY `password_resets_email_index` (`email`);
--
-- Indeks untuk tabel `ratings`
--
ALTER TABLE `ratings`
ADD PRIMARY KEY (`id`);
--
-- Indeks untuk tabel `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `users_email_unique` (`email`);
--
-- AUTO_INCREMENT untuk tabel yang dibuang
--
--
-- AUTO_INCREMENT untuk tabel `books`
--
ALTER TABLE `books`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT untuk tabel `cats`
--
ALTER TABLE `cats`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT untuk tabel `failed_jobs`
--
ALTER TABLE `failed_jobs`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT untuk tabel `food`
--
ALTER TABLE `food`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT untuk tabel `migrations`
--
ALTER TABLE `migrations`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=15;
--
-- AUTO_INCREMENT untuk tabel `oauth_clients`
--
ALTER TABLE `oauth_clients`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT untuk tabel `oauth_personal_access_clients`
--
ALTER TABLE `oauth_personal_access_clients`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT untuk tabel `orders`
--
ALTER TABLE `orders`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=24;
--
-- AUTO_INCREMENT untuk tabel `ratings`
--
ALTER TABLE `ratings`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT untuk tabel `users`
--
ALTER TABLE `users`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=20;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 41.718045 | 849 | 0.702037 |
b5c0c833d88bbd8aa7dec2996abb604d17424dc3 | 549 | sql | SQL | LogIn.sql | fesabelilla/Park-Kori | ef39a0059d3c49666f557df4b121e899e157cefa | [
"MIT"
] | null | null | null | LogIn.sql | fesabelilla/Park-Kori | ef39a0059d3c49666f557df4b121e899e157cefa | [
"MIT"
] | null | null | null | LogIn.sql | fesabelilla/Park-Kori | ef39a0059d3c49666f557df4b121e899e157cefa | [
"MIT"
] | null | null | null | clear screen;
set serveroutput on;
set verify off;
declare
userName users.uname@site_link%TYPE ;
pass users.password@site_link%TYPE ;
lin number := 0 ;
--s users.id@site_link%TYPE := 0 ;
loc location.area@site_link%TYPE;
locid number;
begin
userName := '&x';
pass := '&y';
lin := myPackage.login(userName,pass);
dbms_output.put_line('login successful . User Id : '||lin);
--for searching area
loc := '&z';
locid := SearchArea.searchPlace(loc);
--dbms_output.put_line(locid);
SearchArea.ShowParkingSloat(locid);
end;
/ | 18.931034 | 62 | 0.686703 |
f4c35c6e933b597fa72216f50c3e4eff3a79e115 | 908 | kt | Kotlin | src/test/kotlin/raptor/commands/admin/ShutdownCommand.kt | Nallaka/raptor | 541594afbb321405b5679e5042cdac6069c24b97 | [
"MIT"
] | null | null | null | src/test/kotlin/raptor/commands/admin/ShutdownCommand.kt | Nallaka/raptor | 541594afbb321405b5679e5042cdac6069c24b97 | [
"MIT"
] | null | null | null | src/test/kotlin/raptor/commands/admin/ShutdownCommand.kt | Nallaka/raptor | 541594afbb321405b5679e5042cdac6069c24b97 | [
"MIT"
] | 1 | 2017-11-01T03:14:51.000Z | 2017-11-01T03:14:51.000Z | package raptor.commands.admin
import raptor.RaptorBot
import raptor.handlers.CommandHandler
import raptor.utils.BotProperties
import raptor.utils.commandmeta.Command
import raptor.utils.commandmeta.ICommand
import raptor.utils.permissionmeta.PermissionLevel
@ICommand(
name = "Shutdown",
emoji = ":radio_button:",
description = "Shutsdown the bot",
usage = "shutdown",
aliases = ["shutdown", "kill"],
commandPermissionLevel = PermissionLevel.OWNER,
isOwnerOnly = true
)
class ShutdownCommand : Command() {
override fun runCommand(args: Array<String>, commandContainer: CommandHandler.CommandContainer) {
embeddedMessageBuilder.addField("Shutting Down :radio_button:", "Bye", true)
sendMessage(commandContainer.event)
BotProperties.botConfigYamlReader.close()
RaptorBot.jda.shutdown()
System.exit(0)
}
} | 33.62963 | 101 | 0.715859 |
3c3e9cb4a40c12dbd25a4c4c19530600ba4b9fd3 | 3,073 | rs | Rust | backend/src/models.rs | notgull/dnd-calculator | b4bf787e349ce84d0e29a1bf6b4107aa0060318b | [
"MIT"
] | 1 | 2021-01-01T10:24:16.000Z | 2021-01-01T10:24:16.000Z | backend/src/models.rs | not-a-seagull/dnd-calculator | b4bf787e349ce84d0e29a1bf6b4107aa0060318b | [
"MIT"
] | 3 | 2021-03-10T13:14:01.000Z | 2021-10-06T14:51:38.000Z | backend/src/models.rs | notgull/dnd-calculator | b4bf787e349ce84d0e29a1bf6b4107aa0060318b | [
"MIT"
] | null | null | null | /*
* Copyright 2020 not_a_seagull
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
* Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
use serde::{Deserialize, Serialize};
use crate::schema::{actiontakers as action_takers, items, moves, room as rooms, templates};
#[derive(Debug, Clone, Serialize, Deserialize, Queryable)]
pub struct ActionTaker {
pub room_id: i32,
pub name: String,
pub x: f64,
pub y: f64,
pub armor_class: i32,
pub health: i32,
pub dead: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize, Queryable)]
pub struct Room {
pub id: i32,
}
#[derive(Debug, Clone, Serialize, Deserialize, Queryable)]
pub struct Move {
pub id: i32,
pub name: String,
hit_type: i32,
pub hit_radius: Option<i32>,
pub dice_count: i32,
pub dice_type: i32,
pub dice_modifier: i32,
stat_boost: i32,
saving_throw: i32,
pub effect: Option<i32>,
pub effect_severity: Option<i32>,
}
#[derive(Debug, Clone, Serialize, Deserialize, Queryable)]
pub struct Item {
pub id: i32,
pub name: String,
pub description: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, Insertable)]
#[table_name = "items"]
pub struct NewItem {
pub name: String,
pub description: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, AsChangeset)]
#[table_name = "items"]
pub struct ChangedItem {
pub name: Option<String>,
pub description: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize, Queryable)]
pub struct Template {
pub id: i32,
pub name: String,
pub health: i32,
pub armor_class: i32,
pub description: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, Insertable)]
#[table_name = "templates"]
pub struct NewTemplate {
pub name: String,
pub health: i32,
pub armor_class: i32,
pub description: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, AsChangeset)]
#[table_name = "templates"]
pub struct ChangedTemplate {
pub name: Option<String>,
pub health: Option<i32>,
pub armor_class: Option<i32>,
pub description: Option<String>,
}
| 30.127451 | 112 | 0.712008 |
9a42d843a4aee377bcc08e2b9fea947c42f045bc | 7,763 | asm | Assembly | Transynther/x86/_processed/NONE/_xt_/i3-7100_9_0x84_notsx.log_21829_1746.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 9 | 2020-08-13T19:41:58.000Z | 2022-03-30T12:22:51.000Z | Transynther/x86/_processed/NONE/_xt_/i3-7100_9_0x84_notsx.log_21829_1746.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 1 | 2021-04-29T06:29:35.000Z | 2021-05-13T21:02:30.000Z | Transynther/x86/_processed/NONE/_xt_/i3-7100_9_0x84_notsx.log_21829_1746.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 3 | 2020-07-14T17:07:07.000Z | 2022-03-21T01:12:22.000Z | .global s_prepare_buffers
s_prepare_buffers:
push %r10
push %r14
push %r8
push %r9
push %rax
push %rcx
push %rdi
push %rsi
lea addresses_D_ht+0x117e6, %rsi
lea addresses_normal_ht+0xb0e6, %rdi
nop
nop
nop
nop
nop
dec %r10
mov $85, %rcx
rep movsw
nop
nop
nop
nop
nop
and %r14, %r14
lea addresses_A_ht+0xa926, %r9
nop
nop
nop
nop
nop
add %rcx, %rcx
movl $0x61626364, (%r9)
nop
nop
xor $24872, %rcx
lea addresses_WT_ht+0x1ed26, %r9
nop
nop
nop
nop
dec %r8
movb (%r9), %cl
inc %r9
lea addresses_normal_ht+0x1e926, %rcx
nop
nop
inc %rsi
movb (%rcx), %r8b
nop
nop
nop
nop
sub $32679, %r8
lea addresses_normal_ht+0xa326, %rcx
nop
nop
nop
xor $1921, %r8
mov (%rcx), %edi
cmp %rsi, %rsi
lea addresses_WT_ht+0x4326, %rsi
lea addresses_UC_ht+0xf203, %rdi
nop
dec %rax
mov $12, %rcx
rep movsw
nop
nop
nop
inc %r8
lea addresses_UC_ht+0x56f2, %r14
nop
nop
nop
nop
lfence
mov (%r14), %rdi
xor %r10, %r10
lea addresses_UC_ht+0xf326, %rsi
lea addresses_UC_ht+0xb89a, %rdi
nop
nop
and $32134, %r8
mov $4, %rcx
rep movsb
nop
nop
cmp %r10, %r10
lea addresses_normal_ht+0x45f2, %rdi
nop
lfence
movb $0x61, (%rdi)
mfence
lea addresses_D_ht+0x20e6, %rsi
lea addresses_WC_ht+0x12626, %rdi
nop
nop
nop
and $33782, %r9
mov $126, %rcx
rep movsw
nop
nop
nop
nop
nop
and $23207, %rdi
lea addresses_normal_ht+0xed26, %rsi
lea addresses_UC_ht+0x9a6, %rdi
clflush (%rdi)
nop
nop
nop
nop
nop
sub $28423, %r14
mov $34, %rcx
rep movsq
nop
nop
nop
nop
and $64399, %r10
lea addresses_normal_ht+0xae26, %rsi
lea addresses_D_ht+0x10846, %rdi
nop
nop
xor $6569, %r10
mov $122, %rcx
rep movsl
nop
nop
nop
sub %r8, %r8
lea addresses_UC_ht+0x18f26, %r9
nop
nop
nop
nop
nop
sub $6719, %rdi
mov $0x6162636465666768, %rcx
movq %rcx, (%r9)
nop
and $1020, %r10
lea addresses_WT_ht+0x83e6, %r8
xor %r9, %r9
vmovups (%r8), %ymm2
vextracti128 $0, %ymm2, %xmm2
vpextrq $1, %xmm2, %r10
nop
add $65172, %r9
lea addresses_WT_ht+0x16c86, %r8
xor %r9, %r9
vmovups (%r8), %ymm5
vextracti128 $1, %ymm5, %xmm5
vpextrq $0, %xmm5, %rsi
sub %r9, %r9
pop %rsi
pop %rdi
pop %rcx
pop %rax
pop %r9
pop %r8
pop %r14
pop %r10
ret
.global s_faulty_load
s_faulty_load:
push %r11
push %r13
push %r14
push %r15
push %rcx
push %rdi
// Faulty Load
lea addresses_UC+0x17b26, %rcx
nop
mfence
movups (%rcx), %xmm6
vpextrq $1, %xmm6, %r15
lea oracles, %r11
and $0xff, %r15
shlq $12, %r15
mov (%r11,%r15,1), %r15
pop %rdi
pop %rcx
pop %r15
pop %r14
pop %r13
pop %r11
ret
/*
<gen_faulty_load>
[REF]
{'src': {'type': 'addresses_UC', 'same': False, 'size': 16, 'congruent': 0, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
[Faulty Load]
{'src': {'type': 'addresses_UC', 'same': True, 'size': 16, 'congruent': 0, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
<gen_prepare_buffer>
{'src': {'type': 'addresses_D_ht', 'congruent': 5, 'same': False}, 'dst': {'type': 'addresses_normal_ht', 'congruent': 6, 'same': False}, 'OP': 'REPM'}
{'dst': {'type': 'addresses_A_ht', 'same': False, 'size': 4, 'congruent': 8, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'src': {'type': 'addresses_WT_ht', 'same': False, 'size': 1, 'congruent': 8, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_normal_ht', 'same': False, 'size': 1, 'congruent': 9, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_normal_ht', 'same': False, 'size': 4, 'congruent': 9, 'NT': True, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_WT_ht', 'congruent': 7, 'same': False}, 'dst': {'type': 'addresses_UC_ht', 'congruent': 0, 'same': False}, 'OP': 'REPM'}
{'src': {'type': 'addresses_UC_ht', 'same': False, 'size': 8, 'congruent': 0, 'NT': True, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_UC_ht', 'congruent': 11, 'same': False}, 'dst': {'type': 'addresses_UC_ht', 'congruent': 0, 'same': False}, 'OP': 'REPM'}
{'dst': {'type': 'addresses_normal_ht', 'same': False, 'size': 1, 'congruent': 1, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'src': {'type': 'addresses_D_ht', 'congruent': 6, 'same': False}, 'dst': {'type': 'addresses_WC_ht', 'congruent': 4, 'same': False}, 'OP': 'REPM'}
{'src': {'type': 'addresses_normal_ht', 'congruent': 9, 'same': False}, 'dst': {'type': 'addresses_UC_ht', 'congruent': 7, 'same': False}, 'OP': 'REPM'}
{'src': {'type': 'addresses_normal_ht', 'congruent': 7, 'same': False}, 'dst': {'type': 'addresses_D_ht', 'congruent': 4, 'same': False}, 'OP': 'REPM'}
{'dst': {'type': 'addresses_UC_ht', 'same': False, 'size': 8, 'congruent': 10, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'src': {'type': 'addresses_WT_ht', 'same': False, 'size': 32, 'congruent': 5, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_WT_ht', 'same': False, 'size': 32, 'congruent': 3, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'37': 21829}
37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 
37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37
*/
| 34.811659 | 2,999 | 0.658895 |
40ec25b7e99fa6334b636594ac76ef61617dbe80 | 348 | py | Python | docfmt/docstring.py | skasch/docfmt | 9eeb1d6fc614424cb02125c95ef4a5758104535b | [
"MIT"
] | null | null | null | docfmt/docstring.py | skasch/docfmt | 9eeb1d6fc614424cb02125c95ef4a5758104535b | [
"MIT"
] | null | null | null | docfmt/docstring.py | skasch/docfmt | 9eeb1d6fc614424cb02125c95ef4a5758104535b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
The docstring module.
Represent a docstring.
Created by Romain Mondon-Cancel on 2019/10/08 20:14.
"""
import abc
class Docstring(abc.ABC):
@abc.abstractmethod
def __init__(self, docstring: str) -> None:
pass
@abc.abstractmethod
def __str__(self) -> str:
pass
| 15.818182 | 52 | 0.640805 |
7604bdf1355f850126e9946daf187ffbeb877b0d | 1,634 | go | Go | fsm/fsm.go | KnifeFly/toykv | 6e5d7c2cbcf52c4828e9daa0eaa40d16984db37d | [
"Apache-2.0"
] | null | null | null | fsm/fsm.go | KnifeFly/toykv | 6e5d7c2cbcf52c4828e9daa0eaa40d16984db37d | [
"Apache-2.0"
] | null | null | null | fsm/fsm.go | KnifeFly/toykv | 6e5d7c2cbcf52c4828e9daa0eaa40d16984db37d | [
"Apache-2.0"
] | null | null | null | package fsm
import (
"encoding/json"
"fmt"
"io"
"log"
"os"
"strings"
"github.com/hashicorp/raft"
)
// RaftFsm Raft fsm
type RaftFsm struct {
store Store
logger *log.Logger
}
// NewFSM return new instances of raft fsm
func NewFSM(dir string) *RaftFsm {
os.MkdirAll(dir, 0755)
return &RaftFsm{
store: NewMemStore(),
logger: log.New(os.Stderr, "[fsm] ", log.LstdFlags|log.Lshortfile),
}
}
// Get get the value of specified value
func (f *RaftFsm) Get(key string) (string, error) {
v, err := f.store.Get(key)
if err != nil {
return "", err
}
return v, nil
}
// Apply applies a Raft log entry to the key-value store.
func (f *RaftFsm) Apply(l *raft.Log) interface{} {
var c KeyValueCommand
if err := json.Unmarshal(l.Data, &c); err != nil {
err := fmt.Errorf("failed to unmarshal raft log")
f.logger.Println(err.Error())
return err
}
switch strings.ToUpper(c.Operation) {
case "SET":
return f.applySet(c.Key, c.Value)
case "DEL":
return f.applyDel(c.Key)
default:
err := fmt.Errorf("operation not support")
f.logger.Println(err.Error())
return err
}
}
// applySet apply set operation
func (f *RaftFsm) applySet(key, value string) interface{} {
f.logger.Printf("apply set %s to %s", key, value)
return f.store.Set(key, value)
}
// applyDel apply del operation
func (f *RaftFsm) applyDel(key string) interface{} {
f.logger.Printf("apply del %s", key)
_, err := f.store.Delete(key)
return err
}
// Restore Restore
func (f *RaftFsm) Restore(rc io.ReadCloser) error {
return nil
}
// Snapshot Snapshot
func (f *RaftFsm) Snapshot() (raft.FSMSnapshot, error) {
return nil, nil
}
| 19.926829 | 69 | 0.675031 |
25e7803de1e8ec21af5221b222127a64aa9b266e | 1,772 | swift | Swift | CorporateCup/Corporate-Cup/Core/Cells/PalmaresRankTableViewCell.swift | wesleyaurore/Groupe6_WebP2019 | 5eacad50ae40000e782c2c7d646c2bb824020d59 | [
"MIT"
] | null | null | null | CorporateCup/Corporate-Cup/Core/Cells/PalmaresRankTableViewCell.swift | wesleyaurore/Groupe6_WebP2019 | 5eacad50ae40000e782c2c7d646c2bb824020d59 | [
"MIT"
] | null | null | null | CorporateCup/Corporate-Cup/Core/Cells/PalmaresRankTableViewCell.swift | wesleyaurore/Groupe6_WebP2019 | 5eacad50ae40000e782c2c7d646c2bb824020d59 | [
"MIT"
] | null | null | null | //
// RankingTableViewCell.swift
// Corporate-Cup
//
// Created by wesley on 13/05/2019.
// Copyright © 2019 corporate-cup. All rights reserved.
//
import UIKit
class PalmaresRankTableViewCell: UITableViewCell {
// Properties
@IBOutlet weak var rank: UILabel!
@IBOutlet weak var avatar: UIImageView!
@IBOutlet weak var playerName: UILabel!
@IBOutlet weak var score: UILabel!
@IBOutlet weak var star: UIImageView!
let user:User = UserDefaults.getTheUserStored() ?? User()
// Initialization
func initializeRank(player: Player, rank: Int) {
switch rank {
case 1:
self.rank.backgroundColor = UIColor.FlatColor.sunflower
self.rank.textColor = UIColor.white
case 2:
self.rank.backgroundColor = UIColor.FlatColor.orangePeel
self.rank.textColor = UIColor.white
case 3:
self.rank.backgroundColor = UIColor.FlatColor.grey
self.rank.textColor = UIColor.white
default:
self.rank.backgroundColor = UIColor.FlatColor.acideGrey
self.rank.textColor = UIColor.FlatColor.darkGrey
}
if player.id == user.id {
playerName.font = UIFont.systemFont(ofSize: self.playerName.font.pointSize, weight: .bold)
}
self.rank.text = String(rank)
avatar = player.avatar
playerName.text = player.name
score.text = String(player.score)
}
override func awakeFromNib() {
super.awakeFromNib()
}
override func prepareForReuse() {
super.prepareForReuse()
playerName.font = UIFont.systemFont(ofSize: self.playerName.font.pointSize, weight: .regular)
}
}
| 29.04918 | 102 | 0.621896 |
4298ad902896b7f305c7773aa62ad22dd4ce68aa | 457 | asm | Assembly | oeis/101/A101669.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 11 | 2021-08-22T19:44:55.000Z | 2022-03-20T16:47:57.000Z | oeis/101/A101669.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 9 | 2021-08-29T13:15:54.000Z | 2022-03-09T19:52:31.000Z | oeis/101/A101669.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 3 | 2021-08-22T20:56:47.000Z | 2021-09-29T06:26:12.000Z | ; A101669: Fixed point of morphism 0 -> 01, 1 -> 20, 2 -> 01.
; Submitted by Jon Maiga
; 0,1,2,0,0,1,0,1,0,1,2,0,0,1,2,0,0,1,2,0,0,1,0,1,0,1,2,0,0,1,0,1,0,1,2,0,0,1,0,1,0,1,2,0,0,1,2,0,0,1,2,0,0,1,0,1,0,1,2,0,0,1,2,0,0,1,2,0,0,1,0,1,0,1,2,0,0,1,2,0,0,1,2,0,0,1,0,1,0,1,2,0,0,1,0,1,0,1,2,0
mov $2,$0
mod $0,2
add $0,12
mul $0,2
seq $2,39963 ; The period-doubling sequence A035263 repeated.
add $2,1
bin $0,$2
sub $1,$0
mov $0,$1
sub $0,7
mod $0,10
add $0,3
| 26.882353 | 201 | 0.571116 |
ff7b2766a4c7cc1d2efaf4ad4e1ea155b6a11c11 | 859 | ps1 | PowerShell | Create Certificate.ps1 | nhs-digital-gp-it-futures/BuyingCatalogueIdentity | d30b923e3ba1bce059e40ddd2366623d34ec1fd1 | [
"MIT"
] | 1 | 2020-07-03T12:08:52.000Z | 2020-07-03T12:08:52.000Z | Create Certificate.ps1 | nhs-digital-gp-it-futures/BuyingCatalogueIdentity | d30b923e3ba1bce059e40ddd2366623d34ec1fd1 | [
"MIT"
] | 119 | 2020-03-12T11:08:33.000Z | 2021-12-13T11:08:08.000Z | Create Certificate.ps1 | nhs-digital-gp-it-futures/BuyingCatalogueIdentity | d30b923e3ba1bce059e40ddd2366623d34ec1fd1 | [
"MIT"
] | 1 | 2020-07-03T12:08:54.000Z | 2020-07-03T12:08:54.000Z | $mkcertVersion = 'v1.4.3';
$mkcertDir = Get-Location | Join-Path -ChildPath 'mkcert';
$outFile = Join-Path $mkcertDir -ChildPath 'mkcert.exe';
New-Item -ItemType Directory -Path $mkcertDir -ErrorAction SilentlyContinue;
if (-not(Test-Path $outFile -PathType Leaf))
{
Invoke-WebRequest -Uri "https://github.com/FiloSottile/mkcert/releases/download/$mkcertVersion/mkcert-$mkcertVersion-windows-amd64.exe" -OutFile $outFile;
}
$installMkcert = $outFile + ' -install';
Invoke-Expression $installMkcert;
$certFile = Join-Path $mkcertDir -ChildPath 'localhost.pfx';
$generateCert = $outFile + " -pkcs12 -p12-file $certFile localhost host.docker.internal 127.0.0.1";
Invoke-Expression $generateCert;
$getCAROOT = $outFile + ' -CAROOT';
$rootCert = Invoke-Expression $getCAROOT | Join-Path -ChildPath 'rootCA.pem';
Copy-Item $rootCert -Destination $mkcertDir;
| 39.045455 | 156 | 0.750873 |
dfee80902caed69c98d8eca6aee3ec1d930b175f | 974 | ts | TypeScript | src/modules/nav-persistence/nav-persistence-adapter.service.ts | AlexSamarkin/alexsamarkin.com-server | d10020209592f7ac21e4885451f47e5a3fbb0cb8 | [
"MIT"
] | null | null | null | src/modules/nav-persistence/nav-persistence-adapter.service.ts | AlexSamarkin/alexsamarkin.com-server | d10020209592f7ac21e4885451f47e5a3fbb0cb8 | [
"MIT"
] | null | null | null | src/modules/nav-persistence/nav-persistence-adapter.service.ts | AlexSamarkin/alexsamarkin.com-server | d10020209592f7ac21e4885451f47e5a3fbb0cb8 | [
"MIT"
] | null | null | null | import {LoadNavPort} from "../../domain/ports/out/load-navigation.port";
import {GraphqlRequestService} from "../graphql-request/graphql-request.service";
import {Locale} from "../../domain/entities/locale";
import {Navigation} from "../../domain/entities/navigation";
import navsQuery from "../../graphql/queries/external/navs.graphql";
import {Injectable} from "@nestjs/common";
@Injectable()
export class NavPersistenceAdapterService implements LoadNavPort {
constructor(private graphQLRequestService: GraphqlRequestService) {}
async loadNav(locale?: Locale): Promise<Navigation[]> {
this.graphQLRequestService.setLocale(locale);
const data = await this.graphQLRequestService.query(navsQuery);
if (!data || !data.navs) {
throw new Error('no nav found')
}
return data.navs.map((nav) => {
return new Navigation(
nav.title,
nav.link
);
});
}
} | 37.461538 | 81 | 0.655031 |
39790030953dd3a22ebd5c76118c429f791a4af3 | 770 | html | HTML | dest/html/b.html | build-future/build-future-simple-template | 312759721e2d92b09088f6b0e38da310f7f54cba | [
"MIT"
] | null | null | null | dest/html/b.html | build-future/build-future-simple-template | 312759721e2d92b09088f6b0e38da310f7f54cba | [
"MIT"
] | null | null | null | dest/html/b.html | build-future/build-future-simple-template | 312759721e2d92b09088f6b0e38da310f7f54cba | [
"MIT"
] | null | null | null | <!DOCTYPE html><html lang=en><meta charset=UTF-8><title>Build Future -- SimpleTemplate</title><link href=css/animate.css rel=stylesheet><link href=css/index.css rel=stylesheet><div class="spin block"><span style="vertical-align: top;line-height: 50px">S</span></div><div class="spin block"><span style="vertical-align: top;line-height: 50px">A</span></div><div class="spin block"><span style="vertical-align: top;line-height: 50px">M</span></div><div class="spin block"><span style="vertical-align: top;line-height: 50px">U</span></div><div class="spin block"><span style="vertical-align: top;line-height: 50px">E</span></div><div class="spin block"><span style="vertical-align: top;line-height: 50px">L</span></div><script src=js/index.js type=text/javascript></script> | 770 | 770 | 0.731169 |
20f2f51da2866f362a55f7b04daa15f2202dd958 | 878 | css | CSS | TributePage/css/style.css | mtevfik41/FCC-ResponsiveDesignProjects | 92170b0dcf5dc3f3f5a30c54d1b4b540378f7b89 | [
"MIT",
"Unlicense"
] | null | null | null | TributePage/css/style.css | mtevfik41/FCC-ResponsiveDesignProjects | 92170b0dcf5dc3f3f5a30c54d1b4b540378f7b89 | [
"MIT",
"Unlicense"
] | null | null | null | TributePage/css/style.css | mtevfik41/FCC-ResponsiveDesignProjects | 92170b0dcf5dc3f3f5a30c54d1b4b540378f7b89 | [
"MIT",
"Unlicense"
] | null | null | null | @import url('https://fonts.googleapis.com/css2?family=Roboto&display=swap');
* {
margin: 0;
padding: 0;
}
body {
background-color: #b59c89;
font-family: 'Roboto', sans-serif;
line-height: 1.2;
}
#main {
text-align: center;
margin: 2rem;
}
#main #title {
padding: 2rem;
}
#main #title h1,
#main #title h3,
#main #title h4 {
padding: 0.5rem 0;
}
#image {
margin: auto;
max-width: 70%;
height: auto;
display: block;
}
#img-caption p {
padding-top: 1.5rem;
}
#tribute-info h2 {
padding: 2rem 0;
}
ul {
list-style-type: none;
text-align: center;
margin: auto;
width: 60%;
line-height: 1.3;
}
ul li h3 {
padding: 1.5rem 0;
}
ul li p {
padding: 1rem 0;
}
.words {
border-top: 3px solid #333;
border-bottom: 3px solid #333;
padding: 3rem 0;
}
#more-about {
padding: 1.5rem 0;
}
#more-about a {
text-decoration: none;
}
| 12.194444 | 76 | 0.612756 |
4c76dafe021278dd8ad2362ad0fc11d988983e95 | 149 | sql | SQL | integration_tests/projects/001_flow_run_with_selectors/models/agent_wait_time.sql | emekdahl/fal | 8a6670a2a1940cdd3e838073894d643e1050b654 | [
"Apache-2.0"
] | 360 | 2021-11-10T18:43:03.000Z | 2022-03-31T14:57:19.000Z | integration_tests/projects/001_flow_run_with_selectors/models/agent_wait_time.sql | emekdahl/fal | 8a6670a2a1940cdd3e838073894d643e1050b654 | [
"Apache-2.0"
] | 64 | 2021-11-11T22:05:53.000Z | 2022-03-30T18:14:05.000Z | integration_tests/projects/001_flow_run_with_selectors/models/agent_wait_time.sql | emekdahl/fal | 8a6670a2a1940cdd3e838073894d643e1050b654 | [
"Apache-2.0"
] | 9 | 2021-12-17T02:49:36.000Z | 2022-03-31T14:57:21.000Z | {{ config(materialized='table', tags='daily') }}
with source_data as (
select y, ds from {{ ref('time_series') }}
)
select *
from source_data
| 14.9 | 48 | 0.651007 |
e7ea53499a1036ff66a19b087caa1e1bd3155a2f | 14,582 | lua | Lua | lua/gDuel/vgui/cl_vgui.lua | DidVaitel/gDuels | 5bd19e5ca2d4402d438e3a448679bf05dce38b7d | [
"Apache-2.0"
] | null | null | null | lua/gDuel/vgui/cl_vgui.lua | DidVaitel/gDuels | 5bd19e5ca2d4402d438e3a448679bf05dce38b7d | [
"Apache-2.0"
] | null | null | null | lua/gDuel/vgui/cl_vgui.lua | DidVaitel/gDuels | 5bd19e5ca2d4402d438e3a448679bf05dce38b7d | [
"Apache-2.0"
] | 2 | 2020-05-16T09:10:24.000Z | 2022-02-27T23:35:56.000Z | --[[
Created by DidVaitel (http://steamcommunity.com/profiles/76561198108670811)
]]
-- Ensure the gDuel.vgui namespace table exists without clobbering any
-- helpers (e.g. DrawBox, DrawBlur, txt, scroll, txtentry) that other
-- addon files may have already registered on it.
gDuel.vgui = gDuel.vgui or {}
local function gDuelMenu()
local w, h = ScrW() * .7, ScrH() * .7
local selectedPlayer = nil
local selectedDuel = 1
local curWager = 0
local Leaders = net.ReadTable()
// Our main frame
local bg = vgui.Create('DFrame')
bg:MakePopup()
bg:SetSize(w, h)
bg:Center()
bg:SetDraggable( false )
bg:ShowCloseButton(false)
bg:SetTitle("")
bg.Paint = function(self, W, H)
gDuel.vgui.DrawBlur( self, 8 )
gDuel.vgui.DrawBox(0.5, 0, W , 50, Color(0, 0, 0, 170))
gDuel.vgui.DrawBox(0, 0, W , H, Color(0, 0, 0, 180))
gDuel.vgui.DrawBox(20, 70, W-40 , H-89, Color(0, 0, 0, 150))
gDuel.vgui.DrawOutlinedBox( 0, 0, W, H )
gDuel.vgui.DrawOutlinedBox( 20, 70, W-40, H-89 )
gDuel.vgui.DrawOutlinedBox( 0, 0, W, 50 )
end
local MainDop = vgui.Create( "DPanel", bg)
MainDop:SetPos( 20, 70 )
MainDop:SetSize(bg:GetWide()/2-40,bg:GetTall()-89)
MainDop.Paint = function(self,w,h)
end
local MainAdditional = vgui.Create( "DPanel", bg)
MainAdditional:SetPos( MainDop:GetWide() + 20, 75 )
MainAdditional:SetSize(MainDop:GetWide() + 35,bg:GetTall()-105)
MainAdditional.Paint = function(self,w,h)
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h-60 )
end
local function GuiButton(name, parent, x, y, w, h, doclick, btnclr, removeonclick)
local btn = parent:Add 'DButton'
btn:SetSize(w, h)
btn:SetPos(x, y)
btn:SetText(name)
btn:SetTextColor(Color(255,255,255,200))
btn.DoClick = function()
doclick(btn)
if removeonclick then
parent:Remove()
end
end
btn.Paint = function(self,w,h)
gDuel.vgui.DrawBox(0, 0, w , h, Color(10, 189, 227, 100))
gDuel.vgui.DrawOutlinedBox( 0, 0,w, h )
end
return btn
end
if !table.IsEmpty(DarkRP) then
local wager = gDuel.vgui.txtentry(gDuel.Translate("EnterBetVgui")..""..gDuel.minBet .. "".. gDuel.Translate("EnterBetVgui2") .."".. gDuel.maxBet .. ')', MainAdditional, 0, MainAdditional:GetTall()-45, MainAdditional:GetWide(), 35, function(self)
curWager = tonumber(self:GetValue())
end)
end
local plList = gDuel.vgui.scroll(MainDop, 5, 30, MainDop:GetWide() - 10, MainDop:GetTall() - 35) // Players slider
local displayPlayers = function(id) // let's get players
plList:Clear()
local plys = {}
if id and id ~= '' then
for _, pl in ipairs(player.GetAll()) do
if string.find(pl:Name():lower(), id, 1, true) then
table.insert(plys, pl)
end
end
else
plys = player.GetAll()
end
local curY = 0
for k,v in pairs(plys) do
if v == LocalPlayer() then continue end
local pnl = plList:Add 'DPanel'
pnl:SetSize(plList:GetWide(), 50)
pnl:SetPos(0, curY)
avatar = pnl:Add 'AvatarImage'
avatar:SetPos(7,5)
avatar:SetSize(40, 40)
avatar:SetPlayer(v)
function pnl:Paint(w, h)
if not IsValid(v) then return end
gDuel.vgui.DrawBox(0, 0, w, h, Color(20, 20, 20, 100))
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
gDuel.vgui.txt(gDuel.Translate("Name")..'' .. v:Name(), 15, 55, 0)
if !table.IsEmpty(DarkRP) then
gDuel.vgui.txt(gDuel.Translate("Money")..'' .. DarkRP.formatMoney(v:getDarkRPVar 'money'), 15, 55, 12)
end
end
pnl.select = GuiButton(gDuel.Translate("SendChallenge"), pnl, 55, pnl:GetTall() - (17 + 5), pnl:GetWide() - 60, 17, function(self)
if self.clickable then
net.Start 'gDuel.SendRequest'
net.WriteEntity(v)
net.WriteInt(curWager, 32)
net.WriteInt(selectedDuel, 16)
net.SendToServer()
surface.PlaySound( 'buttons/button15.wav' )
end
end)
function pnl.select:Think()
if curWager == nil or not isnumber(curWager) then self.clickable = false self:SetText(gDuel.Translate("UnavailableWager")) return end
if !table.IsEmpty(DarkRP) then
if curWager > gDuel.maxBet or curWager < gDuel.minBet then
self.clickable = false
self:SetText(gDuel.Translate("UnavailableWager"))
return
end
end
self.clickable = true
self:SetText(gDuel.Translate("SendChallenge"))
end
plList:AddItem(pnl)
curY = curY + pnl:GetTall() + 5
end
end
displayPlayers()
local plSearch = gDuel.vgui.txtentry(gDuel.Translate("SearchaPlayer"), MainDop, 5, 5, MainDop:GetWide() - 10, 20, function(self)
displayPlayers(self:GetValue())
end)
// Duel types
local chList = gDuel.vgui.scroll(MainAdditional, 5, 5, MainAdditional:GetWide() - 10, MainAdditional:GetTall() - 70)
local curX = 0
local curY = 0
for _, data in ipairs(gDuel.Types) do
local pnl = chList:Add 'DPanel'
pnl:SetSize(chList:GetWide() / 2, 100)
pnl:SetPos(curX, curY)
function pnl:Paint(w, h)
gDuel.vgui.DrawBox(0, 0, w, h, Color(50, 50, 50, 100))
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
gDuel.vgui.txt(data.name, 20, w / 2, 3, nil, 1)
end
pnl.desc = pnl:Add 'DLabel'
pnl.desc:SetSize(pnl:GetWide() - 10, pnl:GetTall() - 75)
pnl.desc:SetPos(5, 25)
pnl.desc:SetText(gDuel.Translate("Description") .."" .. data.desc)
pnl.desc:SetAutoStretchVertical(true)
pnl.desc:SetWrap(true)
pnl.desc:SetFont 'gDuelFont14'
pnl.desc:SetTextColor(color_white)
pnl.select = GuiButton(gDuel.Translate("Select"), pnl, 5, pnl:GetTall() - 22, pnl:GetWide() - 10, 17, function(self)
selectedDuel = _
surface.PlaySound( 'buttons/button15.wav' )
end)
function pnl.select:Think()
if selectedDuel == _ then
self:SetText(gDuel.Translate("Selected"))
else
self:SetText(gDuel.Translate("Select"))
end
end
curX = curX + pnl:GetWide() + 5
if curX > plList:GetWide() then
curX = 0
curY = curY + pnl:GetTall() + 5
end
end
// MainDop:Hide()
//MainAdditional:Hide()
local LeadersLead = vgui.Create( "DPanel", bg)
LeadersLead:SetPos( 20, 70 )
LeadersLead:SetSize(bg:GetWide()-40,bg:GetTall()-89)
LeadersLead.Paint = function(self,w,h)
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
end
local zr = 0
local plList2 = gDuel.vgui.scroll(LeadersLead, 5, 5, LeadersLead:GetWide() - 10, LeadersLead:GetTall() ) // Players slider
local curYxx = 0
table.SortByMember( Leaders, "Duelswin" )
for k,v in pairs(Leaders) do
local pnl = plList2:Add 'DPanel'
pnl:SetSize(plList2:GetWide(), 50)
pnl:SetPos(0, curYxx)
avatar = pnl:Add 'AvatarImage'
avatar:SetPos(7,5)
avatar:SetSize(40, 40)
avatar:SetSteamID(util.SteamIDTo64(v.steamID),64 )
function pnl:Paint(w, h)
gDuel.vgui.DrawBox(0, 0, w, h, Color(20, 20, 20, 100))
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
gDuel.vgui.txt(gDuel.Translate("Name")..'' .. v.Name, 15, 55, 0)
gDuel.vgui.txt('Wins: ' .. v.Duelswin, 15, 55, 15)
gDuel.vgui.txt('Loses: ' .. v.Duelslose, 15, 55, 28)
end
plList2:AddItem(pnl)
curYxx = curYxx + pnl:GetTall() + 5
zr=zr+1
if zr > 10 then break end
end
LeadersLead:Hide()
local MainButton = vgui.Create("DButton", bg)
MainButton:SetPos(30, 10)
MainButton:SetSize(100,30)
MainButton:SetText("")
MainButton.DoClick = function()
surface.PlaySound( 'buttons/button15.wav' )
LeadersLead:Hide()
MainDop:Show()
MainAdditional:Show()
end
MainButton.Paint = function(self,w,h)
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
gDuel.vgui.DrawBox(0, 0, w , h, Color(10, 189, 227, 100))
draw.SimpleText( "Main", 'gDuelFont18', 34, 6, Color(255,255,255,200))
end
MainButton.OnCursorEntered = function()
surface.PlaySound( 'UI/buttonrollover.wav' )
end
local LeadersButton = vgui.Create("DButton", bg)
LeadersButton:SetPos(160, 10)
LeadersButton:SetSize(100,30)
LeadersButton:SetText("")
LeadersButton.DoClick = function()
surface.PlaySound( 'buttons/button15.wav' )
MainDop:Hide()
MainAdditional:Hide()
LeadersLead:Show()
end
LeadersButton.Paint = function(self,w,h)
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
gDuel.vgui.DrawBox(0, 0, w , h, Color(10, 189, 227, 100))
draw.SimpleText( "Leaderboard", 'gDuelFont18', 8, 6, Color(255,255,255,200))
end
LeadersButton.OnCursorEntered = function()
surface.PlaySound( 'UI/buttonrollover.wav' )
end
local CloseButton = vgui.Create("DButton", bg)
CloseButton:SetPos(bg:GetWide()-130, 10)
CloseButton:SetSize(100,30)
CloseButton:SetText("")
CloseButton.DoClick = function()
bg:SizeTo( 0, 0, 0.3)
surface.PlaySound( 'buttons/button15.wav' )
timer.Simple( 0.3, function()
bg:Remove()
end )
end
CloseButton.Paint = function(self,w,h)
gDuel.vgui.DrawOutlinedBox( 0, 0, w, h )
gDuel.vgui.DrawBox(0, 0, w , h, Color(10, 189, 227, 100))
draw.SimpleText( "Close", 'gDuelFont18', 34, 6, Color(255,255,255,200))
end
CloseButton.OnCursorEntered = function()
surface.PlaySound( 'UI/buttonrollover.wav' )
end
end
net.Receive("gDuel.Menu", gDuelMenu)

-- Incoming duel challenge: pops up a 15-second accept/decline prompt at the
-- bottom of the screen for the challenged player.
net.Receive('gDuel.SendRequest', function()
    local dpl = net.ReadEntity()      -- challenging player
    local wager = net.ReadInt(32)     -- wagered amount
    local duelType = net.ReadInt(16)  -- index into gDuel.Types (renamed from 'type' to avoid shadowing the Lua global)
    local id = net.ReadInt(16)        -- server-side request id

    -- Unknown duel type: ignore the request entirely.
    if not gDuel.Types[duelType] then return end

    -- Deadline after which the request is auto-declined (see bg:Think below).
    local removetime = CurTime() + 15

    local bg = vgui.Create('DFrame')
    --bg:MakePopup()
    bg:SetSize(520, 100)
    bg:SetPos(0, ScrH() - 100)
    bg:SetDraggable(false)
    bg:ShowCloseButton(false)
    bg:SetTitle("Duel - Challenge!")

    bg.Paint = function(self, W, H)
        gDuel.vgui.DrawBlur(self, 6)
        gDuel.vgui.DrawBox(0.5, 0, W, 50, Color(0, 0, 0, 170))
        gDuel.vgui.DrawBox(20, 70, W - 40, H - 89, Color(0, 0, 0, 150))
        gDuel.vgui.DrawOutlinedBox(0, 0, W, H)
        gDuel.vgui.DrawOutlinedBox(20, 70, W - 40, H - 89)
        gDuel.vgui.DrawOutlinedBox(0, 0, W, 50)
    end

    bg.titlex = 520 / 2

    function bg:PaintOver(w, h)
        -- Challenger left the server: drop the prompt.
        if not IsValid(dpl) then bg:Remove() return end
        if !table.IsEmpty(DarkRP) then
            -- DarkRP present: include the formatted wager in the prompt text.
            gDuel.vgui.txt(dpl:Name() .. '' .. gDuel.Translate("AcceptVgui") .. '' .. gDuel.Types[duelType].name .. '' .. gDuel.Translate("AcceptVgui2") .. '' .. DarkRP.formatMoney(wager), 15, w / 2, 28, nil, 1)
        else
            gDuel.vgui.txt(dpl:Name() .. '' .. gDuel.Translate("AcceptVgui") .. '' .. gDuel.Types[duelType].name, 15, w / 2, 28, nil, 1)
        end
    end

    function bg:Think()
        -- No response within 15 seconds: decline automatically.
        if removetime <= CurTime() then
            net.Start 'gDuel.DeclineRequest'
            net.WriteInt(id, 24)
            net.SendToServer()
            self:Remove()
        end
    end

    local btns =
    {
        [1] =
        {
            str = gDuel.Translate("AcceptVgui3"),
            func = function()
                surface.PlaySound('buttons/button15.wav')
                net.Start 'gDuel.AcceptRequest'
                net.WriteInt(id, 24)
                net.SendToServer()
                bg:Remove()
            end
        },
        [2] =
        {
            str = gDuel.Translate("AcceptVgui4"),
            func = function()
                surface.PlaySound('buttons/button15.wav')
                net.Start 'gDuel.DeclineRequest'
                -- BUG FIX: this id was written with 25 bits while every other
                -- writer of gDuel.DeclineRequest uses 24 (see bg:Think above);
                -- mismatched widths desync the net message on the server.
                net.WriteInt(id, 24)
                net.SendToServer()
                bg:Remove()
            end
        }
    }

    -- Lay the two buttons out as two stacked full-width rows in the lower half.
    local h = (100 - 48) / 2
    local cury = 100 - (h * 2)
    for i = 1, 2 do
        local data = btns[i]
        if not data then continue end
        local btn = bg:Add 'DButton'
        btn:SetSize(520, h)
        btn:SetPos(0, cury)
        btn:SetText(data.str)
        btn:SetTextColor(color_white)
        btn.DoClick = data.func
        btn.Paint = function(self, w, h)
            gDuel.vgui.DrawBox(0, 0, w, h, Color(10, 189, 227, 100))
            gDuel.vgui.DrawOutlinedBox(0, 0, w, h)
        end
        cury = cury + h
    end
end)
| 36.183623 | 251 | 0.502469 |
e1b463cfcbc4efdd9547d7e4a999f4a91e497d41 | 2,607 | sql | SQL | ETL_Select.sql | nletcher/SQL-Challenge | d1b3f4e1146735e8d994f5249604f4d59aee76ec | [
"ADSL"
] | null | null | null | ETL_Select.sql | nletcher/SQL-Challenge | d1b3f4e1146735e8d994f5249604f4d59aee76ec | [
"ADSL"
] | null | null | null | ETL_Select.sql | nletcher/SQL-Challenge | d1b3f4e1146735e8d994f5249604f4d59aee76ec | [
"ADSL"
-- List the following details of each employee: employee number, last name, first name, sex, and salary.
SELECT e.emp_no, e.last_name, e.first_name, e.sex, s.salary
FROM Employee AS e
INNER JOIN Salaries AS s
    ON e.emp_no = s.emp_no;

-- List first name, last name, and hire date for Employee who were hired in 1986.
-- Fixed: use unambiguous ISO-8601 literals and a half-open range instead of
-- BETWEEN '01/01/1986' AND '12/31/1986' -- the MM/DD/YYYY strings only parse
-- correctly under an MDY DateStyle, while the form below always parses and
-- covers exactly the same calendar year (31 Dec 1986 inclusive).
SELECT first_name, last_name, hire_date
FROM Employee
WHERE hire_date >= DATE '1986-01-01'
  AND hire_date < DATE '1987-01-01';

-- List the manager of each department with the following information: department number, department name, the manager's employee number, last name, first name.
SELECT d.dept_no, d.dept_name, e.emp_no, e.first_name, e.last_name
FROM Departments AS d
LEFT JOIN managers AS m
    ON d.dept_no = m.dept_no
LEFT JOIN Employee AS e
    ON m.emp_no = e.emp_no;

-- List the department of each employee with the following information: employee number, last name, first name, and department name.
SELECT e.emp_no, e.last_name, e.first_name, d.dept_name
FROM Employee AS e
LEFT JOIN Emp_Dept AS de
    ON e.emp_no = de.emp_no
LEFT JOIN Departments AS d
    ON de.dept_no = d.dept_no;

-- List first name, last name, and sex for Employee whose first name is "Hercules" and last names begin with "B."
SELECT first_name, last_name, sex
FROM Employee
WHERE first_name = 'Hercules'
  AND last_name LIKE 'B%';

-- List all Employee in the Sales department, including their employee number, last name, first name, and department name.
-- (The WHERE filter on dept_name makes the LEFT JOINs behave as inner joins here, which is the intent.)
SELECT e.emp_no, e.last_name, e.first_name, d.dept_name
FROM Employee AS e
LEFT JOIN Emp_Dept AS de
    ON e.emp_no = de.emp_no
LEFT JOIN Departments AS d
    ON de.dept_no = d.dept_no
WHERE d.dept_name = 'Sales';

-- List all Employee in the Sales and Development Departments, including their employee number, last name, first name, and department name.
-- Simplified: IN list replaces the equivalent OR chain.
SELECT e.emp_no, e.last_name, e.first_name, d.dept_name
FROM Employee AS e
LEFT JOIN Emp_Dept AS de
    ON e.emp_no = de.emp_no
LEFT JOIN Departments AS d
    ON de.dept_no = d.dept_no
WHERE d.dept_name IN ('Sales', 'Development');

-- In descending order, list the frequency count of employee last names, i.e., how many Employee share each last name.
SELECT last_name, count(last_name) AS "No of Employee with last_name"
FROM Employee
GROUP BY last_name
ORDER BY "No of Employee with last_name" DESC;

-- Find average salary by title
SELECT t.title, round(avg(s.salary))
FROM Salaries s
LEFT JOIN Employee e
    ON e.emp_no = s.emp_no
LEFT JOIN Titles t
    ON e.emp_title_id = t.title_id
GROUP BY t.title;

SELECT * FROM Employee;

-- Epilogue
SELECT * FROM Employee
WHERE emp_no = '499942';
-- Hahaa.. April Foolsday, but why!? :-)
| 34.76 | 160 | 0.764097 |
d25dd3490f735a69ae588e529ca3d202d1b765a7 | 2,782 | dart | Dart | lib/data/providers/exam_provider.dart | enzanumo/CustedNG | 2964bc01ab3949df379b1a7564d0a61b0fee2f99 | [
"Apache-2.0"
] | 38 | 2020-02-03T01:34:23.000Z | 2022-03-22T09:20:00.000Z | lib/data/providers/exam_provider.dart | enzanumo/CustedNG | 2964bc01ab3949df379b1a7564d0a61b0fee2f99 | [
"Apache-2.0"
] | 5 | 2020-10-01T05:31:59.000Z | 2022-02-18T12:13:40.000Z | lib/data/providers/exam_provider.dart | enzanumo/CustedNG | 2964bc01ab3949df379b1a7564d0a61b0fee2f99 | [
"Apache-2.0"
] | 14 | 2020-02-03T01:34:28.000Z | 2021-12-11T07:23:13.000Z | import 'dart:async';
import 'package:custed2/core/provider/busy_provider.dart';
import 'package:custed2/data/models/jw_exam.dart';
import 'package:custed2/data/store/exam_store.dart';
import 'package:custed2/locator.dart';
import 'package:custed2/service/custed_service.dart';
import 'package:custed2/service/jw_service.dart';
/// Comparator that orders two exam rows chronologically by their task's
/// begin date (lexicographic comparison of the `beginDate` strings).
int sortExamByTime(JwExamRows a, JwExamRows b) {
  final firstBegin = a.examTask.beginDate;
  final secondBegin = b.examTask.beginDate;
  return firstBegin.compareTo(secondBegin);
}
/// Provides the student's exam schedule to the UI.
///
/// On [init] it first loads a cached copy from [ExamStore] so the UI can
/// render immediately; [refreshData] fetches fresh data from the JW service
/// and falls back to the cache on failure.
class ExamProvider extends BusyProvider {
  // Latest exam payload; null until either the cache or the network fills it.
  JwExamData data;
  // Remote kill switch: whether the exam feature should be shown at all.
  var show = true;
  // True when the last network refresh threw.
  var failed = false;
  // True when [data] came from the local cache rather than the network.
  bool useCache = false;
  // Re-notifies listeners once a minute (so countdown-style UI stays fresh).
  Timer _updateTimer;

  /// Queries the kill switch, loads cached data, and starts the refresh timer.
  /// Does nothing further when the feature is disabled remotely.
  Future<void> init() async {
    show = await CustedService().getShouldShowExam();
    if (!show) {
      return;
    }
    loadLocalData();
    // setBusyState(true);
    // await refreshData();
    notifyListeners();
    startAutoRefresh();
  }

  /// Returns the first exam whose start instant is in the future,
  /// or null when there is no data or no upcoming exam.
  JwExamRows getNextExam() {
    if (data == null) {
      return null;
    }
    for (JwExamRows exam in data.rows) {
      // NOTE(review): this builds the timestamp from beginDate.substring(0, 11)
      // plus beginTime.substring(6), while getRemainExam below concatenates the
      // full beginTime -- one of the two is likely wrong; confirm the expected
      // date/time string layout against the JW API payload.
      final examTime = exam.examTask.beginDate.substring(0, 11) +
          exam.examTask.beginTime.substring(6);
      if (DateTime.parse(examTime).isAfter(DateTime.now())) {
        return exam;
      }
    }
    return null;
  }

  /// Returns how many exams (including the next one) are still ahead,
  /// 0 when all are past, or null when no data has been loaded yet.
  int getRemainExam() {
    if (data == null) {
      return null;
    }
    for (JwExamRows exam in data.rows) {
      // Rows are sorted chronologically (see the sorts below), so the first
      // future exam marks the boundary between past and remaining exams.
      final examTime =
          exam.examTask.beginDate.substring(0, 11) + exam.examTask.beginTime;
      if (DateTime.parse(examTime).isAfter(DateTime.now())) {
        return data.rows.length - data.rows.indexOf(exam);
      }
    }
    return 0;
  }

  /// Loads the cached exam payload from [ExamStore], if any, and notifies
  /// listeners either way.
  Future<void> loadLocalData() async {
    final examStore = await locator.getAsync<ExamStore>();
    var cacheExamData = examStore.fetch();
    if (cacheExamData != null) {
      print('use cached exam data.');
      data = cacheExamData;
      useCache = true;
      data.rows.sort((a, b) => sortExamByTime(a, b));
    }
    notifyListeners();
  }

  /// Fetches fresh exam data from the JW service and caches it.
  /// On failure, falls back to the cached copy and sets [failed].
  Future<void> refreshData() async {
    setBusyState(true);
    final examStore = await locator.getAsync<ExamStore>();
    try {
      final exam = await JwService().getExam();
      data = exam.data;
      if (data != null) examStore.put(data);
      failed = false;
      useCache = false;
    } catch (e) {
      failed = true;
      var cacheExamData = examStore.fetch();
      if (cacheExamData != null) {
        print('use cached exam data.');
        data = cacheExamData;
        useCache = true;
      }
    } finally {
      setBusyState(false);
    }
    if (data == null) return;
    data.rows.sort((a, b) => sortExamByTime(a, b));
  }

  /// Starts the once-a-minute notify timer; safe to call repeatedly
  /// (does nothing if a timer is already running).
  void startAutoRefresh() {
    if (_updateTimer != null && _updateTimer.isActive) {
      return;
    }
    _updateTimer = Timer.periodic(Duration(minutes: 1), (_) {
      notifyListeners();
    });
  }
}
| 23.982759 | 77 | 0.624012 |
0c500c135387d8339d1d902a0311c7491fcd1364 | 249 | lua | Lua | src/utils/Shaders.lua | eniallator/2D-Random-Terrain-RPG | d70e741d2c39f0930fb40d0c711801866f6d3aba | [
"MIT"
] | null | null | null | src/utils/Shaders.lua | eniallator/2D-Random-Terrain-RPG | d70e741d2c39f0930fb40d0c711801866f6d3aba | [
"MIT"
] | null | null | null | src/utils/Shaders.lua | eniallator/2D-Random-Terrain-RPG | d70e741d2c39f0930fb40d0c711801866f6d3aba | [
"MIT"
-- Fragment shaders compiled once at require-time and shared by callers,
-- keyed by their purpose.
local shaders = {
    blackAndWhite = love.graphics.newShader('src/shaders/BlackAndWhite.frag'),
    HSColourPicker = love.graphics.newShader('src/shaders/HSColourPicker.frag'),
    VColourPicker = love.graphics.newShader('src/shaders/VColourPicker.frag'),
}

return shaders
| 41.5 | 80 | 0.767068 |
b1e01c26dd13ce399d4bc494f921eb82dde5d290 | 2,183 | h | C | Linux/Sources/Application/Rules/Dialog/CUploadScriptDialog.h | mulberry-mail/mulberry4-client | cdaae15c51dd759110b4fbdb2063d0e3d5202103 | [
"ECL-2.0",
"Apache-2.0"
] | 12 | 2015-04-21T16:10:43.000Z | 2021-11-05T13:41:46.000Z | Linux/Sources/Application/Rules/Dialog/CUploadScriptDialog.h | mulberry-mail/mulberry4-client | cdaae15c51dd759110b4fbdb2063d0e3d5202103 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2015-11-02T13:32:11.000Z | 2019-07-10T21:11:21.000Z | Linux/Sources/Application/Rules/Dialog/CUploadScriptDialog.h | mulberry-mail/mulberry4-client | cdaae15c51dd759110b4fbdb2063d0e3d5202103 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2015-01-12T08:49:12.000Z | 2021-03-27T09:11:10.000Z | /*
Copyright (c) 2007 Cyrus Daboo. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Header for CUploadScriptDialog class
#ifndef __CUPLOADSCRIPTDIALOG__MULBERRY__
#define __CUPLOADSCRIPTDIALOG__MULBERRY__
#include "CDialogDirector.h"
#include "CINETAccount.h"
#include "cdstring.h"
#include "HPopupMenu.h"
// Constants
// Type
// Parameters for a mailbox-creation request.
// NOTE(review): this struct is not referenced elsewhere in this header; it is
// presumably consumed by callers in another file -- confirm it belongs here.
struct SCreateMailbox
{
	cdstring new_name;	// name of the mailbox to create
	cdstring parent;	// parent mailbox name -- presumably empty for top level; confirm
	cdstring account;	// account on which to create the mailbox
	CINETAccount::EINETServerType account_type;	// server/protocol type of the account
	bool use_wd;	// presumably: create within a working directory -- TODO confirm
	bool subscribe;	// presumably: subscribe to the mailbox after creation -- TODO confirm
	bool directory;	// presumably: create a directory rather than a mailbox -- TODO confirm
};
class JXTextButton;
class JXTextRadioButton;
class JXRadioGroup;
// Dialog that lets the user choose how to save a script: to a local file or
// to a server account, and -- for the server case -- whether to upload only,
// upload and activate, or delete.
class CUploadScriptDialog : public CDialogDirector
{
public:
	// Choices captured by the dialog and returned to the caller.
	struct SUploadScript
	{
		bool mFile;						// true: save to file (eSaveFile); false: save to server
		unsigned long mAccountIndex;	// index of the selected account in the popup
		bool mUpload;					// perform the upload
		bool mActivate;					// activate the script after upload
	};

	// Indices for the save-destination radio group.
	enum {eSaveFile, eSaveServer};
	// Indices for the server-action radio group.
	enum {eUploadOnly, eUploadActivate, eDelete};

	CUploadScriptDialog(JXDirector* supervisor);
	virtual ~CUploadScriptDialog();

	// Runs the dialog; 'details' seeds the controls on entry and receives the
	// user's choices on exit. Returns whether the user confirmed.
	static bool PoseDialog(SUploadScript& details);

protected:
	// Selections remembered from the previous invocation of the dialog.
	static SUploadScript sLastChoice;

// begin JXLayout

    JXRadioGroup*      mSaveGroup;
    JXTextRadioButton* mFileBtn;
    JXRadioGroup*      mUploadGroup;
    JXTextRadioButton* mServerBtn;
    HPopupMenu*        mAccountPopup;
    JXTextRadioButton* mUploadOnlyBtn;
    JXTextRadioButton* mUploadActivateBtn;
    JXTextRadioButton* mDeleteBtn;
    JXTextButton*      mOKBtn;
    JXTextButton*      mCancelBtn;

// end JXLayout

	virtual void	OnCreate();
	virtual void	Receive(JBroadcaster* sender, const Message& message);

	// Copy choices into the dialog controls / read them back out.
	void	SetDetails(SUploadScript& create);
	void	GetDetails(SUploadScript& result);

	// Populate the account popup from the configured server accounts.
	void	InitAccountMenu();
};
#endif
| 22.978947 | 76 | 0.746221 |
d3a7693c76f89f265865138755475318e783193b | 2,427 | ps1 | PowerShell | tasks/winbuildscripts/unittests.ps1 | kaarolch/datadog-agent | 88d2d9bdc262f3dba3f2b222557f67026bc6f59b | [
"Apache-2.0"
] | null | null | null | tasks/winbuildscripts/unittests.ps1 | kaarolch/datadog-agent | 88d2d9bdc262f3dba3f2b222557f67026bc6f59b | [
"Apache-2.0"
] | null | null | null | tasks/winbuildscripts/unittests.ps1 | kaarolch/datadog-agent | 88d2d9bdc262f3dba3f2b222557f67026bc6f59b | [
"Apache-2.0"
# Windows unit-test driver: creates the test user needed by the secrets tests,
# builds rtloader for the target architecture, then runs the rtloader and Go
# test suites. The script exits with the first failing step's exit code.

# Print a step's result and abort the whole script if the step failed.
# $Code is the captured $LASTEXITCODE; $ResultLabel / $FailureLabel reproduce
# the original per-step log wording.
function Confirm-StepResult([int]$Code, [string]$ResultLabel, [string]$FailureLabel) {
    Write-Host "$ResultLabel result is $Code"
    if ($Code -ne 0) {
        Write-Host -ForegroundColor Red "$FailureLabel failed $Code"
        [Environment]::Exit($Code)
    }
}

# Local user exercised by the secrets-feature tests.
$Password = ConvertTo-SecureString "dummyPW_:-gch6Rejae9" -AsPlainText -Force
New-LocalUser -Name "ddagentuser" -Description "Test user for the secrets feature on windows." -Password $Password

# Embedded Python locations supplied by the CI environment.
$Env:Python2_ROOT_DIR=$Env:TEST_EMBEDDED_PY2
$Env:Python3_ROOT_DIR=$Env:TEST_EMBEDDED_PY3

if ($Env:NEW_BUILDER -eq "true") {
    # ridk (Ruby devkit toolchain) is only needed for the x64 target.
    if ($Env:TARGET_ARCH -eq "x64") {
        & ridk enable
    }
    & $Env:Python3_ROOT_DIR\python.exe -m pip install -r requirements.txt
}

$Env:BUILD_ROOT=(Get-Location).Path
$Env:PATH="$Env:BUILD_ROOT\dev\lib;$Env:GOPATH\bin;$Env:Python2_ROOT_DIR;$Env:Python2_ROOT_DIR\Scripts;$Env:Python3_ROOT_DIR;$Env:Python3_ROOT_DIR\Scripts;$Env:PATH"

& $Env:Python3_ROOT_DIR\python.exe -m pip install PyYAML==5.3

# Default to x64; only x86 is treated specially.
$archflag = "x64"
if ($Env:TARGET_ARCH -eq "x86") {
    $archflag = "x86"
}

& go get gopkg.in/yaml.v2
& inv -e deps --verbose

& inv -e rtloader.make --python-runtimes="$Env:PY_RUNTIMES" --install-prefix=$Env:BUILD_ROOT\dev --cmake-options='-G \"Unix Makefiles\"' --arch $archflag
Confirm-StepResult $LASTEXITCODE "Build" "rtloader make"

& inv -e rtloader.install
Confirm-StepResult $LASTEXITCODE "rtloader install" "rtloader install"

# Format check is intentionally disabled; kept for reference.
# & inv -e rtloader.format --raise-if-changed
# Confirm-StepResult $LASTEXITCODE "Format" "rtloader format"

& inv -e rtloader.test
Confirm-StepResult $LASTEXITCODE "rtloader test" "rtloader test"

# The new builder image cannot run race-enabled tests; otherwise keep --race.
if ($Env:NEW_BUILDER -eq "true"){
    & inv -e test --profile --cpus 4 --arch $archflag --python-runtimes="$Env:PY_RUNTIMES" --python-home-2=$Env:Python2_ROOT_DIR --python-home-3=$Env:Python3_ROOT_DIR --rtloader-root=$Env:BUILD_ROOT\rtloader
} else {
    & inv -e test --race --profile --cpus 4 --arch $archflag --python-runtimes="$Env:PY_RUNTIMES" --python-home-2=$Env:Python2_ROOT_DIR --python-home-3=$Env:Python3_ROOT_DIR --rtloader-root=$Env:BUILD_ROOT\rtloader
}
Confirm-StepResult $LASTEXITCODE "Test" "test"
e1fcb795aaf7b92172a4bd6ddd027df3ab2203fc | 3,333 | sql | SQL | dmsdev/get_psutil_cpu.sql | PNNL-Comp-Mass-Spec/DBSchema_PgSQL_DMS | ef47cf47622522159ed1a670dfa14d020482cf6a | [
"Apache-2.0"
] | 3 | 2020-02-15T13:31:39.000Z | 2022-01-30T20:59:54.000Z | dmsdev/get_psutil_cpu.sql | PNNL-Comp-Mass-Spec/DBSchema_PgSQL_DMS | ef47cf47622522159ed1a670dfa14d020482cf6a | [
"Apache-2.0"
] | null | null | null | dmsdev/get_psutil_cpu.sql | PNNL-Comp-Mass-Spec/DBSchema_PgSQL_DMS | ef47cf47622522159ed1a670dfa14d020482cf6a | [
"Apache-2.0"
] | null | null | null | --
--
-- Name: get_psutil_cpu(); Type: FUNCTION; Schema: public; Owner: d3l243
--
-- Returns host CPU metrics (utilization, load averages, and cpu_times_percent
-- buckets) via psutil, for consumption by the pgwatch2 monitoring agent.
-- SECURITY DEFINER because plpython3u is an untrusted language; execution is
-- granted to the pgwatch2 role below.
--

CREATE OR REPLACE FUNCTION public.get_psutil_cpu(OUT cpu_utilization double precision, OUT load_1m_norm double precision, OUT load_1m double precision, OUT load_5m_norm double precision, OUT load_5m double precision, OUT "user" double precision, OUT system double precision, OUT idle double precision, OUT iowait double precision, OUT irqs double precision, OUT other double precision) RETURNS record
    LANGUAGE plpython3u SECURITY DEFINER
    AS $$
from os import getloadavg
from psutil import cpu_times_percent, cpu_percent, cpu_count
from threading import Thread

class GetCpuPercentThread(Thread):
    def __init__(self, interval_seconds):
        self.interval_seconds = interval_seconds
        self.cpu_utilization_info = None
        super(GetCpuPercentThread, self).__init__()

    def run(self):
        self.cpu_utilization_info = cpu_percent(self.interval_seconds)

t = GetCpuPercentThread(0.5)
t.start()

ct = cpu_times_percent(0.5)
la = getloadavg()

t.join()

return t.cpu_utilization_info, la[0] / cpu_count(), la[0], la[1] / cpu_count(), la[1], ct.user, ct.system, ct.idle, ct.iowait, ct.irq + ct.softirq, ct.steal + ct.guest + ct.guest_nice

$$;


ALTER FUNCTION public.get_psutil_cpu(OUT cpu_utilization double precision, OUT load_1m_norm double precision, OUT load_1m double precision, OUT load_5m_norm double precision, OUT load_5m double precision, OUT "user" double precision, OUT system double precision, OUT idle double precision, OUT iowait double precision, OUT irqs double precision, OUT other double precision) OWNER TO d3l243;

--
-- Name: FUNCTION get_psutil_cpu(OUT cpu_utilization double precision, OUT load_1m_norm double precision, OUT load_1m double precision, OUT load_5m_norm double precision, OUT load_5m double precision, OUT "user" double precision, OUT system double precision, OUT idle double precision, OUT iowait double precision, OUT irqs double precision, OUT other double precision); Type: COMMENT; Schema: public; Owner: d3l243
--

COMMENT ON FUNCTION public.get_psutil_cpu(OUT cpu_utilization double precision, OUT load_1m_norm double precision, OUT load_1m double precision, OUT load_5m_norm double precision, OUT load_5m double precision, OUT "user" double precision, OUT system double precision, OUT idle double precision, OUT iowait double precision, OUT irqs double precision, OUT other double precision) IS 'created for pgwatch2';

--
-- Name: FUNCTION get_psutil_cpu(OUT cpu_utilization double precision, OUT load_1m_norm double precision, OUT load_1m double precision, OUT load_5m_norm double precision, OUT load_5m double precision, OUT "user" double precision, OUT system double precision, OUT idle double precision, OUT iowait double precision, OUT irqs double precision, OUT other double precision); Type: ACL; Schema: public; Owner: d3l243
--
-- Allow the pgwatch2 monitoring role to call this function.
--

GRANT ALL ON FUNCTION public.get_psutil_cpu(OUT cpu_utilization double precision, OUT load_1m_norm double precision, OUT load_1m double precision, OUT load_5m_norm double precision, OUT load_5m double precision, OUT "user" double precision, OUT system double precision, OUT idle double precision, OUT iowait double precision, OUT irqs double precision, OUT other double precision) TO pgwatch2;
53df4101e63f61e24a99d64ea64ec3f05c0aba5e | 404 | java | Java | src/backend/math/ProductNode.java | matt-mosca/slogo | d18daea1b4b52e89e795eda71c045a4b569a5ef8 | [
"MIT"
] | null | null | null | src/backend/math/ProductNode.java | matt-mosca/slogo | d18daea1b4b52e89e795eda71c045a4b569a5ef8 | [
"MIT"
] | null | null | null | src/backend/math/ProductNode.java | matt-mosca/slogo | d18daea1b4b52e89e795eda71c045a4b569a5ef8 | [
"MIT"
] | null | null | null | package backend.math;
import backend.VarArgNode;
import java.util.Arrays;
/**
* Syntax node for computing the product of all its operands, of which there can be an arbitrary number.
*
* @author Ben Schwennesen
*/
public class ProductNode extends VarArgNode {

    /**
     * Multiplies all operand values together. With zero operands the result
     * is the multiplicative identity, 1.
     */
    @Override
    public double executeSelf(double... operands) {
        double product = 1.0;
        for (double operand : operands) {
            product *= operand;
        }
        return product;
    }
}
| 19.238095 | 104 | 0.70297 |
6466d5d953f8e727df141d562ed74ba04ba5416b | 3,240 | dart | Dart | lib/main.dart | jerolimov/yo-flutter | 5dfc6893e446374a5fe922c349140a98f4df6ec4 | [
"Apache-2.0"
] | null | null | null | lib/main.dart | jerolimov/yo-flutter | 5dfc6893e446374a5fe922c349140a98f4df6ec4 | [
"Apache-2.0"
] | null | null | null | lib/main.dart | jerolimov/yo-flutter | 5dfc6893e446374a5fe922c349140a98f4df6ec4 | [
"Apache-2.0"
] | null | null | null | import 'package:flutter/material.dart';
import 'package:scoped_model/scoped_model.dart';
import 'session_model.dart';
import 'friends_model.dart';
import 'person.dart';
/// App entry point: wires up the session model and the friends model (which
/// depends on the session) and mounts the widget tree under both ScopedModels
/// so any descendant can observe them.
Future<void> main() async {
  var sessionModel = SessionModel();
  runApp(ScopedModel<SessionModel>(
    model: sessionModel,
    child: ScopedModel<FriendsModel>(
      model: FriendsModel(sessionModel),
      child: YoApp(),
    ),
  ));
}
/// Root widget: configures the theme and routes between the loading, login,
/// and friend-list pages based on the session state.
class YoApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Yo!',
      theme: ThemeData(
        brightness: Brightness.dark,
        accentColor: Color(0xFFF67280),
      ),
      home: Scaffold(
        body: ScopedModelDescendant<SessionModel>(
            // Rebuilds whenever the session model notifies: show a loading
            // page until initialized, then login or the friend list.
            builder: (BuildContext context, Widget child, SessionModel sessionModel) {
          if (!sessionModel.initialized) {
            return LoadingPage();
          } else if (!sessionModel.isUserLoggedIn) {
            return LoginPage();
          } else {
            return FriendListPage();
          }
        }),
      ));
  }
}
// Row background palette; FriendListPage cycles through it per list index.
List<Color> _colors = [
  Color(0xFFF8B195),
  Color(0xFFF67280),
  Color(0xFFC06C84),
  Color(0xFF6C5B7B),
  Color(0xFF355C7D),
  Color(0xFF34495D),
];
/// Placeholder page shown while the session is still initializing.
class LoadingPage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final loadingLabel = Text("Loading...");
    return Container(
      child: Center(child: loadingLabel),
    );
  }
}
/// Full-screen login page: app title with a Google-login button at the bottom.
class LoginPage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return Container(
      color: Colors.red,
      child: SafeArea(
        child: Column(
          crossAxisAlignment: CrossAxisAlignment.stretch,
          children: <Widget>[
            // Title fills the remaining vertical space above the button.
            Expanded(
                child: Center(
                    child: Text("YO!", style: TextStyle(fontSize: 72)))),
            RaisedButton(
              child: Text("Google Login"),
              onPressed: () {
                print("Login...");
                // Delegates the actual sign-in flow to the session model.
                ScopedModel.of<SessionModel>(context).googleLogin();
              },
            )
          ],
        ),
      ),
    );
  }
}
/// Scrollable list of friends; tapping a row sends that friend a "Yo".
class FriendListPage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return ScopedModelDescendant<FriendsModel>(
      builder: (BuildContext context, Widget child, FriendsModel friendsModel) {
        return ListView.builder(
          itemCount: friendsModel.friends.length,
          itemBuilder: (BuildContext context, int index) {
            return Container(
              // Cycle through the palette so adjacent rows differ in color.
              color: _colors[index % _colors.length],
              child: ListTile(
                contentPadding: EdgeInsets.all(10),
                // Round friend avatar loaded from their photo URL.
                leading: ClipRRect(
                  child: Image.network(
                    friendsModel.friends[index].photoUrl,
                    width: 50,
                    height: 50,
                  ),
                  borderRadius: BorderRadius.circular(25),
                ),
                title: Text(friendsModel.friends[index].name),
                onTap: () {
                  friendsModel.sendYo(friendsModel.friends[index]);
                },
              ),
            );
          },
        );
      }
    );
  }
}
| 27.226891 | 88 | 0.551852 |
4035236012b28c475e949ea7b823a940d73dbb36 | 3,643 | py | Python | tests/proto_dir/exonum_modules/main/exonum/key_value_sequence_pb2.py | aleksuss/exonum-python-client | 5e7ff330440cd2fe2c5b707a0cff46048b311ea2 | [
"Apache-2.0"
] | 5 | 2019-10-04T13:16:06.000Z | 2022-01-06T08:53:28.000Z | tests/proto_dir/exonum_modules/main/exonum/key_value_sequence_pb2.py | aleksuss/exonum-python-client | 5e7ff330440cd2fe2c5b707a0cff46048b311ea2 | [
"Apache-2.0"
] | 30 | 2019-11-28T07:06:36.000Z | 2022-02-11T02:59:05.000Z | tests/proto_dir/exonum_modules/main/exonum/key_value_sequence_pb2.py | aleksuss/exonum-python-client | 5e7ff330440cd2fe2c5b707a0cff46048b311ea2 | [
"Apache-2.0"
] | 11 | 2019-10-03T10:58:42.000Z | 2022-01-06T07:29:12.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: exonum/key_value_sequence.proto
# NOTE(review): this module is protoc output for key_value_sequence.proto;
# regenerate it from the .proto file rather than editing it by hand.

from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

# Default symbol database; the generated message classes register into it.
_sym_db = _symbol_database.Default()


# File-level descriptor; serialized_pb is the compiled .proto definition.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='exonum/key_value_sequence.proto',
  package='exonum',
  syntax='proto3',
  serialized_options=b'\n\030com.exonum.messages.core',
  serialized_pb=b'\n\x1f\x65xonum/key_value_sequence.proto\x12\x06\x65xonum\"&\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\"5\n\x10KeyValueSequence\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.exonum.KeyValueB\x1a\n\x18\x63om.exonum.messages.coreb\x06proto3'
)


# Descriptor for the KeyValue message: string key (field 1), bytes value (field 2).
_KEYVALUE = _descriptor.Descriptor(
  name='KeyValue',
  full_name='exonum.KeyValue',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='exonum.KeyValue.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='exonum.KeyValue.value', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=43,
  serialized_end=81,
)


# Descriptor for KeyValueSequence: a repeated KeyValue field named 'entries'.
_KEYVALUESEQUENCE = _descriptor.Descriptor(
  name='KeyValueSequence',
  full_name='exonum.KeyValueSequence',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='entries', full_name='exonum.KeyValueSequence.entries', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=83,
  serialized_end=136,
)

# Wire up cross-references and register descriptors with the symbol database.
_KEYVALUESEQUENCE.fields_by_name['entries'].message_type = _KEYVALUE
DESCRIPTOR.message_types_by_name['KeyValue'] = _KEYVALUE
DESCRIPTOR.message_types_by_name['KeyValueSequence'] = _KEYVALUESEQUENCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes, built at import time from the descriptors above.
KeyValue = _reflection.GeneratedProtocolMessageType('KeyValue', (_message.Message,), {
  'DESCRIPTOR' : _KEYVALUE,
  '__module__' : 'exonum.key_value_sequence_pb2'
  # @@protoc_insertion_point(class_scope:exonum.KeyValue)
  })
_sym_db.RegisterMessage(KeyValue)

KeyValueSequence = _reflection.GeneratedProtocolMessageType('KeyValueSequence', (_message.Message,), {
  'DESCRIPTOR' : _KEYVALUESEQUENCE,
  '__module__' : 'exonum.key_value_sequence_pb2'
  # @@protoc_insertion_point(class_scope:exonum.KeyValueSequence)
  })
_sym_db.RegisterMessage(KeyValueSequence)


DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 31.136752 | 298 | 0.751578 |
56defc31107935e455e201d30492777023d2d83b | 713 | ts | TypeScript | src/libs/createCtx.ts | HongodoriDEVs/cappuccino-app | 824cf2295cebae85b762b6c7a21cbbe94bf1d0ee | [
"MIT"
] | 16 | 2021-03-30T09:22:05.000Z | 2022-03-06T06:43:53.000Z | src/libs/createCtx.ts | HongodoriDEVs/cappuccino-app | 824cf2295cebae85b762b6c7a21cbbe94bf1d0ee | [
"MIT"
] | 110 | 2021-04-03T10:06:17.000Z | 2022-02-27T09:10:04.000Z | src/libs/createCtx.ts | HongodoriDEVs/cappuccino-app | 824cf2295cebae85b762b6c7a21cbbe94bf1d0ee | [
"MIT"
] | 1 | 2022-03-08T07:16:01.000Z | 2022-03-08T07:16:01.000Z | import { Provider, createContext, useContext } from "react";
/**
* undefined を許さない createContext の wrapper
* <br>
* [【React】デフォルト値もundefinedチェックもいらないcreateContext【Typescript】 \- Qiita](https://qiita.com/johnmackay150/items/88654e5064290c24a32a)
*
* @return `[useCtx, CtxProvider]`
*
* @throws useCtx must be inside a Provider with a value
*/
export default function createCtx<ContextType>(): [
  () => ContextType,
  Provider<ContextType | undefined>
] {
  const ctx = createContext<ContextType | undefined>(undefined);

  /**
   * Hook that reads the context and guarantees a defined value.
   * @throws when called outside the matching Provider.
   */
  function useCtx(): ContextType {
    const c = useContext(ctx);
    // BUG FIX: the original tested `!c`, which also threw for legitimate
    // falsy context values (0, '', false). Only `undefined` can mean "no
    // Provider above us", because undefined is the context's default value.
    if (c === undefined) {
      throw new Error("useCtx must be inside a Provider with a value");
    }
    return c;
  }

  return [useCtx, ctx.Provider];
}
| 29.708333 | 131 | 0.706872 |
d09b9fdff2a771acd791000c748a4ffff128d506 | 5,224 | sql | SQL | DMS5/UpdateMaterialLocations.sql | viswaratha12/dbwarden | 3931accda4fb401d21b6cb272fe3d6959915ceb8 | [
"Apache-2.0"
] | 2 | 2018-04-03T05:18:15.000Z | 2020-04-23T04:00:25.000Z | DMS5/UpdateMaterialLocations.sql | viswaratha12/dbwarden | 3931accda4fb401d21b6cb272fe3d6959915ceb8 | [
"Apache-2.0"
] | null | null | null | DMS5/UpdateMaterialLocations.sql | viswaratha12/dbwarden | 3931accda4fb401d21b6cb272fe3d6959915ceb8 | [
"Apache-2.0"
] | 4 | 2016-05-14T17:56:55.000Z | 2020-01-23T12:02:25.000Z | /****** Object: StoredProcedure [dbo].[UpdateMaterialLocations] ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE Procedure dbo.UpdateMaterialLocations
/****************************************************
**
** Desc:
** Change properties of given set of material locations
**
** @locationList will look like this:
**
** <r n="80B.na.na.na.na" i="425" a="Status" v="Active" />
** <r n="80B.2.na.na.na" i="439" a="Status" v="Active" />
** <r n="80B.3.3.na.na" i="558" a="Status" v="Active" />
**
** Return values: 0: success, otherwise, error code
**
** Parameters:
**
** Auth: grk
** Date: 06/02/2013 grk - initial release
** 06/03/2013 grk - added action attribute to XML
** 06/06/2013 grk - added code to update status
** 02/23/2016 mem - Add set XACT_ABORT on
** 11/08/2016 mem - Use GetUserLoginWithoutDomain to obtain the user's network login
** 11/10/2016 mem - Pass '' to GetUserLoginWithoutDomain
** 04/12/2017 mem - Log exceptions to T_Log_Entries
** 06/16/2017 mem - Restrict access using VerifySPAuthorized
** 08/01/2017 mem - Use THROW if not authorized
**
*****************************************************/
(
-- XML document (see header comment) listing the locations to update
-- NOTE(review): 'text' is a deprecated SQL Server type; varchar(max) is the
-- modern replacement -- confirm existing callers before changing
@locationList text,
-- Status / error message returned to the caller
@message varchar(512) OUTPUT,
-- Login of the user making the change; resolved from the connection when blank
@callingUser varchar(128) = '',
@infoOnly tinyint = 0 -- Set to 1 to preview the changes that would be made
)
As
-- XACT_ABORT: any runtime error aborts and rolls back the batch
Set XACT_ABORT, nocount on
declare @myError int = 0
declare @myRowCount int = 0
Declare @Msg2 varchar(512)
DECLARE @xml AS xml
SET CONCAT_NULL_YIELDS_NULL ON
SET ANSI_PADDING ON
SET @message = ''
---------------------------------------------------
-- Verify that the user can execute this procedure from the given client host
---------------------------------------------------
Declare @authorized tinyint = 0
Exec @authorized = VerifySPAuthorized 'UpdateMaterialLocations', @raiseError = 1
If @authorized = 0
Begin
THROW 51000, 'Access denied', 1;
End
BEGIN TRY
-----------------------------------------------------------
-- Validate the inputs
-----------------------------------------------------------
-- Fall back to the caller's network login when no name was supplied
If IsNull(@callingUser, '') = ''
SET @callingUser = dbo.GetUserLoginWithoutDomain('')
Set @infoOnly = IsNull(@infoOnly, 0)
-----------------------------------------------------------
-- temp table to hold factors
-----------------------------------------------------------
-- One row per <r ...> element of @locationList; Old_Value is filled in later
--
CREATE TABLE #TMP (
Location VARCHAR(256),
ID VARCHAR(256) NULL,
[Action] VARCHAR(256) NULL,
[Value] VARCHAR(256) NULL,
[Old_Value] VARCHAR(256) NULL
)
-----------------------------------------------------------
-- Copy @locationList text variable into the XML variable
-----------------------------------------------------------
SET @xml = @locationList
-----------------------------------------------------------
-- populate temp table with new parameters
-----------------------------------------------------------
-- Shred the XML attributes: n = location tag, i = location ID,
-- a = attribute to change (currently only 'Status'), v = new value
--
INSERT INTO #TMP
(Location, ID, [Action], [Value])
SELECT
xmlNode.value('@n', 'nvarchar(256)') Location,
xmlNode.value('@i', 'nvarchar(256)') ID,
xmlNode.value('@a', 'nvarchar(256)') [Action],
xmlNode.value('@v', 'nvarchar(256)') [Value]
FROM @xml.nodes('//r') AS R(xmlNode)
--
SELECT @myError = @@error, @myRowCount = @@rowcount
--
if @myError <> 0
RAISERROR ('Error reading in location list', 11, 9)
-----------------------------------------------------------
-- Get current status values
-----------------------------------------------------------
-- Cache each location's current status so unchanged rows can be skipped below
UPDATE [#TMP]
SET Old_Value = TML.Status
FROM T_Material_Locations AS TML
INNER JOIN [#TMP] ON TML.Tag = [#TMP].Location
WHERE ( [#TMP].Action = 'Status' )
-----------------------------------------------------------
-- Update status values that have changed
-----------------------------------------------------------
IF @infoOnly = 0
BEGIN
UPDATE T_Material_Locations
SET Status = [#TMP].Value
FROM [#TMP]
INNER JOIN T_Material_Locations ON T_Material_Locations.Tag = [#TMP].Location
WHERE ( [#TMP].Action = 'Status' )
AND ( NOT ( [#TMP].Value = ISNULL([#TMP].Old_Value, '') ) )
END
-----------------------------------------------------------
-- Preview mode: show the parsed rows (with old values) instead of updating
IF @infoOnly > 0
BEGIN
SELECT * FROM #TMP
END
---------------------------------------------------
-- Log SP usage
---------------------------------------------------
/*
IF @infoOnly = 0
BEGIN
Declare @UsageMessage varchar(512)
Set @UsageMessage = Cast(@locationList as varchar(512))
Exec PostUsageLogEntry 'UpdateMaterialLocations', @UsageMessage
END
*/
END TRY
BEGIN CATCH
-- Convert the error into a caller-facing message and record it in the log
EXEC FormatErrorMessage @message OUTPUT, @myError OUTPUT
Exec PostLogEntry 'Error', @message, 'UpdateMaterialLocations'
END CATCH
RETURN @myError
GO
GRANT VIEW DEFINITION ON [dbo].[UpdateMaterialLocations] TO [DDL_Viewer] AS [dbo]
GO
GRANT EXECUTE ON [dbo].[UpdateMaterialLocations] TO [DMS_SP_User] AS [dbo]
GO
GRANT EXECUTE ON [dbo].[UpdateMaterialLocations] TO [DMS2_SP_User] AS [dbo]
GO
| 31.095238 | 87 | 0.5067 |
92a66f5c4992b9af5267ac8b5d6edfa196f66486 | 1,924 | dart | Dart | lib/src/ui/c_controller_impl/pineaple_pos_controller_impl.dart | Root-101/pineaple_pos_client | cfcbf7f1f7102175a3c0e5a9d4f43729166d9a3a | [
"Apache-2.0"
] | 2 | 2022-03-22T23:47:50.000Z | 2022-03-24T15:15:54.000Z | lib/src/ui/c_controller_impl/pineaple_pos_controller_impl.dart | Root-101/pineaple_pos_client | cfcbf7f1f7102175a3c0e5a9d4f43729166d9a3a | [
"Apache-2.0"
] | null | null | null | lib/src/ui/c_controller_impl/pineaple_pos_controller_impl.dart | Root-101/pineaple_pos_client | cfcbf7f1f7102175a3c0e5a9d4f43729166d9a3a | [
"Apache-2.0"
] | null | null | null | import 'package:get/get.dart';
import 'package:pineaple_pos_client/clean/controller/default_crud_controller_async.dart';
import 'package:pineaple_pos_client/pineaple_exporter.dart';
import 'package:pull_to_refresh/pull_to_refresh.dart';
class PineaplePosControllerImpl
    extends DefaultCRUDControllerAsync<PineaplePosDomain, PineaplePosUseCase>
    implements PineaplePosController {
  /// Wires the generic async CRUD base controller to the POS use case.
  PineaplePosControllerImpl({
    required PineaplePosUseCase posUseCase,
  }) : super(useCase: posUseCase);

  /// Drives the pull-to-refresh header/footer; fires an initial refresh
  /// as soon as the widget is mounted.
  @override
  final RefreshController refreshController =
      RefreshController(initialRefresh: true);

  /// Cache of the last successful `findAll` result so the list can be
  /// rendered without awaiting the future again.
  @override
  List<PineaplePosDomain> findAllLoaded = [];

  /// Number of cached elements; used to size the placeholder tiles.
  @override
  int loadedCount = 0;

  /// Whether a refresh is currently in progress.
  @override
  bool get isRefreshing => refreshController.isRefresh;

  /// Reloads the POS list: rebuilds the UI (placeholder state), fetches and
  /// caches the data, waits briefly, then rebuilds with the real list.
  @override
  Future<void> onRefresh() async {
    update(); // rebuild so the placeholder/shine state reflects the refresh
    final freshItems = await useCase.findAll();
    findAllLoaded = freshItems; // cache the list
    loadedCount = freshItems.length; // cache the tile count
    await Future.delayed(const Duration(milliseconds: 3000));
    refreshController.refreshCompleted();
    update(); // rebuild with the real data
  }

  /// Completes the footer "load more" indicator after a short delay.
  @override
  Future<void> onLoading() async {
    await Future.delayed(const Duration(milliseconds: 250));
    refreshController.loadComplete();
  }

  /// Filters the cached POS list down to the entries in [areaDomain].
  List<PineaplePosDomain> findByArea(PineapleAreaDomain areaDomain) =>
      Get.find<PineaplePosUseCase>()
          .findByAreaCache(findAllLoaded, areaDomain);
}
| 32.610169 | 95 | 0.752079 |
86807f971a8db1f95bcb23866165cf23594b5856 | 1,418 | go | Go | main.go | Dewberry/mcat-hms | 6caa76b4beb66442d1774774aa7cfc3c76c9f9ee | [
"MIT"
] | null | null | null | main.go | Dewberry/mcat-hms | 6caa76b4beb66442d1774774aa7cfc3c76c9f9ee | [
"MIT"
] | 1 | 2022-03-08T20:34:33.000Z | 2022-03-08T20:34:33.000Z | main.go | Dewberry/mcat-hms | 6caa76b4beb66442d1774774aa7cfc3c76c9f9ee | [
"MIT"
] | null | null | null | package main
// @title HMS MCAT API
// @version 1.0
// @description API for the HMS MCAT
// @termsOfService http://swagger.io/terms/
// @contact.name API Support
// @contact.email slawler@dewberry.com
// @host localhost:5900
import (
"app/config"
// _ "app/docs"
"app/handlers"
"app/pgdb"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
// echoSwagger "github.com/swaggo/echo-swagger"
)
// main wires up the backend services, registers every HTTP route on an Echo
// server, and blocks serving requests until a fatal error occurs.
func main() {
	// Backend services (file store, database, listen address).
	cfg := config.Init()

	// HTTP server with request logging and panic recovery.
	router := echo.New()
	router.Use(middleware.Logger())
	router.Use(middleware.Recover())

	// Health check.
	router.GET("/ping", handlers.Ping(cfg))

	// Swagger UI (currently disabled).
	// router.GET("/swagger/*", echoSwagger.WrapHandler)

	// HMS model inspection endpoints.
	router.GET("/index", handlers.Index(cfg.FileStore))
	router.GET("/isamodel", handlers.IsAModel(cfg.FileStore))
	router.GET("/isgeospatial", handlers.IsGeospatial(cfg.FileStore))
	router.GET("/modeltype", handlers.ModelType(cfg.FileStore))
	router.GET("/modelversion", handlers.ModelVersion(cfg.FileStore))
	router.GET("/geospatialdata", handlers.GeospatialData(cfg.FileStore))

	// Postgres persistence endpoints.
	router.POST("/upsert/model", pgdb.UpsertHMSModel(cfg))
	// router.POST("/upsert/geometry", pgdb.UpsertHMSGeometry(cfg))
	router.POST("/refresh", pgdb.RefreshHMSViews(cfg.DB))
	router.POST("/vacuum", pgdb.VacuumHMSViews(cfg.DB))

	router.Logger.Fatal(router.Start(cfg.Address()))
}
| 26.259259 | 71 | 0.722144 |
53bfb6d5dc6705ff01378fb0a712b80ff17486f5 | 1,430 | java | Java | Classwork/Object-Oriented Programming/Chapter4/src/Calculator.java | narlock/java-archive | 4910d42d54f3d3dfb333727a70d8a9d9ffa5b03d | [
"MIT"
] | null | null | null | Classwork/Object-Oriented Programming/Chapter4/src/Calculator.java | narlock/java-archive | 4910d42d54f3d3dfb333727a70d8a9d9ffa5b03d | [
"MIT"
] | null | null | null | Classwork/Object-Oriented Programming/Chapter4/src/Calculator.java | narlock/java-archive | 4910d42d54f3d3dfb333727a70d8a9d9ffa5b03d | [
"MIT"
] | null | null | null | /*
* Calculator Example
* By: Anthony Narlock
* Date: January 28th, 2020
*/
import java.util.Scanner;
public class Calculator {

	/** Returns the sum of the two operands. */
	public static double getSum(double left, double right) {
		return left + right;
	}

	/** Returns the first operand minus the second. */
	public static double getSubSum(double left, double right) {
		return left - right;
	}

	/** Returns the product of the two operands. */
	public static double getProduct(double left, double right) {
		return left * right;
	}

	/**
	 * Returns the first operand divided by the second.
	 * Division by zero follows IEEE-754 double semantics (infinity/NaN).
	 */
	public static double getQuotient(double left, double right) {
		return left / right;
	}

	/**
	 * Reads two numbers and an operator from standard input, prints the
	 * result of applying the operator, and reports unknown operators on
	 * standard error.
	 */
	public static void main(String[] args) {
		Scanner input = new Scanner(System.in);

		System.out.print("Enter first number: ");
		double first = input.nextDouble();
		System.out.print("Enter operator: ");
		String operator = input.next();
		System.out.print("Enter second number: ");
		double second = input.nextDouble();

		if (operator.equals("+")) {
			double result = getSum(first, second);
			System.out.println(first + " + " + second + " = " + result);
		} else if (operator.equals("-")) {
			double result = getSubSum(first, second);
			System.out.println(first + " - " + second + " = " + result);
		} else if (operator.equals("*")) {
			double result = getProduct(first, second);
			System.out.println(first + " * " + second + " = " + result);
		} else if (operator.equals("/")) {
			double result = getQuotient(first, second);
			System.out.println(first + " / " + second + " = " + result);
		} else {
			System.err.println("Invalid operator.");
		}

		input.close();
	}
}
| 20.724638 | 61 | 0.622378 |
acd7b50fdbfad21489eedbe7326e6432ff61d966 | 470 | asm | Assembly | session_02/02-array4/array4.asm | DigiOhhh/LabArchitettura2-2017-2018 | da34b9a75ab2945ac70d5cbf69395d0db5172f6f | [
"MIT"
] | 1 | 2019-03-06T13:26:10.000Z | 2019-03-06T13:26:10.000Z | session_02/02-array4/array4.asm | DigiOhhh/LabArchitettura2-2017-2018 | da34b9a75ab2945ac70d5cbf69395d0db5172f6f | [
"MIT"
] | null | null | null | session_02/02-array4/array4.asm | DigiOhhh/LabArchitettura2-2017-2018 | da34b9a75ab2945ac70d5cbf69395d0db5172f6f | [
"MIT"
] | 1 | 2019-03-06T13:25:28.000Z | 2019-03-06T13:25:28.000Z | .data
A: .space 16 # Alloca 16 bytes per A
.text
.globl main
main:
la $t0, A # Scrive base address di A in $t0
addi $t1, $zero, 0 # $t1 = 0
sw $t1, 0($t0) # A[0] = 0
addi $t1, $zero, 4 # $t1 = 4
addi $t0, $t0, 4 # indirizzo di A[1]
sw $t1, 0($t0) # A[1] = 4
addi $t1, $zero, 8 # $t1 = 8
addi $t0, $t0, 4 # indirizzo di A[2]
sw $t1, 0($t0) # A[2] = 8
addi $t1, $zero, 12 # $t1 = 12
addi $t0, $t0, 4 # indirizzo di A[3]
sw $t1, 0($t0) # A[3] = 12
| 22.380952 | 45 | 0.510638 |
04f70fea466ae8be8d81f5d34945dae7f33931e9 | 4,293 | java | Java | adventure-editor/src/main/java/com/bladecoder/engineeditor/ui/components/ActorAnimationInputPanel.java | leereilly/bladecoder-adventure-engine | f696e84821cf26f6ccb2e3b2cc732f41102c9598 | [
"Apache-2.0"
] | 1 | 2021-05-12T06:08:03.000Z | 2021-05-12T06:08:03.000Z | adventure-editor/src/main/java/com/bladecoder/engineeditor/ui/components/ActorAnimationInputPanel.java | leereilly/bladecoder-adventure-engine | f696e84821cf26f6ccb2e3b2cc732f41102c9598 | [
"Apache-2.0"
] | null | null | null | adventure-editor/src/main/java/com/bladecoder/engineeditor/ui/components/ActorAnimationInputPanel.java | leereilly/bladecoder-adventure-engine | f696e84821cf26f6ccb2e3b2cc732f41102c9598 | [
"Apache-2.0"
] | null | null | null | /*******************************************************************************
* Copyright 2014 Rafael Garcia Moreno.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.bladecoder.engineeditor.ui.components;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.bladecoder.engine.actions.ActorAnimationRef;
import com.bladecoder.engine.anim.AnimationDesc;
import com.bladecoder.engine.model.BaseActor;
import com.bladecoder.engine.model.SpriteActor;
import com.bladecoder.engineeditor.Ctx;
/**
 * Editor input panel combining two editable select boxes: one for choosing a
 * sprite actor of the currently selected scene and one for choosing one of
 * that actor's animations. The panel's value is serialized through
 * {@link ActorAnimationRef} ("actor.animation" style reference).
 */
public class ActorAnimationInputPanel extends InputPanel {
// Select box listing the animations of the currently chosen actor.
EditableSelectBox<String> animation;
// Select box listing the sprite actors of the selected scene.
EditableSelectBox<String> actor;
// Layout container holding both select boxes and their labels.
Table panel;
/**
 * Builds the panel, fills the actor select box with the sprite actors of the
 * currently selected scene, and applies the default value if given.
 *
 * @param skin         UI skin used for all widgets
 * @param title        panel title forwarded to {@link InputPanel}
 * @param desc         panel description forwarded to {@link InputPanel}
 * @param mandatory    whether a non-empty value is required by validateField()
 * @param defaultValue initial value as an ActorAnimationRef string, may be null
 */
ActorAnimationInputPanel(Skin skin, String title, String desc,
boolean mandatory, String defaultValue) {
panel = new Table(skin);
animation = new EditableSelectBox<>(skin);
actor = new EditableSelectBox<>(skin);
panel.add(new Label(" Actor ", skin));
panel.add(actor);
panel.add(new Label(" Animation ", skin));
panel.add(animation);
// Only sprite actors have animations, so others are filtered out.
ArrayList<String> values = new ArrayList<String>();
// values.add("");
for (BaseActor a: Ctx.project.getSelectedScene().getActors().values()) {
if (a instanceof SpriteActor) {
values.add(a.getId());
}
}
// Refresh the animation list whenever a different actor is picked.
actor.addListener(new ChangeListener() {
@Override
public void changed(ChangeEvent event, Actor actor) {
actorSelected();
}
});
init(skin, title, desc, panel, mandatory, defaultValue);
String[] array = values.toArray(new String[values.size()]);
Arrays.sort(array);
actor.setItems(array);
if (values.size() > 0) {
if (defaultValue != null)
setText(defaultValue);
else
actor.setSelected("");
}
}
/**
 * Rebuilds the animation select box for the actor currently chosen in the
 * actor select box (falling back to the project's selected actor when the
 * box is empty). Flipped variants of each animation are listed as well.
 */
private void actorSelected() {
String s = actor.getSelected();
SpriteActor a = null;
if(Ctx.project.getSelectedActor() instanceof SpriteActor)
a = (SpriteActor) Ctx.project.getSelectedActor();
ArrayList<String> values = new ArrayList<String>();
if (s != null && !s.isEmpty()) {
a = (SpriteActor)Ctx.project.getSelectedScene().getActor(s, false);
}
if (a != null) {
HashMap<String, AnimationDesc> animations = a.getRenderer().getAnimations();
// An optional field gets an empty entry so the animation can be cleared.
if (!isMandatory()) {
values.add("");
}
for (AnimationDesc anim:animations.values()) {
values.add(anim.id);
// Also offer the horizontally flipped id when one exists.
String flipped = AnimationDesc.getFlipId(anim.id);
if (!flipped.isEmpty()) {
values.add(flipped);
}
}
}
String[] array = values.toArray(new String[values.size()]);
Arrays.sort(array);
animation.setItems(array);
if (values.size() > 0)
animation.setSelected("");
}
/**
 * Returns the current value serialized as an ActorAnimationRef string.
 * Falls back to the project's selected actor when the actor box is empty.
 */
public String getText() {
String selectedActor = !actor.getSelected().isEmpty() ? actor.getSelected():Ctx.project.getSelectedActor().getId();
return (new ActorAnimationRef(selectedActor, animation.getSelected())).toString();
}
/**
 * Sets the panel value from an ActorAnimationRef string, refreshing the
 * animation list before selecting the referenced animation.
 */
public void setText(String s) {
ActorAnimationRef aa = new ActorAnimationRef(s);
actor.setSelected(aa.getActorId() == null?"":aa.getActorId());
actorSelected();
animation.setSelected(aa.getAnimationId());
}
/**
 * For mandatory panels both the actor and the animation must be non-blank;
 * optional panels always validate. Updates the error marker accordingly.
 */
@Override
public boolean validateField() {
ActorAnimationRef a = new ActorAnimationRef(getText());
if(isMandatory()) {
if(a.getActorId() == null || a.getActorId().trim().isEmpty() ||
a.getAnimationId() == null || a.getAnimationId().trim().isEmpty()) {
setError(true);
return false;
}
}
setError(false);
return true;
}
}
| 27.696774 | 117 | 0.676683 |
81c1c493ece3de263c969d6b612ab1f852df3ef1 | 394 | html | HTML | NewsPaper/templates/news/post_edit.html | alsigna/skillfactory_fpw | 1f5b59cddc7d53a826ea623b3191e6a9bdc056be | [
"MIT"
] | null | null | null | NewsPaper/templates/news/post_edit.html | alsigna/skillfactory_fpw | 1f5b59cddc7d53a826ea623b3191e6a9bdc056be | [
"MIT"
] | null | null | null | NewsPaper/templates/news/post_edit.html | alsigna/skillfactory_fpw | 1f5b59cddc7d53a826ea623b3191e6a9bdc056be | [
"MIT"
] | null | null | null | {% extends "default.html" %}
{% block title %}
Пост
{% endblock title %}
{% block content %}
<h1>Пост</h1>
<form action="" method="POST">
{% csrf_token %}
{{ form.as_p }}
<!-- <input type="submit" value="Save"> -->
<a class="btn btn-secondary" href="{{ return_url }}">Cancel</a>
<input class="btn btn-success" type="submit" value="Save" />
</form>
{% endblock content %} | 21.888889 | 67 | 0.583756 |
043b569636f0ba1f6bc10353c79291a010e409cf | 7,159 | java | Java | src/main/java/net/sourceforge/pebble/event/response/IpAddressListener.java | Mantheshmanju/pebble | 9bc1c98d15e5f09f51a00611c3cbc51084d26907 | [
"Apache-1.1",
"BSD-3-Clause"
] | 43 | 2015-02-09T12:17:01.000Z | 2022-02-20T17:46:51.000Z | src/main/java/net/sourceforge/pebble/event/response/IpAddressListener.java | Mantheshmanju/pebble | 9bc1c98d15e5f09f51a00611c3cbc51084d26907 | [
"Apache-1.1",
"BSD-3-Clause"
] | 3 | 2015-07-08T06:50:27.000Z | 2020-04-06T15:38:40.000Z | src/main/java/net/sourceforge/pebble/event/response/IpAddressListener.java | Mahesh0390/test1 | 5987999d80f5a099ee70d0491487d1c140438051 | [
"Apache-1.1",
"BSD-3-Clause"
] | 51 | 2015-02-10T12:31:52.000Z | 2020-08-20T09:53:55.000Z | /*
* Copyright (c) 2003-2011, Simon Brown
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* - Neither the name of Pebble nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package net.sourceforge.pebble.event.response;
import net.sourceforge.pebble.PluginProperties;
import net.sourceforge.pebble.domain.Response;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Checks comment and TrackBack IP address against a whitelist and a blacklist.
* If in the whitelist, the response is left as-is. If in the blacklist,
* the response is set to pending and the spam score incremented by 1 point.
* If in neither, the response is set to pending but the spam score isn't
* increased. This allows responses from new IP addresses to be manually
* verified before publication.
*
* @author Simon Brown
*/
/**
 * Moderates comments and TrackBacks based on the IP address they were posted
 * from. Addresses on the whitelist are published untouched; addresses on the
 * blacklist are held as pending with an increased spam score; addresses on
 * neither list are held as pending (without a score change) so they can be
 * reviewed manually. Approving or rejecting a response moves its address
 * between the two lists.
 *
 * @author Simon Brown
 */
public class IpAddressListener extends BlogEntryResponseListenerSupport {

  /** the log used by this class */
  private static final Log log = LogFactory.getLog(IpAddressListener.class);

  /** the name of the whitelist property (comma separated addresses) */
  public static final String WHITELIST_KEY = "IpAddressListener.whitelist";

  /** the name of the blacklist property (comma separated addresses) */
  public static final String BLACKLIST_KEY = "IpAddressListener.blacklist";

  /**
   * Classifies a newly added comment or TrackBack by its source address.
   *
   * @param response   a Response
   */
  protected void blogEntryResponseAdded(Response response) {
    PluginProperties properties = response.getBlogEntry().getBlog().getPluginProperties();

    if (isListed(response, properties.getProperty(BLACKLIST_KEY))) {
      log.info(response.getTitle() + " marked as pending : IP address " + response.getIpAddress() + " is on blacklist");
      response.setPending();
      response.incrementSpamScore();
    } else if (isListed(response, properties.getProperty(WHITELIST_KEY))) {
      // known good address - publish as-is
    } else {
      log.info(response.getTitle() + " marked as pending : IP address " + response.getIpAddress() + " not on blacklist or whitelist");
      response.setPending();
    }
  }

  /**
   * Moves the address of an approved response onto the whitelist and off the
   * blacklist, then persists both lists.
   *
   * @param response   a Response
   */
  protected void blogEntryResponseApproved(Response response) {
    PluginProperties properties = response.getBlogEntry().getBlog().getPluginProperties();
    String address = response.getIpAddress();

    if (address == null || address.trim().length() == 0) {
      return;
    }

    // both lists live in the same property store, so update them atomically
    synchronized (properties) {
      String whitelist = addIpAddress(response, properties.getProperty(WHITELIST_KEY));
      String blacklist = removeIpAddress(response, properties.getProperty(BLACKLIST_KEY));
      properties.setProperty(WHITELIST_KEY, whitelist);
      properties.setProperty(BLACKLIST_KEY, blacklist);
      properties.store();
    }
  }

  /**
   * Moves the address of a rejected response onto the blacklist and off the
   * whitelist, then persists both lists.
   *
   * @param response   a Response
   */
  protected void blogEntryResponseRejected(Response response) {
    PluginProperties properties = response.getBlogEntry().getBlog().getPluginProperties();
    String address = response.getIpAddress();

    if (address == null || address.trim().length() == 0) {
      return;
    }

    synchronized (properties) {
      String blacklist = addIpAddress(response, properties.getProperty(BLACKLIST_KEY));
      String whitelist = removeIpAddress(response, properties.getProperty(WHITELIST_KEY));
      properties.setProperty(BLACKLIST_KEY, blacklist);
      properties.setProperty(WHITELIST_KEY, whitelist);
      properties.store();
    }
  }

  /**
   * Determines whether the IP address of the specified response is contained
   * within a given comma separated list of IP addresses.
   *
   * @param response   a Response instance
   * @param list       a list of IP addresses, comma separated (may be null)
   * @return  true if the IP address is contained within the list,
   *          false otherwise
   */
  private boolean isListed(Response response, String list) {
    String address = response.getIpAddress();
    if (address == null) {
      return false;
    }

    String[] entries = (list != null) ? list.split(",") : new String[0];
    for (String entry : entries) {
      if (address.equals(entry)) {
        return true;
      }
    }

    return false;
  }

  /**
   * Adds the IP address of the specified response to the given list,
   * unless it is already present.
   *
   * @param response   a Response instance
   * @param list       a list of IP addresses, comma separated (may be null)
   * @return  an updated list of IP addresses
   */
  private String addIpAddress(Response response, String list) {
    if (list == null || list.trim().length() == 0) {
      return response.getIpAddress();
    }
    if (isListed(response, list)) {
      return list;
    }
    return list + "," + response.getIpAddress();
  }

  /**
   * Removes every occurrence of the IP address of the specified response
   * from the given list.
   *
   * @param response   a Response instance
   * @param list       a list of IP addresses, comma separated (may be null)
   * @return  an updated list of IP addresses
   */
  private String removeIpAddress(Response response, String list) {
    String address = response.getIpAddress();
    if (address == null) {
      return list;
    }

    String[] entries = (list != null) ? list.split(",") : new String[0];
    StringBuilder kept = new StringBuilder();
    for (String entry : entries) {
      if (!address.equals(entry)) {
        if (kept.length() > 0) {
          kept.append(",");
        }
        kept.append(entry);
      }
    }

    return kept.toString();
  }
}
| 34.921951 | 134 | 0.684453 |
d76ef4b65d60f059a97483f2f8d15d92e7797d82 | 851 | swift | Swift | 10-all-playgrounds-for-part-2/begin/Part 2 - (ALL EPISODES) Beginning Collections - Begin.playground/Pages/04 - Arrays.xcplaygroundpage/Contents.swift | ucwealth/video-ps1-materials | b64796d4a039633645f3acf77584610d9863fa3c | [
"Apache-2.0"
] | null | null | null | 10-all-playgrounds-for-part-2/begin/Part 2 - (ALL EPISODES) Beginning Collections - Begin.playground/Pages/04 - Arrays.xcplaygroundpage/Contents.swift | ucwealth/video-ps1-materials | b64796d4a039633645f3acf77584610d9863fa3c | [
"Apache-2.0"
] | null | null | null | 10-all-playgrounds-for-part-2/begin/Part 2 - (ALL EPISODES) Beginning Collections - Begin.playground/Pages/04 - Arrays.xcplaygroundpage/Contents.swift | ucwealth/video-ps1-materials | b64796d4a039633645f3acf77584610d9863fa3c | [
"Apache-2.0"
] | null | null | null | //: [⇐ Previous: 03 - Challenge - Tuples](@previous)
//: ## Episode 04: Arrays
var pastries: [String] = ["buns", "cake", "donut", "chocolate"]
pastries[0]
let firstThree = Array(pastries[1...3])
firstThree[0]
pastries.append("juma")
//pastries.removeAll()
pastries.insert("popcorn", at: 2)
pastries.remove(at: 0)
pastries.isEmpty
pastries.count
pastries.contains("joba")
if let first = pastries.first{
print(first)
}
pastries[0...1] = ["brownies", "tart", "cowbell"] //This replaces the elements at index 0-1 with 3 elements
pastries.removeFirst()
pastries.removeLast()
pastries.swapAt(1,2)
//This optional unwrapping above is another way of writing this if statement below
//if pastries.first != nil {
// let first = pastries.first
// print(first)
//} else {
// print("Shii is empty")
//}
//: [⇒ Next: 05 - Operating on Arrays](@next)
| 27.451613 | 107 | 0.685076 |
4ca70f0a16f64826af94df1cf96c659f9961b19d | 1,973 | swift | Swift | Source/Node+References.swift | s4cha/Komponents | 2a69b49bd7e00fc61f0887455205fd102b3b105a | [
"MIT"
] | 9 | 2020-01-05T14:51:27.000Z | 2021-09-19T15:15:14.000Z | Source/Node+References.swift | freshOS/Komponents-Deprecated | 2a69b49bd7e00fc61f0887455205fd102b3b105a | [
"MIT"
] | null | null | null | Source/Node+References.swift | freshOS/Komponents-Deprecated | 2a69b49bd7e00fc61f0887455205fd102b3b105a | [
"MIT"
] | null | null | null | //
// Node+References.swift
// Komponents
//
// Created by Sacha Durand Saint Omer on 12/05/2017.
// Copyright © 2017 freshOS. All rights reserved.
//
import UIKit
import MapKit
/// Copies the freshly created UIKit `view` into the matching node's `ref`
/// pointer, so component code can keep a reference to the live view.
///
/// Each node type is paired with the concrete UIView subclass it renders to;
/// the pointer is only written when both casts succeed. The checks are
/// independent `if`s (not `else if`), so a node conforming to more than one
/// case would be written more than once.
/// NOTE(review): assumes each concrete node type matches exactly one branch
/// (plain `View` being the generic case) - confirm against the node types.
func linkReference(of node: IsNode, to view: UIView) {
if let node = node as? View {
node.ref?.pointee = view
}
if let node = node as? Label, let view = view as? UILabel {
node.ref?.pointee = view
}
if let node = node as? Button, let view = view as? UIButton {
node.ref?.pointee = view
}
if let node = node as? Field, let view = view as? UITextField {
node.ref?.pointee = view
}
if let node = node as? TextView, let view = view as? UITextView {
node.ref?.pointee = view
}
if let node = node as? Image, let view = view as? UIImageView {
node.ref?.pointee = view
}
if let node = node as? Switch, let view = view as? UISwitch {
node.ref?.pointee = view
}
if let node = node as? Slider, let view = view as? UISlider {
node.ref?.pointee = view
}
if let node = node as? Progress, let view = view as? UIProgressView {
node.ref?.pointee = view
}
if let node = node as? Map, let view = view as? MKMapView {
node.ref?.pointee = view
}
if let node = node as? PageControl, let view = view as? UIPageControl {
node.ref?.pointee = view
}
if let node = node as? ActivityIndicatorView, let view = view as? UIActivityIndicatorView {
node.ref?.pointee = view
}
// Both stack flavors render to UIStackView.
if let node = node as? HorizontalStack, let view = view as? UIStackView {
node.ref?.pointee = view
}
if let node = node as? VerticalStack, let view = view as? UIStackView {
node.ref?.pointee = view
}
if let node = node as? ScrollView, let view = view as? UIScrollView {
node.ref?.pointee = view
}
if let node = node as? Table, let view = view as? UITableView {
node.ref?.pointee = view
}
}
| 31.822581 | 95 | 0.603142 |
420be3711fd299f69dbc3e5c80c3650f29ac6fe6 | 2,708 | kt | Kotlin | src/main/java/com/mrkirby153/tgabot/commands/AdminCommands.kt | mrkirby153/TGABot | 12ea81ce527749ab7637db36a42adfd5829782c4 | [
"MIT"
] | 1 | 2019-02-21T12:38:52.000Z | 2019-02-21T12:38:52.000Z | src/main/java/com/mrkirby153/tgabot/commands/AdminCommands.kt | mrkirby153/TGABot | 12ea81ce527749ab7637db36a42adfd5829782c4 | [
"MIT"
] | null | null | null | src/main/java/com/mrkirby153/tgabot/commands/AdminCommands.kt | mrkirby153/TGABot | 12ea81ce527749ab7637db36a42adfd5829782c4 | [
"MIT"
] | null | null | null | package com.mrkirby153.tgabot.commands
import com.mrkirby153.botcore.command.Command
import com.mrkirby153.botcore.command.Context
import com.mrkirby153.botcore.command.args.CommandContext
import com.mrkirby153.tgabot.Bot
import com.mrkirby153.tgabot.listener.PollListener
import com.mrkirby153.tgabot.listener.ReactionManager
import com.mrkirby153.tgabot.polls.PollManager
import me.mrkirby153.kcutils.Time
/**
 * Administrative chat commands: bot shutdown, moderation thresholds and
 * diagnostics for the reaction removal queue.
 */
class AdminCommands {

    /** Acknowledges the request in the channel, then stops the bot. */
    @Command(name = "shutdown", clearance = 100)
    fun shutdown(context: Context, cmdContext: CommandContext) {
        context.channel.sendMessage("Shutting down...").queue {
            Bot.shutdown()
        }
    }

    /** Shows the reaction clear threshold, or updates it when `num` is given. */
    @Command(name = "reaction-threshold", clearance = 100, arguments = ["[num:int]"])
    fun reactionThreshold(context: Context, cmdContext: CommandContext) {
        if (!cmdContext.has("num")) {
            context.channel.sendMessage("Current threshold: ${ReactionManager.threshold}").queue()
        } else {
            ReactionManager.threshold = cmdContext.getNotNull("num")
            Bot.adminLog.log("Reaction clear threshold set to ${ReactionManager.threshold}")
            context.channel.sendMessage("Updated threshold to ${ReactionManager.threshold}").queue()
        }
    }

    /** Updates the poll spam-alert bucket to `count` events per `period`. */
    @Command(name = "spam-alert", clearance = 100, arguments = ["<count:int>", "<period:int>"])
    fun spamAlert(context: Context, cmdContext: CommandContext) {
        val count: Int = cmdContext.getNotNull("count")
        val period: Int = cmdContext.getNotNull("period")
        val bucket = PollManager.pollBucket
        bucket.count = count
        bucket.period = period
        Bot.adminLog.log(
                "${context.author.asMention} updated the alert threshold to $count in $period. This may trigger some false positives")
        context.channel.sendMessage("Spam alert threshold updated to $count/$period").queue()
    }

    /** Reports the round-trip latency between the command and the reply. */
    @Command(name = "ping", parent = "pollbot", clearance = 100)
    fun pingCommand(context: Context, cmdContext: CommandContext) {
        val startedAt = System.currentTimeMillis()
        context.channel.sendTyping().queue {
            val elapsed = System.currentTimeMillis() - startedAt
            context.channel.sendMessage(
                    ":ping_pong: Pong! ${Time.format(1, elapsed)}").queue()
        }
    }

    /** Dumps the size (and, when it fits in a message, the contents) of the pending reaction removal queue. */
    @Command(name = "stats", parent = "pollbot", clearance = 100)
    fun reactionManagerStats(context: Context, cmdContext: CommandContext) {
        val manager = PollListener.reactionManager
        val queueDump = "Queue: `${manager.getQueue(true)}`"
        context.channel.sendMessage(
                "There are `${manager.queueSize()}` pending reaction removals").queue()
        // Discord messages are capped at 2000 characters.
        if (queueDump.length < 2000) {
            context.channel.sendMessage(queueDump).complete()
        }
    }
}
498a1190454f67e243a8081cd6418f860de30eaf | 2,699 | html | HTML | angular-demos/lists/list-chat-sample/src/app/list-chat-sample/list-chat-sample.component.html | IgniteUI/igniteui-live-editing-samples | e5f2c29fa4fbe5c288f731f27f91a684a136bd43 | [
"Apache-2.0"
] | 4 | 2020-09-28T00:04:33.000Z | 2022-01-07T07:12:00.000Z | angular-demos/lists/list-chat-sample/src/app/list-chat-sample/list-chat-sample.component.html | IgniteUI/igniteui-live-editing-samples | e5f2c29fa4fbe5c288f731f27f91a684a136bd43 | [
"Apache-2.0"
] | 2 | 2020-09-18T11:53:58.000Z | 2021-11-12T11:16:08.000Z | angular-demos/lists/list-chat-sample/src/app/list-chat-sample/list-chat-sample.component.html | IgniteUI/igniteui-live-editing-samples | e5f2c29fa4fbe5c288f731f27f91a684a136bd43 | [
"Apache-2.0"
] | 4 | 2020-08-10T09:12:30.000Z | 2022-01-08T05:10:45.000Z | <div class="chat-sample-wrapper">
<form class="chat-sample__form">
<igx-list #list>
<ng-template #othersMessage let-message="message" let-contact="contact" let-messageIndex="index">
<igx-list-item class="contact" *ngIf="isFirstMessage(messageIndex)">
<div class="contact__panel">
<igx-avatar [src]="contact.photo" roundShape="true"></igx-avatar>
<span class="message__info">
{{ contact.name }}, {{ message.timestamp | date: 'shortTime'}}
</span>
</div>
</igx-list-item>
<igx-list-item [ngClass]="{
'others-message': true,
'others-message--first': isFirstMessage(messageIndex)}">
<span>
{{ message.message }}
</span>
</igx-list-item>
</ng-template>
<ng-template #myMessage let-message="message" let-messageIndex="index">
<igx-list-item class="contact" *ngIf="isFirstMessage(messageIndex)">
<span class="my-message__info">
{{ message.timestamp | date: 'shortTime'}}
</span>
</igx-list-item>
<igx-list-item class="my-message" [ngClass]="{
'my-message': true,
'my-message--first': isFirstMessage(messageIndex)}">
<span>
{{ message.message }}
</span>
</igx-list-item>
</ng-template>
<ng-container *ngFor="let message of messagesService.getMessages(); index as i">
<ng-container *ngTemplateOutlet="getMessageTemplate(message); context: {message: message, contact: contactsService.getContact(message.authorId), index: i}">
</ng-container>
</ng-container>
</igx-list>
<div class="chat-massage-field">
<igx-input-group class="chat-input-group" type="box">
<input class="input-round-box" placeholder="Send message" autocomplete="off" igxInput #newMessage name="newMessage"
[(ngModel)]="message" (keypress)="onMessageKeypress($event)" />
</igx-input-group>
<button class="send-message-button" igxButton="icon" igxButtonColor="#09f" igxButtonBackground="#fff"
igxRipple="#09f" igxRippleCentered="true" (click)="onSendButtonClick()">
<igx-icon family="material">send</igx-icon>
</button>
</div>
</form>
</div>
| 49.981481 | 172 | 0.51093 |
2682408cba10f4eb0202d4cd7bc9d33fcfae94f6 | 961 | java | Java | Libreria/src/vista/BarraMenuUs.java | jesusds7/Libreria | 475261d718e75e8134207ad6feb739f9d827fa4e | [
"Artistic-2.0"
] | null | null | null | Libreria/src/vista/BarraMenuUs.java | jesusds7/Libreria | 475261d718e75e8134207ad6feb739f9d827fa4e | [
"Artistic-2.0"
] | null | null | null | Libreria/src/vista/BarraMenuUs.java | jesusds7/Libreria | 475261d718e75e8134207ad6feb739f9d827fa4e | [
"Artistic-2.0"
] | null | null | null | package vista;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import controlador.Controlador;
public class BarraMenuUs extends JMenuBar{
private static final long serialVersionUID = 1L;
private JMenu menuArchivo;
private JMenuItem itemComprar;
private JMenuItem itemBuscar;
private JMenuItem itemMostrarLibros;
public BarraMenuUs(Controlador controlador) {
menuArchivo = new JMenu();
menuArchivo.setText(ConstantesGUI.T_MENU_ARCHIVO_US);
add(menuArchivo);
itemMostrarLibros = new JMenuItem();
itemMostrarLibros.setText(ConstantesGUI.T_MENU_ITEM_US_MOSTRAR_LIBROS);
menuArchivo.add(itemMostrarLibros);
itemComprar = new JMenuItem();
itemComprar.setText(ConstantesGUI.T_MENU_ITEM_US_COMPRAR_LIBRO);
menuArchivo.add(itemComprar);
itemBuscar = new JMenuItem();
itemBuscar.setText(ConstantesGUI.T_MENU_ITEM_US_BUSCAR_LIBRO);
menuArchivo.add(itemBuscar);
}
} | 28.264706 | 74 | 0.782518 |
04e5673279d4d58bd073adf0bc1517500d0e3c2d | 646 | java | Java | Spring-Projects/EXAM PREP 3/ColonialCouncilBank/src/main/java/app/ccb/repositories/EmployeeRepository.java | DenislavVelichkov/Java-DBS-Module-June-2019 | 643422bf41d99af1e0bbd3898fa5adfba8b2c36c | [
"MIT"
] | null | null | null | Spring-Projects/EXAM PREP 3/ColonialCouncilBank/src/main/java/app/ccb/repositories/EmployeeRepository.java | DenislavVelichkov/Java-DBS-Module-June-2019 | 643422bf41d99af1e0bbd3898fa5adfba8b2c36c | [
"MIT"
] | 1 | 2020-09-09T19:43:19.000Z | 2020-09-09T19:43:19.000Z | Spring-Projects/EXAM PREP 3/ColonialCouncilBank/src/main/java/app/ccb/repositories/EmployeeRepository.java | DenislavVelichkov/Java-DBS-Module-June-2019 | 643422bf41d99af1e0bbd3898fa5adfba8b2c36c | [
"MIT"
] | null | null | null | package app.ccb.repositories;
import app.ccb.domain.entities.Employee;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Optional;
@Repository
public interface EmployeeRepository extends JpaRepository<Employee, Integer> {
Optional<Employee>findByFirstNameAndLastName(String firstName, String lastName);
@Query(value =
"SELECT e FROM Employee AS e WHERE e.clients IS NOT EMPTY " +
"ORDER BY e.clients.size DESC, e.id")
List<Employee> findAllByClientsNotNull();
}
| 30.761905 | 83 | 0.786378 |
6f461d547801298e8aea52752602a89f8b8692e9 | 3,278 | lua | Lua | 6_setup/additional_files/zh/mm8/zh_update/Scripts/Structs/After/RemoveOutdoorLimits.lua | might-and-magic/mm678_i18n | ddc37c743ac37169908641dcd5328a7ccae5138f | [
"MIT"
] | 17 | 2020-02-25T14:37:23.000Z | 2022-02-21T15:33:09.000Z | 6_setup/additional_files/zh/mm8/zh_update/Scripts/Structs/After/RemoveOutdoorLimits.lua | might-and-magic/mm678_i18n | ddc37c743ac37169908641dcd5328a7ccae5138f | [
"MIT"
] | 1 | 2022-01-09T02:14:56.000Z | 2022-02-13T10:08:11.000Z | 6_setup/additional_files/zh/mm8/zh_update/Scripts/Structs/After/RemoveOutdoorLimits.lua | might-and-magic/mm678_i18n | ddc37c743ac37169908641dcd5328a7ccae5138f | [
"MIT"
] | 3 | 2020-10-06T20:38:13.000Z | 2021-02-17T02:11:17.000Z | local i4, i2, i1, u4, u2, u1, i8, u8 = mem.i4, mem.i2, mem.i1, mem.u4, mem.u2, mem.u1, mem.i8, mem.u8
local mmver = offsets.MMVersion
mem.IgnoreProtection(true)
local OldCount = 2000
local NewCount = (FacetRefsLimit or 8192) + 6000
if mmver == 6 then
local PolySize = 84
local dn = NewCount - OldCount
local Offset = mem.StaticAlloc(PolySize*NewCount) - 0x6CBBD8
local Offset2 = mem.StaticAlloc(PolySize*NewCount) - 0x72F8F8
local counts = {0x4695DD, 0x470B71, 0x470F49, 0x471312, 0x47915C, 0x47999D}
local refs = {0x4694E1, 0x469C18, 0x469C1E, 0x469C2C, 0x469FB4, 0x469FCA, 0x470A6E, 0x470E92, 0x471267, 0x4717DD, 0x479896, 0x479956}
local endrefs = {0x469FC2+1, 0x469FD9+1}
local refs2 = {0x4798A1, 0x4798E4, 0x479951}
local function Process(t, d)
for _, p in ipairs(t) do
u4[p] = u4[p] + d
end
end
Process(counts, dn)
Process(refs, Offset)
Process(endrefs, Offset + dn*PolySize)
Process(refs2, Offset2)
elseif mmver == 7 then
local PolySize = 268
local dn = NewCount - OldCount
local Offset = mem.StaticAlloc(PolySize*NewCount) - 0x77EA88
local Offset2 = mem.StaticAlloc(4*NewCount) - 0x80A8A8
local counts = {0x4787B7, 0x478C6A, 0x48062A, 0x480A5B, 0x480E58, 0x4814EC, 0x481802, 0x481ADF, 0x487499}
local refs = {0x4784D1, 0x478B5F, 0x479363, 0x479385, 0x47A568, 0x47A582, 0x480565, 0x48098D, 0x480D8A, 0x48141D, 0x48171C, 0x4819FF, 0x481EC8, 0x487366, 0x4873AC, 0x48745F, 0x487DAE}
local endrefs = {0x47A57A+1, 0x47A592+1, 0x487DBA+1}
local refs2 = {0x4873BD, 0x4873ED, 0x48747A}
local function Process(t, d)
for _, p in ipairs(t) do
u4[p] = u4[p] + d
end
end
Process(counts, dn)
Process(refs, Offset)
Process(endrefs, Offset + dn*PolySize)
Process(refs2, Offset2)
else
local PolySize = 268
local dn = NewCount - OldCount
local Offset = mem.StaticAlloc(PolySize*NewCount) - 0x7BCA50
local Offset2 = mem.StaticAlloc(4*NewCount) - 0x848870
local counts = {0x477261, 0x477738, 0x477B9B, 0x47FE0F, 0x480234, 0x48064D, 0x480D56, 0x48106E, 0x48134B, 0x4872DF}
local refs = {0x476F6F, 0x477469, 0x477A90, 0x478294, 0x4782B8, 0x479750, 0x47976A, 0x47FD52, 0x48014A, 0x48057D, 0x480C86, 0x480F86, 0x48126B, 0x481734, 0x486CA6, 0x486CEC, 0x4872AB, 0x4876BF}
local endrefs = {0x4876CB+1, 0x479762+1, 0x47977A+1}
local refs2 = {0x486CFD, 0x486D31, 0x4872C5}
local function Process(t, d)
for _, p in ipairs(t) do
u4[p] = u4[p] + d
end
end
Process(counts, dn)
Process(refs, Offset)
Process(endrefs, Offset + dn*PolySize)
Process(refs2, Offset2)
-- SW limits
do
local OldEdgeCount = 6000
local NewEdgeCount = NewCount*3
local OldSpansCount = 15000
local NewSpansCount = OldSpansCount*(NewCount/OldCount)
-- surfs
local size = 0x24
local sizes = {0x4863BF, 0x4868A9}
local counts = {0x4864EC+6} -- strange: 0x4876F9
Process(counts, dn)
Process(sizes, dn*size)
-- edges
local size = 0x34
local sizes = {0x4863A2, 0x486823}
local counts = {0x486502}
local dn = NewEdgeCount - OldEdgeCount
Process(counts, dn)
Process(sizes, dn*size)
-- spans
local size = 0x18
local sizes = {0x486386}
local counts = {0x486AAB+6, 0x486B94+6}
local dn = NewSpansCount - OldSpansCount
Process(counts, dn)
Process(sizes, dn*size)
end
end
mem.IgnoreProtection(false)
| 29.00885 | 194 | 0.721171 |
26975674f6cb51ec4f7e647175c122823f9432b9 | 675 | java | Java | nico-nolog/src/main/java/org/nico/log/extra/Log4j.java | ainilili/NoFramework | 44a56dc1b885d9e3293e910df68392b3bf95e7c3 | [
"Apache-2.0"
] | 4 | 2018-11-13T02:29:36.000Z | 2019-10-29T03:13:38.000Z | nico-nolog/src/main/java/org/nico/log/extra/Log4j.java | ainilili/no-framework | 44a56dc1b885d9e3293e910df68392b3bf95e7c3 | [
"Apache-2.0"
] | null | null | null | nico-nolog/src/main/java/org/nico/log/extra/Log4j.java | ainilili/no-framework | 44a56dc1b885d9e3293e910df68392b3bf95e7c3 | [
"Apache-2.0"
] | 1 | 2018-06-29T09:22:34.000Z | 2018-06-29T09:22:34.000Z | package org.nico.log.extra;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggerFactory;
import org.nico.log.Logging;
public class Log4j implements Logging{
private Logger logger;
public Log4j(Class<?> clazz){
this.logger = Logger.getLogger(clazz);
}
public Log4j(String sign){
this.logger = Logger.getLogger(sign);
}
@Override
public void debug(Object o) {
logger.debug(o);
}
@Override
public void info(Object o) {
logger.info(o);
}
@Override
public void hint(Object o) {
logger.info(o);
}
@Override
public void warning(Object o) {
logger.warn(o);
}
@Override
public void error(Object o) {
logger.error(o);
}
}
| 14.673913 | 42 | 0.691852 |
ed63ec1b3cb8cd0728cfd0b1fa1fa703dc82c8dd | 4,880 | swift | Swift | app/ChessAggregator/Modules/ForgotPassword/Views/ForgotPasswordView.swift | NickPuchko/ChessTournamentAggregator_IOS | 7a2f709a01b9ebdac4a6bdc8ddba2fa62f37760a | [
"MIT"
] | 4 | 2020-10-12T10:26:41.000Z | 2021-03-24T19:50:50.000Z | app/ChessAggregator/Modules/ForgotPassword/Views/ForgotPasswordView.swift | NickPuchko/ChessTournamentAggregator_IOS | 7a2f709a01b9ebdac4a6bdc8ddba2fa62f37760a | [
"MIT"
] | null | null | null | app/ChessAggregator/Modules/ForgotPassword/Views/ForgotPasswordView.swift | NickPuchko/ChessTournamentAggregator_IOS | 7a2f709a01b9ebdac4a6bdc8ddba2fa62f37760a | [
"MIT"
] | 2 | 2020-10-13T14:50:24.000Z | 2020-11-01T20:11:09.000Z | //
// Created by Максим Сурков on 07.12.2020.
//
import UIKit
class ForgotPasswordView: AutoLayoutView {
let scrollableStackView: ScrollableStackView = {
var result: ScrollableStackView
let config: ScrollableStackView.Config = ScrollableStackView.Config(
stack: ScrollableStackView.Config.Stack(axis: .vertical, distribution: .fill,
alignment: .fill, spacing: 15.0),
scroll: .defaultVertical,
pinsStackConstraints: UIEdgeInsets(top: 20.0, left: 16.0, bottom: 0.0, right: -16.0)
)
result = ScrollableStackView(config: config)
return result
}()
private let textFieldHeight: CGFloat = 40.0
private let registrationButtonSpacingToContentView: CGFloat = 20.0
private let registrationButtonHeight: CGFloat = 50.0
private let switchToOrganizerStackViewHeight: CGFloat = 30.0
var registrationOffset: CGFloat {
registrationButtonSpacingToContentView + registrationButtonHeight + switchToOrganizerStackViewHeight
}
private var emailAddressStackView: UIStackView?
private let emailAddress = UITextField()
let emailAddressWarning = WarningLabel()
private let changeButton = UIButton(type: .system)
var onTapChangeButton: ((String?) -> Void)?
init() {
super.init(frame: .zero)
self.setup()
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func setup() {
self.addSubview(scrollableStackView)
setupRoundedTextField(
textField: emailAddress,
textFieldPlaceholder: "Введите Ваш email*",
textFieldKeyboard: .emailAddress
)
emailAddressWarning.text = "Адрес почты недействителен. Введите его в формате email@example.com"
self.emailAddressStackView = buildStackView(withTextField: emailAddress, andLabel: emailAddressWarning)
self.scrollableStackView.addArrangedSubview(emailAddressStackView!)
emailAddress.backgroundColor = UIColor.rgba(240, 241, 245)
changeButton.setTitle("Сменить пароль", for: .normal)
changeButton.titleLabel?.font = UIFont(name: "AppleSDGothicNeo-Bold", size: 18)
changeButton.backgroundColor = UIColor.rgba(0, 122, 255)
changeButton.setTitleColor(.white, for: .normal)
changeButton.layer.cornerRadius = 8.0
changeButton.clipsToBounds = false
changeButton.addTarget(self, action: #selector(onTapChange), for: .touchUpInside)
self.scrollableStackView.addSubview(changeButton)
}
override func setupConstraints() {
super.setupConstraints()
self.scrollableStackView.pins()
NSLayoutConstraint.activate([
changeButton.topAnchor.constraint(
equalTo: scrollableStackView.contentView.bottomAnchor,
constant: registrationButtonSpacingToContentView
),
changeButton.heightAnchor.constraint(equalToConstant: registrationButtonHeight),
changeButton.widthAnchor.constraint(equalToConstant: 200.0),
changeButton.centerXAnchor.constraint(equalTo: scrollableStackView.contentView.centerXAnchor),
])
self.scrollableStackView.set(contentInset: UIEdgeInsets(top: 0, left: 0, bottom: registrationOffset, right: 0))
}
@objc private func onTapChange() {
self.onTapChangeButton?(
self.emailAddress.text)
}
}
private extension ForgotPasswordView {
func buildStackView(withTextField textField: UITextField, andLabel label: UILabel) -> UIStackView {
let stackView = UIStackView()
stackView.axis = .vertical
stackView.distribution = .fill
stackView.alignment = .fill
stackView.addArrangedSubview(textField)
stackView.addArrangedSubview(label)
return stackView
}
func setupRoundedTextField(textField: UITextField, textFieldPlaceholder: String,
textFieldKeyboard: UIKeyboardType = .default) {
let attributedString = buildStringWithColoredAsterisk(string: textFieldPlaceholder)
textField.attributedPlaceholder = attributedString
textField.borderStyle = .roundedRect
textField.keyboardType = textFieldKeyboard
textField.autocapitalizationType = .none
}
func buildStringWithColoredAsterisk(string: String) -> NSMutableAttributedString {
let attributedString = NSMutableAttributedString.init(string: string)
let range = (string as NSString).range(of: "*")
attributedString.addAttribute(
NSAttributedString.Key.foregroundColor,
value: Styles.Color.asteriskRed,
range: range
)
return attributedString
}
}
| 34.609929 | 119 | 0.679303 |
b64c7d40bd0d05731d7a27fe5e53dc366daf6e5f | 2,183 | rb | Ruby | config/deploy.rb | jay16/demo.solife | 9b417a927c2f8b62e26e6d0569cf3cce5d51a1c2 | [
"MIT"
] | null | null | null | config/deploy.rb | jay16/demo.solife | 9b417a927c2f8b62e26e6d0569cf3cce5d51a1c2 | [
"MIT"
] | null | null | null | config/deploy.rb | jay16/demo.solife | 9b417a927c2f8b62e26e6d0569cf3cce5d51a1c2 | [
"MIT"
] | null | null | null | #encoding: utf-8
set :domain, "solife.us"
set :application, "carder"
set :scm, :git
set :repository, "https://github.com/jay16/carder"
set :branch, 'master'
# set :scm, :git # You can set :scm explicitly or Capistrano will make an intelligent guess based on known version control directory names
# Or: `accurev`, `bzr`, `cvs`, `darcs`, `git`, `mercurial`, `perforce`, `subversion` or `none`
# do not use sudo
set :use_sudo, false
set(:run_method) { use_sudo ? :sudo : :run }
# This is needed to correctly handle sudo password prompt
default_run_options[:pty] = true
# Input your username to login remote server address
set :user, "jay"
set :group, user
set :runner, user
role :web, domain # Your HTTP server, Apache/etc
role :app, domain # This may be the same as your `Web` server
role :db, domain
#role :db, "sqlite", :primary => true # This is where Rails migrations will run
#role :db, "your slave db-server here"
set :rails_env, :development
# Where will it be located on a server?
set :deploy_to, "/home/work/#{application}"
set :unicorn_conf, "#{deploy_to}/config/unicorn.rb"
set :unicorn_pid, "#{deploy_to}/shared/pids/unicorn.pid"
# Unicorn control tasks
namespace :deploy do
task :restart do
run "if [ -f #{unicorn_pid} ]; then kill -USR2 `cat #{unicorn_pid}`; else cd #{deploy_to}/current && bundle exec unicorn -c #{unicorn_conf} -E #{rails_env} -D; fi"
end
task :start do
run "cd #{deploy_to}/current && bundle exec unicorn -c #{unicorn_conf} -E #{rails_env} -D"
end
task :stop do
run "if [ -f #{unicorn_pid} ]; then kill -QUIT `cat #{unicorn_pid}`; fi"
end
end
# if you want to clean up old releases on each deploy uncomment this:
# after "deploy:restart", "deploy:cleanup"
# if you're still using the script/reaper helper you will need
# these http://github.com/rails/irs_process_scripts
# If you are using Passenger mod_rails uncomment this:
# namespace :deploy do
# task :start do ; end
# task :stop do ; end
# task :restart, :roles => :app, :except => { :no_release => true } do
# run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
# end
# end
| 34.650794 | 167 | 0.677966 |
f4a0a6e8eb679f3da7c2219263df16986393dc25 | 1,005 | go | Go | solutions/380.go | TheDoctor0/leetcode-golang | ef17559c4646e798db6cd68ff01e4d7c553c4262 | [
"MIT"
] | 1 | 2020-02-12T17:12:46.000Z | 2020-02-12T17:12:46.000Z | solutions/380.go | TheDoctor0/leetcode-golang | ef17559c4646e798db6cd68ff01e4d7c553c4262 | [
"MIT"
] | null | null | null | solutions/380.go | TheDoctor0/leetcode-golang | ef17559c4646e798db6cd68ff01e4d7c553c4262 | [
"MIT"
] | 2 | 2020-03-25T22:32:13.000Z | 2020-08-08T14:32:42.000Z | package solutions
import (
"math/rand"
)
type RandomizedSet struct {
nums []int
numberToIndex map[int]int
}
func Constructor() RandomizedSet {
return RandomizedSet{[]int{}, make(map[int]int)}
}
func (this *RandomizedSet) Insert(val int) bool {
if _, ok := this.numberToIndex[val]; ok {
return false
}
this.nums = append(this.nums, val)
this.numberToIndex[val] = len(this.nums) - 1
return true
}
func (this *RandomizedSet) Remove(val int) bool {
index, ok := this.numberToIndex[val]
if !ok {
return false
}
if len(this.nums) < 2 {
this.nums = this.nums[0: 0]
} else {
swap := this.nums[len(this.nums) - 1]
this.numberToIndex[swap] = index
this.nums[index] = swap
this.nums = this.nums[0: len(this.nums) - 1]
}
delete(this.numberToIndex, val)
return true
}
func (this *RandomizedSet) GetRandom() int {
return this.nums[rand.Intn(len(this.numberToIndex))]
}
| 19.326923 | 56 | 0.60597 |
fb59552cbd7c82d5db55d6208850e1960ec15985 | 4,302 | java | Java | aliyun-java-sdk-ecs/src/main/java/com/aliyuncs/ecs/model/v20140526/DescribeActivationsResponse.java | rnarla123/aliyun-openapi-java-sdk | 8dc187b1487d2713663710a1d97e23d72a87ffd9 | [
"Apache-2.0"
] | 1 | 2022-02-12T06:01:36.000Z | 2022-02-12T06:01:36.000Z | aliyun-java-sdk-ecs/src/main/java/com/aliyuncs/ecs/model/v20140526/DescribeActivationsResponse.java | rnarla123/aliyun-openapi-java-sdk | 8dc187b1487d2713663710a1d97e23d72a87ffd9 | [
"Apache-2.0"
] | null | null | null | aliyun-java-sdk-ecs/src/main/java/com/aliyuncs/ecs/model/v20140526/DescribeActivationsResponse.java | rnarla123/aliyun-openapi-java-sdk | 8dc187b1487d2713663710a1d97e23d72a87ffd9 | [
"Apache-2.0"
] | null | null | null | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.ecs.model.v20140526;
import java.util.List;
import com.aliyuncs.AcsResponse;
import com.aliyuncs.ecs.transform.v20140526.DescribeActivationsResponseUnmarshaller;
import com.aliyuncs.transform.UnmarshallerContext;
/**
* @author auto create
* @version
*/
public class DescribeActivationsResponse extends AcsResponse {
private Long pageSize;
private String requestId;
private Long pageNumber;
private Long totalCount;
private List<Activation> activationList;
public Long getPageSize() {
return this.pageSize;
}
public void setPageSize(Long pageSize) {
this.pageSize = pageSize;
}
public String getRequestId() {
return this.requestId;
}
public void setRequestId(String requestId) {
this.requestId = requestId;
}
public Long getPageNumber() {
return this.pageNumber;
}
public void setPageNumber(Long pageNumber) {
this.pageNumber = pageNumber;
}
public Long getTotalCount() {
return this.totalCount;
}
public void setTotalCount(Long totalCount) {
this.totalCount = totalCount;
}
public List<Activation> getActivationList() {
return this.activationList;
}
public void setActivationList(List<Activation> activationList) {
this.activationList = activationList;
}
public static class Activation {
private String creationTime;
private Integer deregisteredCount;
private Integer instanceCount;
private String description;
private Integer registeredCount;
private String instanceName;
private Boolean disabled;
private String ipAddressRange;
private Long timeToLiveInHours;
private String activationId;
public String getCreationTime() {
return this.creationTime;
}
public void setCreationTime(String creationTime) {
this.creationTime = creationTime;
}
public Integer getDeregisteredCount() {
return this.deregisteredCount;
}
public void setDeregisteredCount(Integer deregisteredCount) {
this.deregisteredCount = deregisteredCount;
}
public Integer getInstanceCount() {
return this.instanceCount;
}
public void setInstanceCount(Integer instanceCount) {
this.instanceCount = instanceCount;
}
public String getDescription() {
return this.description;
}
public void setDescription(String description) {
this.description = description;
}
public Integer getRegisteredCount() {
return this.registeredCount;
}
public void setRegisteredCount(Integer registeredCount) {
this.registeredCount = registeredCount;
}
public String getInstanceName() {
return this.instanceName;
}
public void setInstanceName(String instanceName) {
this.instanceName = instanceName;
}
public Boolean getDisabled() {
return this.disabled;
}
public void setDisabled(Boolean disabled) {
this.disabled = disabled;
}
public String getIpAddressRange() {
return this.ipAddressRange;
}
public void setIpAddressRange(String ipAddressRange) {
this.ipAddressRange = ipAddressRange;
}
public Long getTimeToLiveInHours() {
return this.timeToLiveInHours;
}
public void setTimeToLiveInHours(Long timeToLiveInHours) {
this.timeToLiveInHours = timeToLiveInHours;
}
public String getActivationId() {
return this.activationId;
}
public void setActivationId(String activationId) {
this.activationId = activationId;
}
}
@Override
public DescribeActivationsResponse getInstance(UnmarshallerContext context) {
return DescribeActivationsResponseUnmarshaller.unmarshall(this, context);
}
@Override
public boolean checkShowJsonItemName() {
return false;
}
}
| 22.52356 | 84 | 0.725941 |
fb8024daddbf63ec37a3c3261d34767948bb0246 | 1,281 | c | C | src/common/ealloc.c | kalyanam-FMTGA/ray-original | f4f57896015c1c29ca571069b007c409d74824e0 | [
"BSD-3-Clause-LBNL"
] | 154 | 2015-01-27T15:02:36.000Z | 2022-01-06T18:14:18.000Z | src/common/ealloc.c | kalyanam-FMTGA/ray-original | f4f57896015c1c29ca571069b007c409d74824e0 | [
"BSD-3-Clause-LBNL"
] | 35 | 2015-05-11T21:41:31.000Z | 2021-12-17T13:23:57.000Z | src/common/ealloc.c | kalyanam-FMTGA/ray-original | f4f57896015c1c29ca571069b007c409d74824e0 | [
"BSD-3-Clause-LBNL"
] | 64 | 2015-01-21T00:52:40.000Z | 2022-02-07T12:15:09.000Z | #ifndef lint
static const char RCSid[] = "$Id: ealloc.c,v 2.9 2004/03/28 20:33:12 schorsch Exp $";
#endif
/*
* ealloc.c - memory routines which call quit on error.
*/
#include "copyright.h"
#include <stdio.h>
#include <stdlib.h>
#include "rterror.h"
#include "rtmisc.h"
extern void * /* return pointer to n uninitialized bytes */
emalloc(size_t n)
{
register void *cp;
if (n == 0)
return(NULL);
if ((cp = malloc(n)) != NULL)
return(cp);
eputs("Out of memory in emalloc\n");
quit(1);
return NULL; /* pro forma return */
}
extern void * /* return pointer to initialized memory */
ecalloc(register size_t ne, size_t es)
{
register char *cp;
ne *= es;
if (ne == 0)
return(NULL);
if ((cp = malloc(ne)) == NULL) {
eputs("Out of memory in ecalloc\n");
quit(1);
}
cp += ne;
while (ne--)
*--cp = 0;
return(cp);
}
extern void * /* reallocate cp to size n */
erealloc(register void *cp, size_t n)
{
if (n == 0) {
if (cp != NULL)
free(cp);
return(NULL);
}
if (cp == NULL)
cp = malloc(n);
else
cp = realloc(cp, n);
if (cp != NULL)
return(cp);
eputs("Out of memory in erealloc\n");
quit(1);
return NULL; /* pro forma return */
}
extern void /* free memory allocated by above */
efree(void *cp)
{
free(cp);
}
| 15.621951 | 85 | 0.600312 |
5b119af4edd33adef10ccb31e71a5aaf66b1e893 | 5,817 | ps1 | PowerShell | Learning Modules/Utilities/Demo-LoadGenerator.ps1 | JeffBarnard/WingtipTicketsSaaS-MultiTenantDB | fa21ad4f0afcc2d4c44899d3fbe05acbccb0f255 | [
"MIT"
] | 46 | 2019-05-15T15:53:41.000Z | 2022-03-27T18:04:33.000Z | Learning Modules/Utilities/Demo-LoadGenerator.ps1 | JeffBarnard/WingtipTicketsSaaS-MultiTenantDB | fa21ad4f0afcc2d4c44899d3fbe05acbccb0f255 | [
"MIT"
] | 3 | 2018-01-08T01:19:15.000Z | 2018-12-05T02:36:11.000Z | Learning Modules/Utilities/Demo-LoadGenerator.ps1 | JeffBarnard/WingtipTicketsSaaS-MultiTenantDB | fa21ad4f0afcc2d4c44899d3fbe05acbccb0f255 | [
"MIT"
] | 28 | 2019-05-28T19:20:22.000Z | 2022-02-08T05:17:00.000Z | # Invokes load generation on the tenants currently defined in the catalog.
# Duration of the load generation session. Some activity may continue after this time.
$DurationMinutes = 120
# For the Single Tenant burst scenario (scenario 4), this specifies the tenant to be overloaded.
$SingleTenantName = "Contoso Concert Hall"
# If true, generator will run once. If false will keep looking for additional tenants and apply load to them
$OneTime = $true
$Scenario = 1
<# Select the scenario to run
Scenario
1 Start a normal intensity load (approx 30 DTU)
2 Start a load with longer bursts per tenant
3 Start a load with higher DTU bursts per tenant (approx 70 DTU)
4 Start a high intensity load (approx 90 DTU) on a single tenant plus a normal intensity load on all other tenants
#>
## ------------------------------------------------------------------------------------------------
Import-Module "$PSScriptRoot\..\Common\SubscriptionManagement" -Force
Import-Module "$PSScriptRoot\..\UserConfig" -Force
# Get Azure credentials if not already logged on, Use -Force to select a different subscription
Initialize-Subscription -NoEcho
Save-AzureRmContext -Path $env:temp\AzureContext.json -Force
# Get the resource group and user names used when the Wingtip Tickets application was deployed from UserConfig.psm1.
$wtpUser = Get-UserConfig
### Default state - enter a valid demo scenaro
if ($Scenario -eq 0)
{
Write-Output "Please modify this script to select a scenario to run."
exit
}
### Generate normal intensity load
if ($Scenario -eq 1)
{
# First, stop and remove any prior running jobs
Write-Output "`nClose any previously opened PowerShell load generation sessions before launching another on the same tenants."
Write-Output "Closing a session can take a minute or more... "
Read-Host "`nPress ENTER to continue"
# Intensity of load, roughly approximates to average DTU loading on the tenants
$Intensity = 30
# start a new set of load generation jobs for the current tenants with the load configuration above
$powershellArgs = `
"-NoExit", `
"-File ""$($PSScriptRoot)\LoadGenerator2.ps1""",`
"$($wtpUser.ResourceGroupName)",`
"$($wtpUser.Name)",`
"$Intensity",`
"$DurationMinutes"
Start-Process PowerShell.exe -ArgumentList $powershellArgs
Write-Output "`Load generation session launched."
Write-Output "Close the session before starting another one on the same tenants`n"
exit
}
### Generate load with longer bursts per tenant
if ($Scenario -eq 2)
{
# First, stop and remove any prior running jobs
Write-Output "`nClose any previously opened PowerShell load generation sessions before launching another on the same tenants."
Write-Output "Closing a session can take a minute or more... "
Read-Host "`nPress ENTER to continue"
# Intensity of workload, roughly approximates to DTU
$Intensity = 30
# start a new set of load generation jobs for the current tenants
$powershellArgs = `
"-NoExit", `
"-File ""$($PSScriptRoot)\LoadGenerator2.ps1""",`
"$($wtpUser.ResourceGroupName)",`
"$($wtpUser.Name)",`
"$Intensity",`
"$DurationMinutes",
"-LongerBursts"
Start-Process PowerShell.exe -ArgumentList $powershellArgs
Write-Output "`Load generation session launched."
Write-Output "Close the session before starting another one on the same tenants`n"
exit
}
### Generate load with higher DTU bursts per tenant
if ($Scenario -eq 3)
{
# First, stop and remove any prior running jobs
Write-Output "`nClose any previously opened PowerShell load generation sessions before launching another on the same tenants."
Write-Output "Closing a session can take a minute or more... "
Read-Host "`nPress ENTER to continue"
# Intensity of workload, roughly approximates to DTU
$Intensity = 70
# start a new set of load generation jobs for the current tenants
$powershellArgs = `
"-NoExit", `
"-File ""$($PSScriptRoot)\LoadGenerator2.ps1""",`
"$($wtpUser.ResourceGroupName)",`
"$($wtpUser.Name)",`
"$Intensity",`
"$DurationMinutes",
"-LongerBursts"
Start-Process PowerShell.exe -ArgumentList $powershellArgs
Write-Output "`Load generation session launched."
Write-Output "Close the session before starting another one on the same tenants`n"
exit
}
### Generate a high intensity load (approx 95 DTU) on a single tenant plas a normal intensity load (40 DTU) on all other tenants
if ($Scenario -eq 4)
<#{
Write-Output "Not implemented yet"
exit
}
#>
{
# First, stop and remove any prior running jobs
Write-Output "`nClose any previously opened PowerShell load generation sessions before launching another on the same tenants."
Write-Output "Closing a session can take a minute or more... "
Read-Host "`nPress ENTER to continue"
# Intensity of workload, roughly approximates to DTU
$Intensity = 30
# start a new set of load generation jobs for the current tenants
$powershellArgs = `
"-NoExit", `
"-File ""$($PSScriptRoot)\LoadGenerator2.ps1""",`
"$($wtpUser.ResourceGroupName)",`
"$($wtpUser.Name)",`
"$Intensity",`
"$DurationMinutes", `
"-SingleTenant", `
"-SingleTenantName ""$SingleTenantName"""
Start-Process PowerShell.exe -ArgumentList $powershellArgs
Write-Output "`Load generation session launched."
Write-Output "Close the session before starting another one on the same tenants`n"
exit
}
Write-Output "Invalid scenario selected" | 35.907407 | 130 | 0.672684 |
ddb6727acba24e3cfd9abec2166f877d9fc1bc54 | 992 | php | PHP | app/Http/Controllers/AuthenticateController.php | danyalSh/employeeSystem | d890abbd3adf74e705057b9543aa29dedbd7be18 | [
"MIT"
] | null | null | null | app/Http/Controllers/AuthenticateController.php | danyalSh/employeeSystem | d890abbd3adf74e705057b9543aa29dedbd7be18 | [
"MIT"
] | null | null | null | app/Http/Controllers/AuthenticateController.php | danyalSh/employeeSystem | d890abbd3adf74e705057b9543aa29dedbd7be18 | [
"MIT"
] | null | null | null | <?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Http\Requests\LoginUserRequest;
use Illuminate\Support\Facades\Auth;
/**
 * Handles user login/logout using Laravel's session-based authentication.
 */
class AuthenticateController extends Controller
{
    /**
     * Show the login form.
     */
    public function login(){
        return view('auth.login');
    }

    /**
     * Attempt to authenticate the user with the submitted credentials.
     * Inactive accounts are logged out again and rejected with a message.
     */
    public function loginUser(LoginUserRequest $request){
        $authenticated = Auth::attempt([
            'username' => $request['username'],
            'password' => $request['password'],
        ]);

        // Wrong credentials: bounce back with a generic failure message.
        if (!$authenticated) {
            return redirect()->back()->with('failure', 'Invalid Username or Password');
        }

        // Only active accounts may stay logged in.
        if (Auth::user()->is_active == 1) {
            return redirect('/');
        }

        Auth::logout();
        return redirect()->back()->with('failure', 'It seems your account is not active. Contact Administrator to active your account!');
    }

    /**
     * Log the current user out and return to the login page.
     */
    public function logout(){
        Auth::logout();
        return redirect('/login');
    }
}
| 26.810811 | 145 | 0.590726 |
9305a8a0b496db517b22f503987a98f495d15be4 | 11,499 | rs | Rust | crates/client/src/vulkan.rs | wosim-net/wosim | 84b5ea77c304471a68f4345b04a2799e61de6412 | [
"Apache-2.0",
"MIT"
] | 2 | 2021-09-24T16:13:33.000Z | 2021-09-26T02:51:50.000Z | crates/client/src/vulkan.rs | wosim-net/wosim | 84b5ea77c304471a68f4345b04a2799e61de6412 | [
"Apache-2.0",
"MIT"
] | null | null | null | crates/client/src/vulkan.rs | wosim-net/wosim | 84b5ea77c304471a68f4345b04a2799e61de6412 | [
"Apache-2.0",
"MIT"
] | 1 | 2021-09-26T04:51:23.000Z | 2021-09-26T04:51:23.000Z | use std::{
cmp::{Ordering, Reverse},
sync::Arc,
};
use crate::renderer::RenderConfiguration;
use eyre::eyre;
use vulkan::{
cmp_device_types, contains_extension, ApiLevel, ColorSpaceKHR, Device, DeviceConfiguration,
Extent2D, Format, FormatFeatureFlags, ImageTiling, KhrPortabilitySubsetFn,
KhrShaderFloat16Int8Fn, KhrTimelineSemaphoreFn, PhysicalDevice, PhysicalDeviceFeatures,
PhysicalDeviceHandle, PresentModeKHR, QueueFlags, Surface, SurfaceFormatKHR, Swapchain,
SwapchainConfiguration, VkResult, FALSE, TRUE,
};
use winit::window::Window;
/// A physical device together with the device and renderer configuration
/// that was validated for it by [`DeviceCandidate::new`], ready to be turned
/// into a logical device via [`DeviceCandidate::create`].
pub struct DeviceCandidate {
    physical_device: PhysicalDevice,
    device_configuration: DeviceConfiguration,
    render_configuration: RenderConfiguration,
}
impl DeviceCandidate {
    /// Validates `physical_device` against the application's requirements.
    ///
    /// Returns `Ok(None)` when the device is unsuitable (missing surface
    /// support, extensions, features, queue families or formats),
    /// `Ok(Some(candidate))` when it can be used, and `Err` only when a
    /// Vulkan query itself fails.
    pub fn new(physical_device: PhysicalDevice, surface: &Surface) -> VkResult<Option<Self>> {
        // The device must offer at least one usable surface format and
        // present mode for the target surface.
        if choose_surface_format(surface, physical_device.handle())?.is_none()
            || choose_present_mode(surface, physical_device.handle(), false)?.is_none()
        {
            return Ok(None);
        };
        let properties = physical_device.properties();
        let extensions = physical_device.extensions();
        let features = physical_device.features();
        let mut enabled_features = PhysicalDeviceFeatures::default();
        // Swapchain support is mandatory for presenting to the window.
        if !contains_extension(extensions, Swapchain::extension_name()) {
            return Ok(None);
        }
        let mut extension_names = vec![Swapchain::extension_name()];
        // Require the Vulkan memory model features. NOTE(review): the cfg
        // removes this entire if/else on macOS builds, so the check is
        // skipped there — confirm that is intended.
        #[cfg(not(target_os = "macos"))]
        if physical_device.api_level() >= ApiLevel::Vulkan12
            && features.vulkan_12.vulkan_memory_model == TRUE
            && features.vulkan_12.vulkan_memory_model_device_scope == TRUE
        {
            enabled_features.vulkan_12.vulkan_memory_model = TRUE;
            enabled_features.vulkan_12.vulkan_memory_model_device_scope = TRUE;
        } else {
            return Ok(None);
        }
        // Shader draw parameters, shader int8 and timeline semaphores are
        // required: on Vulkan 1.1 they come from extensions, on 1.2 from core.
        match physical_device.api_level() {
            ApiLevel::Vulkan11 => {
                if features.shader_draw_parameters.shader_draw_parameters == TRUE
                    && features.float16_int8.shader_int8 == TRUE
                    && features.timeline_semaphore.timeline_semaphore == TRUE
                {
                    enabled_features
                        .shader_draw_parameters
                        .shader_draw_parameters = TRUE;
                    enabled_features.float16_int8.shader_int8 = TRUE;
                    enabled_features.timeline_semaphore.timeline_semaphore = TRUE;
                    extension_names.push(KhrShaderFloat16Int8Fn::name());
                    extension_names.push(KhrTimelineSemaphoreFn::name());
                } else {
                    return Ok(None);
                }
            }
            ApiLevel::Vulkan12 => {
                if features.vulkan_11.shader_draw_parameters == TRUE
                    && features.vulkan_12.shader_int8 == TRUE
                    && features.vulkan_12.timeline_semaphore == TRUE
                {
                    enabled_features.vulkan_11.shader_draw_parameters = TRUE;
                    enabled_features.vulkan_12.shader_int8 = TRUE;
                    enabled_features.vulkan_12.timeline_semaphore = TRUE;
                } else {
                    return Ok(None);
                }
            }
        }
        // vkCmdDrawIndexedIndirectCount is optional; remember whether it can
        // be used so the renderer can fall back when it is missing.
        let use_draw_count = if features.vulkan_12.draw_indirect_count == TRUE {
            enabled_features.vulkan_12.draw_indirect_count = TRUE;
            TRUE
        } else {
            FALSE
        };
        if features.vulkan_10.features.tessellation_shader == FALSE
            || features.vulkan_10.features.multi_draw_indirect == FALSE
        {
            return Ok(None);
        }
        enabled_features.vulkan_10.features.tessellation_shader = TRUE;
        enabled_features.vulkan_10.features.multi_draw_indirect = TRUE;
        // Portability-subset devices (e.g. MoltenVK) only expose a subset of
        // Vulkan; verify the limits and features this renderer depends on.
        if contains_extension(extensions, KhrPortabilitySubsetFn::name()) {
            if ![1, 2, 4, 5, 10, 20].contains(
                &properties
                    .portability_subset
                    .min_vertex_input_binding_stride_alignment,
            ) {
                return Ok(None);
            }
            if features.portability_subset.image_view_format_swizzle == FALSE {
                return Ok(None);
            }
            enabled_features
                .portability_subset
                .image_view_format_swizzle = TRUE;
            extension_names.push(KhrPortabilitySubsetFn::name());
        }
        // The main queue must support graphics + compute and present to the
        // surface.
        let families = physical_device.queue_families();
        let main_queue_family_index = match families
            .iter()
            .enumerate()
            .map(|(index, properties)| (index as u32, properties))
            .find(|(index, properties)| {
                match physical_device.surface_support(surface, *index) {
                    Ok(support) => {
                        if !support {
                            return false;
                        }
                    }
                    Err(_) => return false,
                }
                if !properties.queue_flags.contains(QueueFlags::GRAPHICS) {
                    return false;
                }
                properties.queue_flags.contains(QueueFlags::COMPUTE)
            })
            .map(|(index, _)| index as u32)
        {
            Some(index) => index,
            None => return Ok(None),
        };
        // A dedicated transfer queue (transfer-only family) is optional.
        let transfer_queue_family_index = families
            .iter()
            .enumerate()
            .map(|(index, properties)| (index as u32, properties))
            .find(|(_, properties)| {
                properties.queue_flags.contains(QueueFlags::TRANSFER)
                    && !properties.queue_flags.contains(QueueFlags::GRAPHICS)
                    && !properties.queue_flags.contains(QueueFlags::COMPUTE)
            })
            .map(|(index, _)| index as u32);
        let device_configuration = DeviceConfiguration {
            extension_names,
            features: enabled_features,
            main_queue_family_index,
            transfer_queue_family_index,
        };
        // Pick the first supported depth attachment format, in order of
        // preference.
        let depth_format = if let Some(format) = find_supported_format(
            &physical_device,
            &[
                Format::D24_UNORM_S8_UINT,
                Format::D32_SFLOAT,
                Format::D32_SFLOAT_S8_UINT,
                Format::D16_UNORM,
            ],
            ImageTiling::OPTIMAL,
            FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT,
        ) {
            format
        } else {
            return Ok(None);
        };
        // The depth pyramid (used for occlusion culling style passes) needs
        // a sampled + storage + transfer-dst capable single-channel format.
        let depth_pyramid_format = if let Some(format) = find_supported_format(
            &physical_device,
            &[Format::R32_SFLOAT],
            ImageTiling::OPTIMAL,
            FormatFeatureFlags::SAMPLED_IMAGE
                | FormatFeatureFlags::STORAGE_IMAGE
                | FormatFeatureFlags::TRANSFER_DST,
        ) {
            format
        } else {
            return Ok(None);
        };
        // GPU timestamps are required for the renderer's timing queries.
        if properties
            .vulkan_10
            .properties
            .limits
            .timestamp_compute_and_graphics
            == FALSE
        {
            return Ok(None);
        }
        // Convert the timestamp period from nanoseconds-per-tick to
        // milliseconds-per-tick.
        let timestamp_period =
            properties.vulkan_10.properties.limits.timestamp_period as f64 / 1000000.0;
        let render_configuration = RenderConfiguration {
            depth_format,
            depth_pyramid_format,
            timestamp_period,
            use_draw_count,
        };
        Ok(Some(Self {
            physical_device,
            device_configuration,
            render_configuration,
        }))
    }
    /// Consumes the candidate and creates the logical device with the
    /// validated configuration, returning it with the render configuration.
    pub fn create(self) -> Result<(Device, RenderConfiguration), vulkan::Error> {
        Ok((
            self.physical_device.create(self.device_configuration)?,
            self.render_configuration,
        ))
    }
}
// Candidates are ordered purely by their physical device type (see
// cmp_device_types), so the "best" device can be picked with max()/min()
// over a collection of candidates. Equality is defined consistently with
// that ordering.
impl PartialEq for DeviceCandidate {
    fn eq(&self, other: &Self) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
impl Eq for DeviceCandidate {}
impl PartialOrd for DeviceCandidate {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for DeviceCandidate {
    fn cmp(&self, other: &Self) -> Ordering {
        // Delegate to cmp_device_types on the raw Vulkan device type.
        cmp_device_types(
            self.physical_device
                .properties()
                .vulkan_10
                .properties
                .device_type,
            other
                .physical_device
                .properties()
                .vulkan_10
                .properties
                .device_type,
        )
    }
}
/// Ranks a present mode; higher means more preferred. IMMEDIATE is best when
/// vsync is disabled but worst otherwise; MAILBOX then FIFO follow, and any
/// other mode ranks above IMMEDIATE-with-vsync.
fn present_mode_priority(present_mode: PresentModeKHR, disable_vsync: bool) -> usize {
    match present_mode {
        m if m == PresentModeKHR::IMMEDIATE => {
            if disable_vsync {
                4
            } else {
                0
            }
        }
        m if m == PresentModeKHR::MAILBOX => 3,
        m if m == PresentModeKHR::FIFO => 2,
        _ => 1,
    }
}
/// Ranks a surface format: 1 for the preferred B8G8R8A8_SRGB /
/// SRGB_NONLINEAR combination, 0 for everything else.
fn surface_format_priority(surface_format: SurfaceFormatKHR) -> usize {
    let preferred = surface_format.format == Format::B8G8R8A8_SRGB
        && surface_format.color_space == ColorSpaceKHR::SRGB_NONLINEAR;
    usize::from(preferred)
}
/// Picks the best available surface format for the device, preferring
/// B8G8R8A8_SRGB with a non-linear sRGB color space.
///
/// `min_by_key` over `Reverse(priority)` selects the highest-priority format
/// while keeping the *first* such format on ties (unlike `max_by_key`, which
/// would return the last) — do not "simplify" this without checking ties.
pub fn choose_surface_format(
    surface: &Surface,
    physical_device_handle: PhysicalDeviceHandle,
) -> VkResult<Option<SurfaceFormatKHR>> {
    Ok(surface
        .formats(physical_device_handle)?
        .into_iter()
        .min_by_key(|surface_format| Reverse(surface_format_priority(*surface_format))))
}
/// Picks the best available present mode for the device; `disable_vsync`
/// promotes IMMEDIATE to the top of the ranking (see present_mode_priority).
///
/// Uses `min_by_key` with `Reverse` so the first mode of the highest
/// priority wins on ties.
pub fn choose_present_mode(
    surface: &Surface,
    physical_device_handle: PhysicalDeviceHandle,
    disable_vsync: bool,
) -> VkResult<Option<PresentModeKHR>> {
    Ok(surface
        .present_modes(physical_device_handle)?
        .into_iter()
        .min_by_key(|present_mode| Reverse(present_mode_priority(*present_mode, disable_vsync))))
}
/// Returns the first format in `formats` whose properties for the given
/// `tiling` contain all of `required_features`, or `None` if none qualifies.
fn find_supported_format(
    physical_device: &PhysicalDevice,
    formats: &[Format],
    tiling: ImageTiling,
    required_features: FormatFeatureFlags,
) -> Option<Format> {
    formats.iter().copied().find(|&format| {
        let properties = physical_device.format_properties(format);
        // Only linear and optimal tiling expose feature flags here; any
        // other tiling yields an empty set and therefore never matches.
        let available_features = if tiling == ImageTiling::LINEAR {
            properties.linear_tiling_features
        } else if tiling == ImageTiling::OPTIMAL {
            properties.optimal_tiling_features
        } else {
            FormatFeatureFlags::empty()
        };
        available_features.contains(required_features)
    })
}
/// Creates a swapchain for `window`'s surface sized to the window's current
/// inner size, using the best available surface format and present mode.
///
/// `previous` allows swapchain recreation (e.g. after a resize) to reuse the
/// old swapchain's resources. Errors if no suitable format or present mode
/// exists, or if swapchain creation itself fails.
pub fn create_swapchain(
    device: &Arc<Device>,
    surface: &Surface,
    window: &Window,
    disable_vsync: bool,
    previous: Option<&Swapchain>,
) -> eyre::Result<Swapchain> {
    let extent = window.inner_size();
    let extent = Extent2D {
        width: extent.width,
        height: extent.height,
    };
    let surface_format = choose_surface_format(surface, device.physical_device_handle())?
        .ok_or_else(|| eyre!("could not find suitable surface format"))?;
    let present_mode =
        choose_present_mode(surface, device.physical_device_handle(), disable_vsync)?
            .ok_or_else(|| eyre!("could not find suitable present mode"))?;
    let configuration = SwapchainConfiguration {
        surface,
        previous,
        present_mode,
        surface_format,
        extent,
    };
    Ok(device.create_swapchain(configuration)?)
}
| 34.531532 | 97 | 0.581964 |
83d04331a5a7782173cc96b030b5ad5891d50a02 | 9,684 | go | Go | qjson/engine.go | qjson/qjson-go | 170c47e2db4633ef7b8cdc63ef43d47b803f4e95 | [
"BSD-3-Clause"
] | null | null | null | qjson/engine.go | qjson/qjson-go | 170c47e2db4633ef7b8cdc63ef43d47b803f4e95 | [
"BSD-3-Clause"
] | null | null | null | qjson/engine.go | qjson/qjson-go | 170c47e2db4633ef7b8cdc63ef43d47b803f4e95 | [
"BSD-3-Clause"
] | null | null | null | package qjson
import (
"bytes"
"fmt"
"strconv"
)
// Version reports the version of this implementation together with the
// QJSON syntax version it supports, e.g. "qjson-go: v0.0.0 syntax: v0.0.0".
func Version() string {
	const version = "qjson-go: v0.0.0 syntax: v0.0.0"
	return version
}
// Decode accepts QJSON text as input and returns the equivalent JSON text,
// or an error describing the first problem found, decorated with 1-based
// line and column information. A nil input decodes to the empty object "{}".
func Decode(input []byte) ([]byte, error) {
	if input == nil {
		return []byte("{}"), nil
	}
	var e engine
	e.init(input)
	// A QJSON document is an object whose outer braces may be omitted, so
	// parsing starts directly with the object members.
	e.members()
	if e.token().tag == tagCloseBrace {
		e.setError(ErrUnexpectedCloseBrace)
	}
	t := e.token()
	// ErrEndOfInput is the normal termination sentinel, not a failure.
	if t.tag == tagError && t.val.(error) != ErrEndOfInput {
		return nil, fmt.Errorf("%w at line %d col %d", t.val.(error), t.pos.l+1, column(e.in[t.pos.s:t.pos.b])+1)
	}
	return e.out.Bytes(), nil
}
// maxDepth bounds object/array nesting to guard against pathological input.
var maxDepth = 200

// engine to convert QJSON to JSON.
type engine struct {
	tokenizer
	depth int          // current object/array nesting depth
	out   bytes.Buffer // accumulated JSON output
}

// init prepares the engine for a fresh conversion of input and primes the
// first token.
func (e *engine) init(input []byte) {
	e.tokenizer.init(input)
	e.out.Reset()
	e.depth = 0
	e.nextToken()
}

// done reports whether parsing has stopped: the current token is an error
// token (which includes the ErrEndOfInput sentinel).
func (e *engine) done() bool {
	return e.tk.tag == tagError
}

// setError records err at the current token's position.
func (e *engine) setError(err error) {
	e.setErrorAndPos(err, e.tk.pos)
}

// setErrorAndPos replaces the current token with an error token carrying
// err and position p.
func (e *engine) setErrorAndPos(err error, p pos) {
	e.tk = token{tag: tagError, pos: p, val: err}
}
// value process a value. If an error occurred it returns with the error set,
// otherwise calls nextToken() and return its result.
func (e *engine) value() bool {
	switch e.tk.tag {
	case tagCloseSquare:
		e.setError(ErrUnexpectedCloseSquare)
		return false
	case tagCloseBrace:
		e.setError(ErrUnexpectedCloseBrace)
		return false
	case tagDoubleQuotedString:
		e.outputDoubleQuotedString()
	case tagSingleQuotedString:
		e.outputSingleQuotedString()
	case tagMultilineString:
		e.outputMultilineString()
	case tagQuotelessString:
		// A quoteless string may really be a literal (true/false/null and
		// friends), a numeric expression to evaluate, or a plain string.
		val := e.tk.val.([]byte)
		if str := isLiteralValue(val); str != "" {
			e.out.WriteString(str)
		} else if isNumberExpr(val) {
			res, pos, err := evalNumberExpression(val)
			if err != nil {
				// Report the error at its byte offset inside the expression.
				p := e.tk.pos
				p.b += pos
				e.setErrorAndPos(err, p)
				return true
			}
			e.out.WriteString(strconv.FormatFloat(res, 'g', 16, 64))
		} else {
			e.outputQuotelessString()
		}
	case tagOpenBrace:
		// Nested object; remember where it starts for unclosed-object errors.
		startPos := e.tk.pos
		e.nextToken()
		if e.done() {
			if e.tk.val.(error) == ErrEndOfInput {
				e.setErrorAndPos(ErrUnclosedObject, startPos)
			}
			return true
		}
		if e.depth == maxDepth {
			e.setError(ErrMaxObjectArrayDepth)
			return true
		}
		e.depth++
		if e.members() {
			if e.tk.val.(error) == ErrEndOfInput {
				e.setErrorAndPos(ErrUnclosedObject, startPos)
			}
			return true
		}
		e.depth--
	case tagOpenSquare:
		// Nested array.
		e.nextToken()
		if e.done() {
			if e.tk.val.(error) == ErrEndOfInput {
				e.setError(ErrUnclosedArray)
			}
			return true
		}
		startPos := e.tk.pos
		if e.depth == maxDepth {
			e.setError(ErrMaxObjectArrayDepth)
			return true
		}
		e.depth++
		if e.values() {
			if e.tk.val.(error) == ErrEndOfInput {
				e.setErrorAndPos(ErrUnclosedArray, startPos)
			}
			return true
		}
		e.depth--
	default:
		e.setError(ErrSyntaxError)
		// e.setError(fmt.Errorf("expected value, got %v", e.tk))
		return false
	}
	e.nextToken()
	return e.done()
}
// values process 0 or more values and pops the ending ]. Return done().
func (e *engine) values() bool {
	var notFirst bool
	e.out.WriteByte('[')
	for !e.done() && e.tk.tag != tagCloseSquare {
		if notFirst {
			// JSON always needs a separating comma; in QJSON the comma
			// between values is optional, so consume one if present.
			e.out.WriteByte(',')
			if e.tk.tag == tagComma {
				e.nextToken()
				if e.done() {
					if e.tk.val.(error) == ErrEndOfInput {
						e.setError(ErrExpectValueAfterComma)
					}
					break
				}
				// A trailing comma before a closing bracket/brace is invalid.
				if e.tk.tag == tagCloseBrace || e.tk.tag == tagCloseSquare {
					e.setError(ErrExpectValueAfterComma)
					break
				}
			}
		} else {
			notFirst = true
		}
		if e.value() {
			break
		}
	}
	e.out.WriteByte(']')
	return e.done()
}
// member processes one "identifier : value" pair of an object and returns
// done(). The identifier may be a double quoted, single quoted or quoteless
// string.
func (e *engine) member() bool {
	switch e.tk.tag {
	case tagCloseSquare:
		e.setError(ErrUnexpectedCloseSquare)
		return false
	case tagDoubleQuotedString:
		e.outputDoubleQuotedString()
	case tagSingleQuotedString:
		e.outputSingleQuotedString()
	case tagQuotelessString:
		e.outputQuotelessString()
	default:
		// NOTE(review): this branch does not return, so the nextToken() call
		// below runs after setError — confirm the error token survives it.
		e.setError(ErrExpectStringIdentifier)
	}
	e.nextToken()
	if e.done() {
		if e.tk.val.(error) == ErrEndOfInput {
			e.setError(ErrUnexpectedEndOfInput)
		}
		return true
	}
	if e.tk.tag != tagColon {
		e.setError(ErrExpectColon)
		return true
	}
	e.out.WriteByte(':')
	e.nextToken()
	if e.done() {
		if e.tk.val.(error) == ErrEndOfInput {
			e.setError(ErrUnexpectedEndOfInput)
		}
		return true
	}
	return e.value()
}
// members process 0 or more members (identifiers : value) and pops the
// ending }. Return done().
func (e *engine) members() bool {
	var notFirst bool
	e.out.WriteByte('{')
	for !e.done() && e.tk.tag != tagCloseBrace {
		if notFirst {
			// JSON always needs a separating comma; in QJSON the comma
			// between members is optional, so consume one if present.
			e.out.WriteByte(',')
			if e.tk.tag == tagComma {
				e.nextToken()
				if e.done() {
					if e.tk.val.(error) == ErrEndOfInput {
						e.setError(ErrExpectIdentifierAfterComma)
					}
					break
				}
				// A trailing comma before a closing brace/bracket is invalid.
				if e.tk.tag == tagCloseBrace || e.tk.tag == tagCloseSquare {
					e.setError(ErrExpectIdentifierAfterComma)
					break
				}
			}
		} else {
			notFirst = true
		}
		if e.member() {
			break
		}
	}
	e.out.WriteByte('}')
	return e.done()
}
// isLiteralValue maps a quoteless-string literal to its JSON spelling:
// "true" for true/yes/on, "false" for false/no/off, "null" for null, and ""
// when p is not a literal. For each word the first letter may be either
// case, and the remaining letters must be all lowercase or all uppercase
// (e.g. null, Null, NULL are accepted, but FaLsE is not).
func isLiteralValue(p []byte) string {
	// matches reports whether p spells word under the case rules above
	// (word itself is given in lowercase).
	matches := func(word string) bool {
		if len(p) != len(word) {
			return false
		}
		if p[0] != word[0] && p[0] != word[0]-'a'+'A' {
			return false
		}
		allLower, allUpper := true, true
		for i := 1; i < len(word); i++ {
			lc := word[i]
			if p[i] != lc {
				allLower = false
			}
			if p[i] != lc-'a'+'A' {
				allUpper = false
			}
		}
		return allLower || allUpper
	}
	switch {
	case matches("true"), matches("yes"), matches("on"):
		return "true"
	case matches("false"), matches("no"), matches("off"):
		return "false"
	case matches("null"):
		return "null"
	}
	return ""
}
// outputDoubleQuotedString writes the current double quoted string token to
// the output as a JSON string. The surrounding quotes are kept, raw tabs are
// escaped to \t, "</" becomes "<\/" (safe for HTML embedding), and escape
// sequences are validated, reporting ErrInvalidEscapeSequence at the offset
// of the offending backslash.
func (e *engine) outputDoubleQuotedString() {
	str := e.tk.val.([]byte)
	e.out.WriteByte('"')
	for i := 1; i < len(str)-1; i++ {
		switch str[i] {
		case '/':
			if str[i-1] == '<' {
				e.out.WriteByte('\\')
			}
		case '\t':
			e.out.WriteByte('\\')
			e.out.WriteByte('t')
			continue
		case '\\':
			c := str[i+1]
			// Only the JSON escapes are valid; \uXXXX needs exactly four hex
			// digits. BUG FIX: the original tested str[i+5] twice and never
			// validated str[i+4], accepting sequences such as "\u12G4".
			if c != 't' && c != 'n' && c != 'r' && c != 'f' && c != 'b' && c != '/' && c != '\\' && c != '"' &&
				!(c == 'u' && len(str) >= i+6 && isHexDigit(str[i+2]) && isHexDigit(str[i+3]) && isHexDigit(str[i+4]) && isHexDigit(str[i+5])) {
				p := e.tk.pos
				p.b += i
				e.setErrorAndPos(ErrInvalidEscapeSequence, p)
				return
			}
		}
		e.out.WriteByte(str[i])
	}
	e.out.WriteByte('"')
}
// outputSingleQuotedString writes the current single quoted string token to
// the output as a JSON double quoted string: \' escapes are unwrapped, raw
// double quotes are escaped, raw tabs become \t, "</" becomes "<\/" and all
// other escape sequences are validated, reporting ErrInvalidEscapeSequence
// at the offset of the offending backslash.
func (e *engine) outputSingleQuotedString() {
	str := e.tk.val.([]byte)
	e.out.WriteByte('"')
	for i := 1; i < len(str)-1; i++ {
		switch str[i] {
		case '/':
			if str[i-1] == '<' {
				e.out.WriteByte('\\')
			}
		case '\t':
			e.out.WriteByte('\\')
			e.out.WriteByte('t')
			continue
		case '\\':
			c := str[i+1]
			// Only the JSON escapes (with \' instead of \") are valid;
			// \uXXXX needs exactly four hex digits. BUG FIX: the original
			// tested str[i+5] twice and never validated str[i+4].
			if c != 't' && c != 'n' && c != 'r' && c != 'f' && c != 'b' && c != '/' && c != '\\' && c != '\'' &&
				!(c == 'u' && len(str) >= i+6 && isHexDigit(str[i+2]) && isHexDigit(str[i+3]) && isHexDigit(str[i+4]) && isHexDigit(str[i+5])) {
				p := e.tk.pos
				p.b += i
				e.setErrorAndPos(ErrInvalidEscapeSequence, p)
				return
			}
			if c == '\'' {
				// \' becomes a plain ' in JSON: drop the backslash.
				continue
			}
		case '"':
			// A raw " must be escaped inside a JSON string.
			e.out.WriteByte('\\')
		}
		e.out.WriteByte(str[i])
	}
	e.out.WriteByte('"')
}
// outputQuotelessString writes the current quoteless string token to the
// output as a JSON double quoted string, escaping double quotes and
// backslashes, turning raw tabs into \t and "</" into "<\/".
func (e *engine) outputQuotelessString() {
	str := e.tk.val.([]byte)
	e.out.WriteByte('"')
	for i, c := range str {
		switch c {
		case '\t':
			e.out.WriteString("\\t")
			continue
		case '"', '\\':
			e.out.WriteByte('\\')
		case '/':
			// Escape "</" so the output is safe to embed in HTML.
			if i > 0 && str[i-1] == '<' {
				e.out.WriteByte('\\')
			}
		}
		e.out.WriteByte(c)
	}
	e.out.WriteByte('"')
}
// outputMultilineString converts the current multiline string token (a back
// quoted block where every line shares a common left margin and the newline
// style is declared after the opening back quote) into a JSON double quoted
// string.
func (e *engine) outputMultilineString() {
	str := e.tk.val.([]byte)
	var p int
	// The token begins with the margin that prefixes every line of the block.
	for str[p] != '`' {
		p++
	}
	margin := str[:p]
	str = str[p+1:]
	for n := whitespace(str); n > 0; n = whitespace(str) {
		str = str[n:]
	}
	str = str[1:] // skip \
	// The newline specification (\n or \r\n) selects the escape sequence
	// used to join lines in the JSON output.
	var nl []byte
	if str[0] == 'n' {
		nl = []byte("\\n")
		str = str[1:]
	} else {
		nl = []byte("\\r\\n")
		str = str[3:]
	}
	for str[0] != '\n' {
		str = str[1:]
	}
	// skip \n with margin of first line, and drop closing `
	str = str[1+len(margin) : len(str)-1]
	e.out.WriteByte('"')
	for len(str) > 0 {
		if n := newline(str); n != 0 {
			// Line break: emit the configured newline escape, then skip the
			// next line's margin.
			e.out.Write(nl)
			str = str[n+len(margin):]
		} else if str[0] < 0x20 {
			// Control characters must be escaped in JSON.
			switch str[0] {
			case '\b':
				e.out.WriteString("\\b")
			case '\t':
				e.out.WriteString("\\t")
			case '\r':
				e.out.WriteString("\\r")
			case '\f':
				e.out.WriteString("\\f")
			default:
				e.out.WriteString(fmt.Sprintf("\\u00%0X", str[0]))
			}
			str = str[1:]
		} else if str[0] == '<' {
			// Escape "</" so the output is safe to embed in HTML.
			e.out.WriteByte('<')
			if len(str) > 1 && str[1] == '/' {
				e.out.WriteByte('\\')
			}
			str = str[1:]
		} else if str[0] == '"' {
			e.out.WriteByte('\\')
			e.out.WriteByte('"')
			str = str[1:]
		} else if str[0] == '`' && len(str) > 1 && str[1] == '\\' {
			// `\ is the escape for a literal back quote inside the block.
			e.out.WriteByte('`')
			str = str[2:]
		} else if str[0] == '\\' {
			e.out.WriteByte('\\')
			e.out.WriteByte('\\')
			str = str[1:]
		} else {
			e.out.WriteByte(str[0])
			str = str[1:]
		}
	}
	e.out.WriteByte('"')
}
| 22.416667 | 132 | 0.549773 |
dfc4a329ce5034e7732ba2ff7f8f13fef58af9f0 | 1,367 | ts | TypeScript | training01_udemy/sec.03/src/js/001.ts | y-uchiida/Angular_handson | 4e7d0669eac8cf081ec2f8acb5e5c3319e40b9e8 | [
"MIT"
] | null | null | null | training01_udemy/sec.03/src/js/001.ts | y-uchiida/Angular_handson | 4e7d0669eac8cf081ec2f8acb5e5c3319e40b9e8 | [
"MIT"
] | null | null | null | training01_udemy/sec.03/src/js/001.ts | y-uchiida/Angular_handson | 4e7d0669eac8cf081ec2f8acb5e5c3319e40b9e8 | [
"MIT"
] | null | null | null | /* 001_変数と型
 * A type name can be given after a colon in a variable declaration:
 * let {variableName}: {typeName} = {value}
 * Assigning a value of a type other than the declared one raises an error
*/
export module exec {
	/* string type */
	let str: string = "hello world"
	console.log(`typeof str is: ${typeof str}`) // -> string

	/* Assigning a number to a string variable raises an error:
	 * Type 'number' is not assignable to type 'string'.
	 */
	// str = 100

	/* number type */
	let num: number = 42
	console.log(`typeof num is: ${typeof num}`) // -> number

	/* boolean type */
	let tf: boolean = true
	console.log(`typeof tf is: ${typeof tf}`) // -> boolean

	/* Arrays are declared as {type}[] */
	let arr: number[] = [1, 2, 3] /* an array that only accepts number values */
	console.log(`typeof arr is: ${typeof arr}`) // -> object

	/* To accept several types, separate the type names with | */
	let arr_2: (number|string)[] = ["1", 2, `3`, 4.0]
	console.log(`tyoeof arr_2 is: ${typeof arr_2}`) // -> object  NOTE(review): "tyoeof" typo is inside the runtime string; kept as-is

	/* Tuple: fixes which type goes in each element position */
	let tup: [number, string, boolean] = [1, 'user_001', true];
	console.log(`typeof tup is: ${typeof tup}`) // -> object

	/* Missing elements or wrong element types raise an error:
	 * Type '[]' is not assignable to type '[number, string, boolean]'.
	 * Source has 0 element(s) but target requires 3
	 */
	// tup = [];

	/* Object type: types can be given for both keys and values */
	let obj: {[key: string]: string} = {
		id: "2",
		name: "user_002",
		is_adminer: "false",
	};
	console.log(`typeof obj is: ${typeof tup}`) // -> object  NOTE(review): prints typeof tup, probably meant typeof obj
} | 26.803922 | 70 | 0.613021 |
28478e0f2aed512a3bd7f040803389d7ffd629b2 | 947 | rb | Ruby | lib/hydra/roles/role_defs.rb | duke-libraries/hydra-roles | d03d0dfbeeadc504de9e064e338ee45c2e08628c | [
"MIT"
] | 1 | 2015-10-30T04:12:26.000Z | 2015-10-30T04:12:26.000Z | lib/hydra/roles/role_defs.rb | duke-libraries/hydra-roles | d03d0dfbeeadc504de9e064e338ee45c2e08628c | [
"MIT"
] | null | null | null | lib/hydra/roles/role_defs.rb | duke-libraries/hydra-roles | d03d0dfbeeadc504de9e064e338ee45c2e08628c | [
"MIT"
] | null | null | null | require 'singleton'
module Hydra
  module Roles
    # Singleton registry of role definitions. Roles are registered via
    # #define_role / #add_role_def and can then be looked up by name either
    # explicitly (role_defs[:name]) or dynamically (defs.name, see
    # #method_missing).
    class RoleDefs
      include Singleton

      # Hash mapping role name to its role definition object.
      attr_reader :role_defs

      def initialize
        @role_defs = {}
      end

      # Builds a role definition named +role+ from the given block (via
      # RoleDefBuilder) and registers it.
      def define_role role, &block
        add_role_def RoleDefBuilder.build(role, &block)
      end

      # Freezes the registry (and this instance) so no further roles can be
      # defined.
      def freeze!
        role_defs.freeze
        freeze
      end

      # Registers +role_def+ (an object responding to #name and
      # #permissions). Raises if a role with the same name already exists.
      def add_role_def role_def
        raise "Role \"#{role_def.name}\" already defined." if role_defs.include?(role_def.name)
        role_defs[role_def.name] = role_def
      end

      # All registered role names.
      def role_names
        role_defs.keys
      end

      # Inverts the registry: maps each permission to the list of role
      # names that grant it.
      def by_permissions
        permissions = {}
        role_defs.each do |role, role_def|
          role_def.permissions.each { |p| (permissions[p] ||= []) << role }
        end
        permissions
      end

      # Allows looking a role definition up by name, e.g. +defs.admin+.
      def method_missing(method, *args)
        return role_defs[method] if role_defs.include?(method)
        super
      end

      # Keeps #respond_to? consistent with the dynamic lookup above; the
      # original defined method_missing without respond_to_missing?.
      def respond_to_missing?(method, include_private = false)
        role_defs.include?(method) || super
      end
    end
  end
end
| 19.326531 | 95 | 0.592397 |
d6c59bc7fb3cc1e6557a1e55d8b922911e26671b | 1,508 | swift | Swift | example/music_app/View/Common/MediaItemListView.swift | tsuruken0802/MusicPlayer | 8df7df2a1d0dd2bb2c82f4805f0e6677dae2ca90 | [
"Unlicense"
] | null | null | null | example/music_app/View/Common/MediaItemListView.swift | tsuruken0802/MusicPlayer | 8df7df2a1d0dd2bb2c82f4805f0e6677dae2ca90 | [
"Unlicense"
] | 1 | 2022-03-10T08:43:59.000Z | 2022-03-10T08:43:59.000Z | example/music_app/View/Common/MediaItemListView.swift | TsurumotoKentarou/MusicPlayer | 6aea82ac67e31c1a9a6fd3984de4218e8c8ec254 | [
"Unlicense"
] | null | null | null | //
// MediaItemListView.swift
// music_app
//
// Created by 鶴本賢太朗 on 2022/03/06.
//
import SwiftUI
import MediaPlayer
/// Presentation style for rows in a media item list.
enum MediaListType {
    /// Show the item's artwork thumbnail next to its title.
    case artwork
    /// Show the item's 1-based position number next to its title.
    case number
}
/// A plain list of songs rendered either with artwork thumbnails or with
/// position numbers, invoking `onTap` with the tapped item and its index.
struct MediaItemListView: View {
    /// Songs to display, in order.
    let items: [MPSongItem]
    /// Row presentation style (artwork vs. number).
    let listType: MediaListType
    /// Called when a row is tapped, with the item and its index in `items`.
    let onTap: (_ item: MPSongItem, _ index: Int) -> Void
    /// Builds a single row for the item at `index` according to `listType`.
    @ViewBuilder
    private func itemView(index: Int) -> some View {
        if listType == .number {
            MediaNumberItemView(number: index+1, title: items[index].title ?? "") {
                onTap(items[index], index)
            }
        }
        else if listType == .artwork {
            MediaThumbnailItemView(thumbnailImage: items[index].item.image(size: MediaThumbnailItemView.imageSize), title: items[index].title ?? "") {
                onTap(items[index], index)
            }
        }
        else {
            EmptyView()
        }
    }
    var body: some View {
        List {
            ForEach(items.indices, id: \.self) { (index) in
                /// Without wrapping in a VStack, List rendering is not
                /// optimized and every row view is loaded eagerly at once.
                VStack {
                    itemView(index: index)
                }
            }
        }
        .listStyle(PlainListStyle())
        .navigationBarTitleDisplayMode(.inline)
    }
}
/// Xcode canvas preview showing the view with an empty item list.
struct MediaItemListView_Previews: PreviewProvider {
    static var previews: some View {
        MediaItemListView(items: [], listType: .artwork, onTap: { _,_ in
        })
    }
}
| 24.322581 | 150 | 0.546419 |
4008519b13a3a680ac054ed8d9b843ed748be384 | 1,705 | py | Python | pre-process-inference.py | safoinme/Mnist-MLFlow-dvc-Argo | 811a1a940dbb3e1650a6e05ef273dd61e4ac68c5 | [
"MIT"
] | 1 | 2022-02-12T13:24:55.000Z | 2022-02-12T13:24:55.000Z | pre-process-inference.py | safoinme/mnist-dvc | 811a1a940dbb3e1650a6e05ef273dd61e4ac68c5 | [
"MIT"
] | null | null | null | pre-process-inference.py | safoinme/mnist-dvc | 811a1a940dbb3e1650a6e05ef273dd61e4ac68c5 | [
"MIT"
] | 1 | 2022-02-28T00:47:04.000Z | 2022-02-28T00:47:04.000Z | import numpy as np
from PIL import ImageOps
from PIL import Image
import json
import requests
import os
def process_image(image):
    """Convert a PIL image into a normalized MNIST-style model input.

    The image is resized to 28x28, converted to grayscale, and — when the
    top-left pixel looks light (dark-on-white drawing) — color-inverted so
    the digit is light-on-dark as MNIST expects.

    Returns a tuple ``(pixels, inverted)`` where ``pixels`` is a float array
    of shape (1, 28, 28) with values in [0.0, 1.0] and ``inverted`` reports
    whether the inversion was applied.
    """
    # Resize to the MNIST input size and drop color information.
    mnist_sized = image.resize((28, 28)).convert('L')
    # Heuristic: a bright corner pixel (> 192) suggests dark-on-white input.
    inverted = mnist_sized.getpixel((0, 0)) > 192
    if inverted:
        print('Inverting image')
        mnist_sized = ImageOps.invert(mnist_sized)
    # Scale pixel values from 0-255 down to 0.0-1.0.
    pixels = np.array(mnist_sized) / 255.0
    return pixels.reshape((1, 28, 28)), inverted
if __name__ == '__main__':
    # Preprocess the sample digit and send it to the KServe predict endpoint.
    image, inverted = process_image(Image.open("five.jpeg"))
    data = json.dumps({"signature_name": "serving_default", "instances": image.tolist()})
    # BUG FIX: the original called .format(os.environ["SERVICE_HOSTNAME"]) on a
    # literal with no "{}" placeholder, so the env var was silently ignored and
    # the Host header was hard-coded. Use the env var directly instead.
    headers = {
        "content-type": "application/json",
        "Host": os.environ["SERVICE_HOSTNAME"],
    }
    json_response = requests.post(
        "http://{}:{}/v1/models/mnist-example-s3:predict".format(
            os.environ["INGRESS_HOST"], os.environ["INGRESS_PORT"]),
        data=data, headers=headers)
    predictions = json.loads(json_response.text)['predictions']
    print(predictions)
    # Report the most likely digit class for the first (only) instance.
    print(np.argmax(predictions[0]))
#SERVICE_HOSTNAME=$(kubectl get inferenceservice mnist-example-s3 -n kserve-deployement -o jsonpath='{.status.url}' | cut -d "/" -f 3)
#curl -v -H "Host: ${SERVICE_HOSTNAME}" http://${INGRESS_HOST}:${INGRESS_PORT}/v1/models/mnist-example-s3:predict -d @./mnist-input.json
| 39.651163 | 174 | 0.690909 |
d7404c51114f72c0fe30025438fef13b63e8b1f6 | 2,527 | swift | Swift | GEDebugKit/Debugging.swift | grigorye/GEDebugKit | c68b9817d121dea78a8ecad9de38f5f674b3c12f | [
"MIT"
] | null | null | null | GEDebugKit/Debugging.swift | grigorye/GEDebugKit | c68b9817d121dea78a8ecad9de38f5f674b3c12f | [
"MIT"
] | null | null | null | GEDebugKit/Debugging.swift | grigorye/GEDebugKit | c68b9817d121dea78a8ecad9de38f5f674b3c12f | [
"MIT"
] | null | null | null | //
// Debugging.swift
// RSSReader
//
// Created by Grigory Entin on 01.12.2017.
// Copyright © 2017 Grigory Entin. All rights reserved.
//
import FBAllocationTracker
import FBMemoryProfiler
import FPSCounter
#if LOGGY_ENABLED
import Loggy
#endif
import Foundation
/// Synthetic error used by triggerDebugError to exercise error tracking.
private let debugError = NSError(domain: "com.grigorye.debug", code: 1)
/// Crashes the app on purpose (e.g. to exercise crash reporting).
public func forceDebugCrash() {
    fatalError()
}
/// Reports the synthetic debug error through the error tracking pipeline.
public func triggerDebugError() {
    trackError(debugError)
}
/// Keeps debug helpers (profilers, KVO observations) alive for the app's lifetime.
private var retainedObjects: [AnyObject] = []
/// Starts FBAllocationTracker allocation tracking and enables generation
/// snapshots. No-op when the shared tracker manager is unavailable.
private func initializeAllocationTracking() {
    guard let allocationTrackerManager = x$(FBAllocationTrackerManager.shared()) else {
        return
    }
    allocationTrackerManager.startTrackingAllocations()
    allocationTrackerManager.enableGenerations()
}
/// Wires the FBMemoryProfiler overlay to the memoryProfilerEnabled default,
/// enabling/disabling it as the setting changes.
/// NOTE(review): `_0` appears to select between a one-shot and an observing
/// code path — confirm its meaning where it is defined.
public func configureAllocationTracking() {
    if _0 {
        // One-shot path: enable the profiler once if the setting is on.
        if defaults.memoryProfilerEnabled {
            let memoryProfiler = FBMemoryProfiler()
            memoryProfiler.enable()
            retainedObjects += [memoryProfiler]
        }
    }
    else {
        // Observing path: toggle the profiler whenever the default changes
        // (.initial applies the current value immediately).
        var memoryProfiler: FBMemoryProfiler!
        retainedObjects += [
            defaults.observe(\.memoryProfilerEnabled, options: .initial) { (_, _) in
                if defaults.memoryProfilerEnabled {
                    // Already enabled: nothing to do.
                    guard nil == memoryProfiler else {
                        return
                    }
                    memoryProfiler = FBMemoryProfiler()
                    memoryProfiler.enable()
                }
                else {
                    // Already disabled: nothing to do.
                    guard nil != memoryProfiler else {
                        return
                    }
                    memoryProfiler.disable()
                    memoryProfiler = nil
                }
            }
        ]
    }
}
/// Shared on-screen FPS monitor, toggled via the FPSMonitoringEnabled default.
let fpsMonitor = FPSMonitor()
/// Observes the FPSMonitoringEnabled default and shows/hides the FPS monitor
/// accordingly (.initial applies the current value immediately).
func configureFPSMonitoring() {
    retainedObjects += [
        defaults.observe(\.FPSMonitoringEnabled, options: .initial) { (_, _) in
            if defaults.FPSMonitoringEnabled {
                fpsMonitor.show()
            } else {
                fpsMonitor.hide()
            }
        }
    ]
}
/// Installs all debug facilities: allocation tracking, FPS monitoring and
/// the shake gesture handler.
public func configureDebug() {
    configureAllocationTracking()
    configureFPSMonitoring()
    configureShakeGesture()
}
/// Performs one-time debug initialization; starts allocation tracking when
/// the corresponding default is enabled, wrapped in a Loggy activity when
/// the LOGGY_ENABLED build flag is set.
public func initializeDebug() {
    if defaults.allocationTrackingEnabled {
        #if LOGGY_ENABLED
        Activity.label("Initializing Allocation Tracking") {
            initializeAllocationTracking()
        }
        #else
        initializeAllocationTracking()
        #endif
    }
}
| 23.398148 | 87 | 0.586466 |
d1323763ef0c9ccbd8bcea4a4738624394b4406d | 54 | rs | Rust | build/classes/visao/jasperreports-6.15.0/src/net/sf/jasperreports/extensions/ListExtensionRegistry.rs | EuKaique/Projeto-Diagnostico-Medico | cf7cc535ff31992b7568dba777c8faafafa6920c | [
"MIT"
] | null | null | null | build/classes/visao/jasperreports-6.15.0/src/net/sf/jasperreports/extensions/ListExtensionRegistry.rs | EuKaique/Projeto-Diagnostico-Medico | cf7cc535ff31992b7568dba777c8faafafa6920c | [
"MIT"
] | null | null | null | build/classes/visao/jasperreports-6.15.0/src/net/sf/jasperreports/extensions/ListExtensionRegistry.rs | EuKaique/Projeto-Diagnostico-Medico | cf7cc535ff31992b7568dba777c8faafafa6920c | [
"MIT"
] | null | null | null | net.sf.jasperreports.extensions.ListExtensionRegistry
| 27 | 53 | 0.907407 |
045f0321a9554099112e86b392f033cf226e79d5 | 652 | java | Java | transcoder-status/src/main/java/es/videotranscoding/transcoder/status/dto/TranscodeMediaDTO.java | MasterCloudApps-Projects/ElasticTranscoder | c8ca85fb33595783e3caa403fb9caabb6d1d7cf8 | [
"Apache-2.0"
] | 2 | 2020-09-30T15:50:58.000Z | 2020-09-30T17:25:24.000Z | transcoder-status/src/main/java/es/videotranscoding/transcoder/status/dto/TranscodeMediaDTO.java | MasterCloudApps-Projects/ElasticTranscoder | c8ca85fb33595783e3caa403fb9caabb6d1d7cf8 | [
"Apache-2.0"
] | null | null | null | transcoder-status/src/main/java/es/videotranscoding/transcoder/status/dto/TranscodeMediaDTO.java | MasterCloudApps-Projects/ElasticTranscoder | c8ca85fb33595783e3caa403fb9caabb6d1d7cf8 | [
"Apache-2.0"
] | 2 | 2020-09-29T15:01:21.000Z | 2021-07-17T14:00:33.000Z | package es.videotranscoding.transcoder.status.dto;
import lombok.*;
/**
 * Data transfer object describing one media transcoding job and its status.
 * Lombok generates the builder, constructors, getters/setters, equals,
 * hashCode and toString.
 */
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Data
public class TranscodeMediaDTO {
    private String id;
    private String flatMediaId;
    private String type;
    private String name;
    private String path;
    private String audioCodec;
    private String videoCodec;
    private String container;
    private String preset;
    private String resolution;
    // Progress of the transcode; NOTE(review): unit (fraction vs. percent)
    // is not visible here — confirm against the producer.
    private double processed;
    private String command;
    private Boolean active;
    private Boolean error;
    private String user;
    private String filesize;
    private String bitrate;
}
| 14.173913 | 50 | 0.719325 |