text
stringlengths 1
1.05M
|
|---|
<gh_stars>0
const Util = require('../../util/MitUtil.js');
const Discord = require('discord.js');
module.exports = {
name: 'invite',
description: 'Envia una invitacion del bot UwU',
aliases: ['link'],
usage: '',
cooldown: 2,
args: 0,
catergory: 'Utilidad',
async execute(client, message, args) {
message.channel.send(`${message.author} mande un link de invitación a tus mensajes directos owo!`);
message.delete();
const creator = '824760675586932766'
const embed = new Discord.MessageEmbed()
.setAuthor(client.user.username, client.user.displayAvatarURL({ format: 'png', dynamic: true, size: 1024 }), 'https://github.com/KitsuneCode')
.setDescription('**[💖 Añademe dando click aqui!!](https://discord.com/oauth2/authorize?client_id=831865259357896755&permissions=8&scope=bot%20applications.commands)**\nAl invitarla a tu servidor me ayudas a seguir mejorando mi proyecto y seguir actualizandola UwU')
.setColor('RANDOM')
.setImage('https://kitsunityx.glitch.me/api/sfw/wag/wag1.gif')
.setFooter('Hecho con 💖 por KitsuneCode#5011, nyah nwn')
return message.author.send(embed);
}
};
|
int[] array = {10, 4, 8, 3, 7, 6, 2, 9, 1, 5};
/**
 * Sorts the given array into ascending order using selection sort.
 * Operates in place; O(n^2) comparisons, at most n-1 swaps.
 *
 * @param array the array to sort (modified in place; may be empty)
 */
public void sortAscending(int[] array) {
    for (int start = 0; start < array.length - 1; start++) {
        // Locate the smallest remaining element.
        int smallest = start;
        for (int candidate = start + 1; candidate < array.length; candidate++) {
            if (array[candidate] < array[smallest]) {
                smallest = candidate;
            }
        }
        // Swap it into position only if it moved.
        if (smallest != start) {
            int swap = array[start];
            array[start] = array[smallest];
            array[smallest] = swap;
        }
    }
}
sortAscending(array);
|
#
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Register the XenonHD build target for the Samsung Galaxy Note 5 (T-Mobile)
# in the Android `lunch` menu.
add_lunch_combo xenonhd_nobleltetmo-userdebug
|
# Install the local conda-store package (very verbose output), without
# pulling in its dependencies — they are assumed to be provided elsewhere.
${PYTHON} -m pip install ./conda-store -vv --no-deps
|
<gh_stars>0
App = {
  web3Provider: null,
  contracts: {},

  /**
   * Entry point: renders the book catalogue from books.json, then boots web3.
   * NOTE(review): the shared template is mutated and its HTML re-appended per
   * book; each appended row carries the values written at append time.
   */
  init: async function() {
    $.getJSON('../books.json', function(data) {
      var booksRow = $('#booksRow');
      var bookTemplate = $('#bookTemplate');
      // FIX: `i` was an implicit global; keep it function-scoped.
      for (var i = 0; i < data.length; i++) {
        bookTemplate.find('.panel-title').text(data[i].name);
        bookTemplate.find('img').attr('src', data[i].picture);
        bookTemplate.find('.book-author').text(data[i].author);
        bookTemplate.find('.book-rating').text(data[i].rating);
        bookTemplate.find('.btn-borrow').attr('data-id', data[i].id);
        booksRow.append(bookTemplate.html());
      }
    });
    return await App.initWeb3();
  },

  /**
   * Selects a web3 provider: injected EIP-1193 provider, legacy injected
   * web3, or a local Ganache node as fallback; then initializes the contract.
   */
  initWeb3: async function() {
    // Modern dapp browsers...
    if (window.ethereum) {
      App.web3Provider = window.ethereum;
      try {
        // Request account access.
        // NOTE(review): ethereum.enable() is deprecated in favour of
        // ethereum.request({ method: 'eth_requestAccounts' }); kept as-is to
        // preserve behaviour on older providers — confirm target wallets.
        await window.ethereum.enable();
      } catch (error) {
        // User denied account access...
        console.error("User denied account access");
      }
    }
    // Legacy dapp browsers...
    else if (window.web3) {
      App.web3Provider = window.web3.currentProvider;
    }
    // If no injected web3 instance is detected, fall back to Ganache
    else {
      App.web3Provider = new Web3.providers.HttpProvider('http://localhost:8545');
    }
    web3 = new Web3(App.web3Provider);
    return App.initContract();
  },

  /**
   * Loads the Truffle artifact, binds it to the provider and marks borrowed
   * books. Note $.getJSON is async, so bindEvents() runs before the artifact
   * callback completes.
   */
  initContract: function() {
    $.getJSON('Borrow.json', function(data) {
      var BorrowArtifact = data;
      App.contracts.Borrow = TruffleContract(BorrowArtifact);
      App.contracts.Borrow.setProvider(App.web3Provider);
      return App.markBorrowed();
    });
    return App.bindEvents();
  },

  bindEvents: function() {
    $(document).on('click', '.btn-borrow', App.handleBorrow);
  },

  /**
   * Disables the button of every book whose on-chain borrower slot is set.
   * (The `borrowers`/`account` parameters are unused legacy arguments, kept
   * for interface compatibility.)
   */
  markBorrowed: function(borrowers, account) {
    var borrowInstance;
    App.contracts.Borrow.deployed().then(function(instance) {
      borrowInstance = instance;
      return borrowInstance.getBorrowers.call();
    }).then(function(borrowers) {
      console.log(borrowers);
      // FIX: `i` was an implicit global here too.
      for (var i = 0; i < borrowers.length; i++) {
        if (borrowers[i] !== '0x0000000000000000000000000000000000000000') {
          $('.panel-book').eq(i).find('button').text('Success').attr('disabled', true);
        }
      }
    }).catch(function(err) {
      console.log(err.message);
    });
  },

  /** Sends a borrowBook transaction for the clicked book, then re-marks the UI. */
  handleBorrow: function(event) {
    event.preventDefault();
    // FIX: always pass a radix to parseInt.
    var bookId = parseInt($(event.target).data('id'), 10);
    var borrowInstance;
    web3.eth.getAccounts(function(error, accounts) {
      if (error) {
        console.log(error);
      }
      var account = accounts[0];
      App.contracts.Borrow.deployed().then(function(instance) {
        borrowInstance = instance;
        return borrowInstance.borrowBook(bookId, {from: account});
      }).then(function(result) {
        return App.markBorrowed();
      }).catch(function(err) {
        console.log(err.message);
      });
    });
  }
};
// Boot the app once the DOM is ready and the window has fully loaded.
$(function() {
  // FIX: the .load() event alias was removed in jQuery 3; bind explicitly.
  $(window).on('load', function() {
    App.init();
  });
});
|
#!/bin/bash
# For use inside docker shell (github.com/mzedeler/dsh)
# WARNING(review): this disables TLS certificate verification for all wget
# calls in the container — the nvm installer below is then fetched over
# HTTPS without validation. Confirm this is acceptable for this build env.
echo "check_certificate = off" >> ~/.wgetrc
# Install nvm v0.31.0, load it into this shell, and select Node v5.6.0.
wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.31.0/install.sh | bash
. /root/.nvm/nvm.sh
nvm install v5.6.0
nvm use v5.6.0
# Install app dependencies and start the app under `forever`.
npm install && npm run forever
|
-- Students master table (sample fixture data; ids 1-30 are inserted below).
DROP TABLE IF EXISTS `students`;
CREATE TABLE `students` (
`id` int(9) unsigned NOT NULL AUTO_INCREMENT,
`first_name` varchar(100) NOT NULL,
`last_name` varchar(100) NOT NULL,
`city` varchar(255) NOT NULL,
`phone` varchar(255) NOT NULL,
`gender` varchar(255) NOT NULL,
`email` varchar(255) NOT NULL,
`address` varchar(255) NOT NULL,
`postcode` int(11) NOT NULL,
`date_of_birth` date NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=31 DEFAULT CHARSET=utf8;
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (1, 'Ailn', 'Rathmouth', 'Rathmouth', '05144461974', 'Female', '<EMAIL>', '97228 Emmalee Harbors Suite 421 South Emmet, TX 54950', 23031, '2001-12-16');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (2, 'Hounson', 'Port Lolamouth', 'Port Lolamouth', '1-136-366-9496', 'Female', '<EMAIL>', '62654 Hirthe Lodge Port Zeldafurt, DE 87270', 27108, '1977-01-21');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (3, 'Tison', 'Lavernastad', 'Lavernastad', '157-283-0337x872', 'Female', '<EMAIL>', '9107 Blanda Plains Apt. 476 North Burdettechester, NM 91601', 76631, '1984-03-26');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (4, 'Surmeyers', 'Ethelville', 'Ethelville', '552.496.5910', 'Male', '<EMAIL>', '0997 Gleason Rue Apt. 149 East Gretaland, GA 13633-6343', 37965, '2005-03-17');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (5, 'Bob', 'Schulistland', 'Schulistland', '895-877-0076x197', 'Male', '<EMAIL>', '39405 Nicolas Walk Apt. 041 Kozeychester, AL 20566-8063', 23031, '2019-07-15');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (6, 'Holdey', 'Kennithside', 'Kennithside', '(055)403-3761', 'Female', '<EMAIL>', '747 Lucienne Shoal Suite 395 Runolfsdottirberg, NV 65296-7656', 23031, '1971-05-10');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (7, 'Blewmen', 'Oberbrunnerchester', 'Oberbrunnerchester', '(598)918-4548x480', 'Male', '<EMAIL>', '34720 Randi Roads Apt. 947 Kossmouth, WV 43552-7336', 24772, '2011-03-26');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (8, 'Vanacci', 'Marcoport', 'Marcoport', '013-440-6362', 'Female', '<EMAIL>', '044 Gaylord Corner Apt. 486 Larsonchester, MA 59370', 16, '1973-05-17');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (9, 'Marflitt', 'New Adalineton', 'New Adalineton', '1-113-016-8153x30326', 'Male', '<EMAIL>', '1670 Bogisich Lane Apt. 874 Port Malvina, CT 60714', 89650, '2009-03-23');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (10, 'Pietesch', 'East Kayla', 'East Kayla', '(177)500-7249', 'Female', '<EMAIL>', '651 Mallory Centers Hoppefort, PA 46020', 45934, '2002-01-06');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (11, 'Henrique', 'Kevinmouth', 'Kevinmouth', '1-241-311-9984', 'Male', '<EMAIL>', '31480 Oscar Wells Kassulkeborough, DC 35274-5250', 7820, '2002-10-24');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (12, 'Tynan', 'North Deondreland', 'North Deondreland', '(674)474-7300', 'Male', '<EMAIL>', '2729 Lucienne Roads Apt. 317 Theodorafurt, SD 21614-2447', 32292, '1980-12-21');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (13, 'Pinkard', 'Port Adrianaborough', 'Port Adrianaborough', '02115446108', 'Female', '<EMAIL>', '73620 Carmela Corners Apt. 609 New Litzy, DE 72732-8030', 55848, '1981-10-08');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (14, 'Haslock', 'South Hunter', 'South Hunter', '09398525252', 'Male', '<EMAIL>', '65792 Celine Coves Lempibury, MT 60747', 29257, '1985-04-25');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (15, 'Rickell', 'East Breanne', 'East Breanne', '692.772.5134x95174', 'Female', '<EMAIL>', '51665 Hermina Islands Apt. 724 East Nasirfort, CT 57320-2649', 49701, '1971-01-10');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (16, 'Bob', 'Port Thoraland', 'Port Thoraland', '1-628-108-7615', 'Male', '<EMAIL>', '9256 Price Summit Garrickland, KY 23867', 23031, '2016-02-05');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (17, 'Boxhill', 'West Jedediahville', 'West Jedediahville', '(725)577-0459', 'Male', '<EMAIL>', '557 Leo Alley Suite 273 Considinestad, AL 94813', 80323, '1995-04-30');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (18, 'Leeke', 'Franeckiland', 'Franeckiland', '1-889-468-2992x930', 'Female', '<EMAIL>', '1080 Orn Brook Heidenreichberg, GA 90248', 39741, '1970-01-03');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (19, 'Whale', 'Lake Clare', 'Lake Clare', '513-793-1124x98433', 'Male', '<EMAIL>', '605 Rosa Mills Suite 999 West Clarkburgh, MO 74959-5620', 24982, '1978-06-23');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (20, 'Ori', 'Beattyburgh', 'Beattyburgh', '08253021064', 'Male', '<EMAIL>', '6164 Spencer Meadow Apt. 689 Baumbachtown, PA 23843-5497', 7864, '1982-11-15');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (21, 'Tagg', 'Oberbrunnerport', 'Oberbrunnerport', '1-725-956-1107x13861', 'Male', '<EMAIL>', '572 Cyril Parkways Apt. 479 Murazikchester, KY 73127', 36356, '1993-11-18');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (22, 'Costerd', 'Ricofort', 'Ricofort', '096-776-9198', 'Male', '<EMAIL>', '96564 Cooper Corner Apt. 352 Port Floy, OR 86049', 69978, '2003-07-26');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (23, 'Corrin', 'East Graycefurt', 'East Graycefurt', '(583)403-4746', 'Male', '<EMAIL>', '353 Israel Streets Jedediahport, GA 99481', 49614, '1988-02-29');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (24, 'Bunford', 'West Joeport', 'West Joeport', '(245)726-8274x48974', 'Female', '<EMAIL>', '9755 Kshlerin Brooks East Roger, ND 23843-8553', 16353, '1979-11-25');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (25, 'Lumley', 'Oceanestad', 'Oceanestad', '06965723793', 'Female', '<EMAIL>', '9877 Kaia Village New D\'angelomouth, KS 82353-9742', 70534, '1985-06-15');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (26, 'Whiles', 'Creolashire', 'Creolashire', '800.294.1751x13357', 'Male', '<EMAIL>', '229 Derrick Village Gayview, OR 63688-9938', 58634, '1990-09-30');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (27, 'Presdee', 'North Ernestinaton', 'North Ernestinaton', '556-111-2276x003', 'Female', '<EMAIL>', '4353 Bayer Lights East Bentonville, GA 61468-6552', 90902, '1985-11-01');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (28, 'Bedberry', 'Jakaylaland', 'Jakaylaland', '1-078-468-7156', 'Female', '<EMAIL>', '51526 Stamm Garden Apt. 560 Hahnview, CA 29074-8976', 10160, '1998-09-04');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (29, 'Danilchev', 'North Esta', 'North Esta', '1-244-938-3948', 'Female', '<EMAIL>', '34989 Kuphal Inlet Suite 190 Gutkowskiville, IA 69417', 40221, '1992-10-04');
INSERT INTO `students` (`id`, `first_name`, `last_name`, `city`, `phone`, `gender`, `email`, `address`, `postcode`, `date_of_birth`) VALUES (30, 'Whaplington', 'West Breanabury', 'West Breanabury', '1-045-399-1032x67023', 'Female', '<EMAIL>', '488 Martine Villages Bernadettetown, AR 39587-0766', 23031, '1998-04-05');
-- Per-subject marks referencing students by id.
-- NOTE(review): `student_id` carries no FOREIGN KEY constraint, so orphaned
-- references are possible, and `mark` has no range CHECK.
DROP TABLE IF EXISTS `marks`;
CREATE TABLE `marks` (
`id` int(9) unsigned NOT NULL AUTO_INCREMENT,
`student_id` int(9) unsigned NOT NULL,
`mark` int(9) unsigned NOT NULL,
`subject` varchar(255) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=41 DEFAULT CHARSET=utf8;
-- Fixture marks. student_id values reference students 1-9 except where noted.
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (1, 3, 23, 'Magic Survival');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (2, 4, 56, 'Planetary Geography');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (3, 9, 77, 'Foreign Evolutionary Biology');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (4, 6, 83, 'Intergallactic Relations');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (5, 9, 45, 'Grand Strategy');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (6, 7, 76, 'Foreign History');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (7, 1, 98, 'Alien Dance History');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (8, 7, 87, 'Foreign Social Skills');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (9, 8, 65, 'Alien Social Skills');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (10, 4, 76, 'Magic Music');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (11, 8, 76, 'Alien Genealogy');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (12, 4, 89, 'Magic Rituals');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (13, 1, 69, 'Planetary Ecology');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (14, 7, 79, 'Military Law');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (15, 3, 57, 'Foreign Ethics');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (16, 4, 56, 'Foreign Instrumental Music');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (17, 8, 59, 'Foreign Services');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (18, 4, 91, 'Alien Economics');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (19, 1, 91, 'Alien Ethics');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (20, 9, 23, 'Magic Arts');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (21, 6, 34, 'Alien Social Studies');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (22, 7, 54, 'Foreign Political Sciences');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (23, 8, 56, 'Terraforming');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (24, 4, 76, 'Transmutation');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (25, 1, 98, 'Space Travel');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (26, 5, 76, 'Alien Medicine');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (27, 3, 98, 'Foreign Statistics');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (28, 4, 100, 'Necromancy');
-- NOTE(review): `00` is just integer 0 — possibly intentional, but worth confirming.
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (29, 6, 00, 'Magic Music');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (30, 2, 34, 'Planetary History');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (31, 6, 58, 'Herbalism');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (32, 4, 34, 'Dimensional Manipulation');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (33, 4, 67, 'Nutrition Recognition');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (34, 3, 56, 'Foreign Pathology');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (35, 6, 88, 'Foreign Arts');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (36, 7, 80, 'Alien Bioengineering');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (37, 6, 81, 'Alien Physiology');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (38, 2, 71, 'Mathematics');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (39, 1, 72, 'Foreign Arts');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (40, 3, 84, 'Galactic History');
-- NOTE(review): the next two rows reference student_ids 31 and 50, which do
-- not exist in `students` (ids go up to 30), and row 42 has a mark of 103
-- while every other mark is <= 100. Their ids also exceed AUTO_INCREMENT=41
-- in the CREATE TABLE above (harmless in MySQL, but the dump is inconsistent).
-- Possibly intentional "bad data" for exercises — confirm before fixing.
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (41, 31, 84, 'Galactic History');
INSERT INTO `marks` (`id`, `student_id`, `mark`, `subject`) VALUES (42, 50, 103, 'Calculus 3');
|
import pandas as pd
import math
import random
from collections import defaultdict
from opendp.smartnoise.sql.parse import QueryParser
from opendp.smartnoise._ast.ast import Table
from ._mechanisms.rand import laplace
sys_rand = random.SystemRandom()
def preprocess_df_from_query(schema, df, query_string):
    """
    Returns a dataframe with user_id | tuple based on query grouping keys.

    Parameters
    ----------
    schema : smartnoise schema used to resolve the table and its key column
    df : input pandas DataFrame (not modified)
    query_string : SQL query whose GROUP BY columns define the item tuples

    Returns
    -------
    DataFrame with the table's key column plus a "group_cols" column, where
    each cell is a tuple of that row's grouping-column values.
    """
    qp = QueryParser(schema)
    q = qp.query(query_string)
    queries = qp.queries(query_string)
    query_ast = queries[0]
    group_cols = [ge.expression.name for ge in query_ast.agg.groupingExpressions]
    table_name = q.source.find_node(Table).name
    key_col = schema[table_name].key_cols()[0].name
    preprocessed_df = pd.DataFrame()
    preprocessed_df[key_col] = df[key_col]
    # BUG FIX: the previous code did `tuple(df[group_cols].values.tolist())`,
    # which stores a *list* (unhashable) in every cell; policy_laplace then
    # fails when it uses those cells as defaultdict keys and isin() targets.
    # Store one tuple per row instead.
    preprocessed_df["group_cols"] = [tuple(row) for row in df[group_cols].values.tolist()]
    return preprocessed_df
def policy_laplace(df, eps, delta, max_contrib):
    """
    Differentially Private Set Union: https://arxiv.org/abs/2002.09745

    Given a database of n users, each with a subset of items, this
    (eps, delta)-differentially private algorithm outputs the largest
    possible set of the union of these items.

    Parameters
    ----------
    df : pandas df with user_id | item, where item is a tuple ("group_cols")
    max_contrib : maximum number of items a user can contribute
    eps, delta : privacy parameters

    Returns
    -------
    df filtered to the rows whose item survived the noisy-threshold release.

    NOTE(review): mutates the caller's df by adding a "hash" column before
    reassigning the local name — confirm callers don't rely on df being
    untouched. Also, Python's hash() of str is salted per process
    (PYTHONHASHSEED), so the user ordering below varies between runs.
    """
    # Threshold (rho) and cap (gamma) from the DPSU paper; rho is maximized
    # over the possible per-user contribution counts 1..max_contrib.
    alpha = 3.0
    lambd = 1 / eps
    rho = [
        1 / i + lambd * math.log(1 / (2 * (1 - (1 - delta) ** (1 / i))))
        for i in range(1, max_contrib + 1)
    ]
    rho = max(rho)
    gamma = rho + alpha * lambd
    histogram = defaultdict(float)
    key_col = df.columns[0]
    # Shuffle user processing order pseudo-randomly via a string hash.
    df["hash"] = df[key_col].apply(lambda x: hash(str(x)))
    df = df.sort_values("hash")
    for idx, group in df.groupby(key_col):
        items = list(group["group_cols"])
        # Cap each user's contribution at max_contrib via uniform sampling.
        if len(items) > max_contrib:
            items = sys_rand.sample(items, max_contrib)
        # Remaining weight needed for each of this user's items to reach gamma.
        cost_dict = {}
        for item in items:
            if histogram[item] < gamma:
                cost_dict[item] = gamma - histogram[item]
        # Spread this user's unit budget greedily: fill the cheapest item's
        # gap across all k remaining items first, then recurse on the rest.
        budget = 1
        k = len(cost_dict)
        # NOTE: this `idx` shadows the groupby loop variable above (harmless,
        # since the outer idx is not used after this point).
        sorted_items = [k for k, v in sorted(cost_dict.items(), key=lambda item: item[1])]
        for idx, curr_item in enumerate(sorted_items):
            cost = (
                cost_dict[curr_item] * k
            )  # cost of increasing weights of remaining k items by cost_dict[curr_item]
            if cost <= budget:
                # update weights of remaining k items with cost_dict[curr_item]
                for j in range(idx, k):
                    remaining_item = sorted_items[j]
                    histogram[remaining_item] += cost_dict[curr_item]
                    cost_dict[remaining_item] -= cost_dict[curr_item]
                budget -= cost
                k -= 1
            else:
                # update weights of remaining k items with budget / k
                for j in range(idx, k):
                    remaining_item = sorted_items[j]
                    histogram[remaining_item] += budget / k
                break
    # Release only items whose noisy weight clears the rho threshold.
    items = []
    for item in histogram.keys():
        histogram[item] += laplace(0, lambd, 1)[0]
        if histogram[item] > rho:
            items.append(item)
    df = df[df["group_cols"].isin(items)]
    return df
def run_dpsu(schema, input_df, query, eps, delta=math.exp(-10), max_contrib=5):
    """Run Differentially Private Set Union over the rows selected by `query`.

    Groups input rows by the query's GROUP BY columns, applies the Laplace
    policy to pick which groups may be released, and returns the input rows
    restricted to the surviving groups (helper columns removed, de-duplicated).
    """
    grouped = preprocess_df_from_query(schema, input_df, query)
    kept = policy_laplace(grouped, eps, delta, max_contrib)
    merged = pd.merge(input_df, kept, on=kept.columns[0])
    merged.drop(["group_cols", "hash"], axis=1, inplace=True)
    merged.drop_duplicates(inplace=True)
    return merged
|
def format_headers(headers):
    """Render a header mapping as "Key: Value" lines.

    Args:
        headers: mapping of header name -> header value.

    Returns:
        One "key: value" pair per line, joined with newlines, with no
        trailing newline. An empty mapping yields the empty string.
    """
    # join() avoids the original's quadratic string concatenation and the
    # trailing-newline-plus-strip() dance.
    return "\n".join(f"{key}: {value}" for key, value in headers.items())
# Test the function with the given example
# (demo driver: prints the formatted header block for a sample request).
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0",
    "Accept-Encoding": "gzip, deflate",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "DNT": "1",
    "Connection": "close",
    "Upgrade-Insecure-Requests": "1"
}
print(format_headers(headers))
|
import { TravelPerk } from '@services';
import { IStore } from '@store';
import { ILogger, toShortDateFormat } from '@utils';
import { IInvoice, IInvoiceLine, IManager, ITaxesSummaryItem } from './contracts';
/**
 * Coordinates TravelPerk invoice retrieval with per-account sync-state
 * storage, mapping API payloads (snake_case) into the domain contracts.
 */
export class Manager implements IManager {
    constructor(
        private readonly client: TravelPerk.IClient,
        private readonly store: IStore,
        private readonly accountId: string,
        // @ts-ignore
        private readonly logger: ILogger,
    ) { }

    /** Returns the date of the last invoice sync for this account, if any. */
    async getLastInvoicesSync(): Promise<Date | undefined> {
        const lastSyncDate = await this.store.getLastSyncDate(this.accountId);
        return lastSyncDate;
    }

    /** Persists `syncedAt` as this account's last successful sync time. */
    async updateLastInvoicesSync(syncedAt: Date): Promise<void> {
        await this.store.updateLastSyncDate(this.accountId, syncedAt);
    }

    /**
     * Fetches paid invoices — optionally only those issued on/after `after` —
     * mapped into the domain IInvoice shape. Returns undefined when the
     * client yields no payload.
     */
    async getPaidInvoices(after?: Date): Promise<IInvoice[] | undefined> {
        const newPaidInvoices = await this.client.invoices.getInvoices(
            {
                status: TravelPerk.InvoiceStatus.Paid,
                issuing_date_gte: after ? toShortDateFormat(after) : undefined,
            }
        );
        return newPaidInvoices?.map(mapToInvoice);
    }

    /** Fetches an invoice's line items mapped into the domain shape. */
    async getInvoiceLines(serialNumber: string): Promise<IInvoiceLine[] | undefined> {
        const items = await this.client.invoices.getInvoiceLineItems(serialNumber);
        return items?.map(mapToInvoiceLineItem);
    }

    /** Downloads the raw invoice document bytes. */
    async getInvoiceDocument(serialNumber: string): Promise<ArrayBuffer> {
        return this.client.invoices.getInvoiceDocument(serialNumber);
    }
}
/**
 * Converts a TravelPerk invoice payload (snake_case) into the domain
 * IInvoice shape. `total` arrives as a string and is coerced to a number.
 */
function mapToInvoice(invoice: TravelPerk.IInvoice): IInvoice {
    const taxesSummary = invoice.taxes_summary.map(mapToTaxesSummaryItem);
    return {
        serialNumber: invoice.serial_number,
        status: invoice.status,
        profileId: invoice.profile_id,
        profileName: invoice.profile_name,
        currency: invoice.currency,
        total: Number(invoice.total),
        dueDate: invoice.due_date,
        issuingDate: invoice.issuing_date,
        taxesSummary,
    };
}
/** Converts a TravelPerk invoice line payload into the domain IInvoiceLine shape. */
function mapToInvoiceLineItem({
    description,
    tax_amount,
    tax_percentage,
    total_amount,
    expense_date,
}: TravelPerk.IInvoiceLine): IInvoiceLine {
    return {
        description,
        taxAmount: tax_amount,
        taxPercentage: tax_percentage,
        totalAmount: total_amount,
        expenseDate: expense_date,
    };
}
/** Extracts the tax amount (string in the API payload) as a number. */
function mapToTaxesSummaryItem({ tax_amount }: TravelPerk.ITaxesSummaryItem): ITaxesSummaryItem {
    return { taxAmount: Number(tax_amount) };
}
|
#ifndef LINKED_LIST_C
#define LINKED_LIST_C
#include "linkedList.h"
local Node* linkedList_initNode(void*);
/* Resets the list to the empty state (all fields zeroed, length 0). */
inline ERROR_CODE linkedList_init(LinkedList* list) {
    *list = (LinkedList) {0};
    return ERROR(ERROR_NO_ERROR);
}
/*
 * Pushes `data` onto the front of the list (`tail` acts as the head of a
 * singly linked stack). The empty/non-empty branches of the original are
 * equivalent: a fresh node's `next` is already NULL, so linking it in
 * front of a NULL tail is a no-op.
 */
inline ERROR_CODE linkedList_add(LinkedList* list, void* data) {
    Node* newNode = linkedList_initNode(data);
    if(!LINKED_LIST_IS_EMPTY(list)) {
        newNode->next = list->tail;
    }
    list->tail = newNode;
    list->length++;
    return ERROR(ERROR_NO_ERROR);
}
/*
 * Allocates a node holding `data`, with its `next` pointer cleared.
 * Returns NULL on allocation failure (the failure is also logged).
 *
 * FIX: the original logged an out-of-memory error but then fell through
 * and dereferenced the NULL pointer; now it returns NULL early. Callers
 * should check the result.
 */
inline Node* linkedList_initNode(void* data){
    Node* node = malloc(sizeof(*node));
    if(node == NULL){
        UTIL_LOG_ERROR(util_toErrorString(ERROR_OUT_OF_MEMORY));
        return NULL;
    }
    node->data = data;
    node->next = NULL;
    return node;
}
/* Points the iterator at the first node (`tail` is the head of this list). */
inline void linkedList_initIterator(LinkedListIterator* it, LinkedList* list) {
it->node = list->tail;
}
/*
 * Returns the current node and advances the iterator. The caller must
 * ensure LINKED_LIST_ITERATOR_HAS_NEXT before calling.
 */
inline Node* linkedList_iteratorNextNode(LinkedListIterator* it){
    Node* current = it->node;
    it->node = current->next;
    return current;
}
/*
 * Frees every node in the list. The stored `data` payloads are NOT freed;
 * ownership stays with the caller.
 *
 * FIX: also resets the list to its empty state — previously `tail` was
 * left dangling and `length` stale, so any reuse of the list after free
 * touched freed memory.
 */
inline void linkedList_free(LinkedList* list){
    LinkedListIterator it;
    linkedList_initIterator(&it, list);
    while(LINKED_LIST_ITERATOR_HAS_NEXT(&it)){
        free(linkedList_iteratorNextNode(&it));
    }
    list->tail = NULL;
    list->length = 0;
}
/*
 * Removes (and frees) the first node whose payload pointer equals `data`.
 *
 * Uses Linus Torvalds' double-pointer technique: `node` always points at
 * the link referencing the current node (the head pointer or a `next`
 * field), so unlinking needs no head-special-case branch.
 *
 * Returns ERROR_NO_ERROR on success, or ERROR_FAILED_TO_REMOVE_NODE when
 * no node holds `data`.
 *
 * FIX: the advance step read `node = ¤t->next;` — mojibake of
 * `node = &current->next;` (the `&curr` was swallowed by an HTML-entity
 * conversion to U+00A4) — which did not compile. The dead commented-out
 * alternative implementation has been removed.
 */
inline ERROR_CODE linkedList_remove(LinkedList* list, void* data){
    Node** node;
    for(node = &list->tail; *node;){
        Node* current = *node;
        if(current->data == data){
            *node = current->next;
            list->length--;
            free(current);
            return ERROR(ERROR_NO_ERROR);
        }else{
            node = &current->next;
        }
    }
    return ERROR(ERROR_FAILED_TO_REMOVE_NODE);
}
#endif
|
<reponame>Mihran9991/async-forms-back<gh_stars>0
import { Sequelize } from "sequelize-typescript";
import { ModelAttributes, QueryOptionsWithWhere } from "sequelize";
import { Nullable } from "../types/main.types";
/**
 * Thin data-access layer over Sequelize's QueryInterface for tables that
 * are created and queried dynamically (no model classes involved).
 */
export class TableService {
  private sequelize: Sequelize;

  public constructor(sequelize: Sequelize) {
    this.sequelize = sequelize;
  }

  /** Returns the first matching row cast to T, or null when nothing matches. */
  public async getAs<T>(
    tableName: string,
    attributes: QueryOptionsWithWhere
  ): Promise<Nullable<T>> {
    const rows = await this.getManyAs<T>(tableName, attributes);
    return rows.length ? rows[0] : null;
  }

  /** Returns all matching rows cast to T[]; with no filter, the whole table. */
  public async getManyAs<T>(
    tableName: string,
    attributes: Nullable<QueryOptionsWithWhere> = null
  ): Promise<T[]> {
    const rows = await this.sequelize
      .getQueryInterface()
      .select(null, tableName, attributes ?? undefined);
    return rows as T[];
  }

  /** Creates a table with the given column definitions. */
  public create(tableName: string, attributes: ModelAttributes): Promise<void> {
    const queryInterface = this.sequelize.getQueryInterface();
    return queryInterface.createTable(tableName, attributes);
  }

  /** Inserts a single row and returns the driver result cast to T. */
  public async insert<T>(tableName: string, attributes: object): Promise<T> {
    const result = await this.sequelize
      .getQueryInterface()
      .bulkInsert(tableName, [attributes]);
    return result as T;
  }
}
export default TableService;
|
#!/bin/bash
set -e
# Installs OpenMage LTS into the docker-compose environment next to this
# script: boots MySQL + Apache, waits for the database, then runs the CLI
# installer with (overridable via .env) defaults.

dir=$(dirname "${BASH_SOURCE[0]}")
# FIX: quote the path so the script also works from directories with spaces.
cd "$dir"
test -f .env && source .env

# WARNING(review): world-writable permissions are convenient for the demo
# containers but unsafe beyond local development — confirm intended scope.
chmod 777 ../../app/etc ../../media ../../var

docker-compose up -d mysql apache
sleep 4
echo "Starting services..."
# Poll up to ~20s until the `openmage` database becomes visible.
for i in $(seq 1 20); do
    sleep 1
    docker exec openmage_mysql_1 mysql -e 'show databases;' 2>/dev/null | grep -qF 'openmage' && break
done

# Build the base URL; port 80 is omitted from the URL entirely.
HOST_PORT=":${HOST_PORT:-80}"
test "$HOST_PORT" = ":80" && HOST_PORT=""
BASE_URL=${BASE_URL:-"http://${HOST_NAME:-openmage-7f000001.nip.io}${HOST_PORT}/"}
ADMIN_EMAIL="${ADMIN_EMAIL:-admin@example.com}"
ADMIN_USERNAME="${ADMIN_USERNAME:-admin}"
ADMIN_PASSWORD="${ADMIN_PASSWORD:-veryl0ngpassw0rd}"

echo "Installing OpenMage LTS..."
docker-compose run --rm cli php install.php \
    --license_agreement_accepted yes \
    --locale en_US \
    --timezone America/New_York \
    --default_currency USD \
    --db_host mysql \
    --db_name openmage \
    --db_user openmage \
    --db_pass openmage \
    --url "$BASE_URL" \
    --use_rewrites yes \
    --use_secure no \
    --secure_base_url "$BASE_URL" \
    --use_secure_admin no \
    --skip_url_validation \
    --admin_firstname OpenMage \
    --admin_lastname User \
    --admin_email "$ADMIN_EMAIL" \
    --admin_username "$ADMIN_USERNAME" \
    --admin_password "$ADMIN_PASSWORD"

echo ""
echo "Setup is complete!"
echo "Visit ${BASE_URL}admin and login with '$ADMIN_USERNAME' : '$ADMIN_PASSWORD'"
echo "MySQL server IP: $(docker exec openmage_apache_1 getent hosts mysql | awk '{print $1}')"
|
/// <summary>
/// Generates a random 8-character alphanumeric string (A-Z, a-z, 0-9).
/// Note: System.Random is not cryptographically secure — do not use the
/// result as a token or secret.
/// </summary>
/// <returns>An 8-character random alphanumeric string.</returns>
public static string GenerateRandomString()
{
    // FIX: the original did not compile — `stringchars` had no declaration,
    // and `string` (a C# keyword) was used as a variable name.
    const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890";
    char[] buffer = new char[8];
    Random random = new Random();
    for (int i = 0; i < 8; i++)
    {
        buffer[i] = chars[random.Next(chars.Length)];
    }
    return new string(buffer);
}
|
<gh_stars>0
package com.example.wbdemo.info.maindata;
import java.io.Serializable;
/**
* Created by zhoujunyu on 2019/5/23.
*/
/**
 * Serializable bean describing a post's visibility settings.
 * Field names use snake_case to match the JSON payload they are bound to.
 */
public class VisibleBean implements Serializable {
    /**
     * type : 0
     * list_id : 0
     */
    // Visibility type code (0 in sample payloads); exact semantics come from
    // the upstream API — confirm against its documentation before relying on it.
    private int type;
    // Id of the user list this visibility applies to (0 in sample payloads).
    private int list_id;

    public int getType() {
        return type;
    }

    public void setType(int type) {
        this.type = type;
    }

    public int getList_id() {
        return list_id;
    }

    public void setList_id(int list_id) {
        this.list_id = list_id;
    }
}
|
-- Monthly purchase counts per product for the current calendar year.
-- One row per (product_id, month); listed chronologically, with the most
-- purchased products first within each month.
SELECT product_id,
MONTH(created_at) as month_number,
COUNT(*) as total_purchases
FROM purchases
WHERE YEAR(created_at) = YEAR(CURRENT_DATE)
GROUP BY product_id, MONTH(created_at)
ORDER BY month_number, total_purchases DESC;
|
#include <iostream>
#include <sstream>

#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>
#include <boost/serialization/split_member.hpp>
/**
 * 2-D integer point with Boost text-archive (de)serialization support.
 *
 * Uses boost::serialization's split save/load members; both halves are
 * identical here, but the split keeps the two directions explicit.
 */
class Point {
private:
    int x;
    int y;

    // Grant boost::serialization access to the private save/load members.
    friend class boost::serialization::access;

    template <class Archive>
    void save(Archive &ar, const unsigned int version) const {
        ar & x;
        ar & y;
    }

    template <class Archive>
    void load(Archive &ar, const unsigned int version) {
        ar & x;
        ar & y;
    }

    // Generates the template serialize() that dispatches to save()/load()
    // depending on archive direction; overloads the public serialize() below.
    BOOST_SERIALIZATION_SPLIT_MEMBER()

public:
    Point() : x(0), y(0) {}
    Point(int x, int y) : x(x), y(y) {}

    /** Writes this point to `os` as a Boost text archive. */
    void serialize(std::ostream &os) const {
        boost::archive::text_oarchive oa(os);
        oa << *this;
    }

    /** Restores this point's state from a Boost text archive read from `is`. */
    void deserialize(std::istream &is) {
        boost::archive::text_iarchive ia(is);
        ia >> *this;
    }

    int getX() const {
        return x;
    }

    int getY() const {
        return y;
    }
};
int main() {
    // Round-trip a Point through a text archive held in a std::stringstream.
    // std::stringstream requires <sstream>; the original relied on a
    // transitive include, which is fragile, so <sstream> is now included
    // explicitly at the top of the file.
    Point p1(5, 10);
    std::stringstream ss;
    p1.serialize(ss);
    Point p2;
    p2.deserialize(ss);
    // Expect the deserialized coordinates to match the original (5, 10).
    std::cout << "Deserialized Point - X: " << p2.getX() << ", Y: " << p2.getY() << std::endl;
    return 0;
}
|
# Refresh Homebrew's package index, then install ImageMagick (macOS).
brew update
brew install imagemagick
|
#!/bin/bash -xe
# This script is meant to be run within a mock environment, using
# mock_runner.sh or chrooter, from the root of the repository.
# Print the deployment working directory: a tmpfs path under /dev/shm when
# enough free space is available (faster), otherwise a path under $PWD.
get_run_path() {
    # if above ram_threshold KBs are available in /dev/shm, run there
    local suffix="${1:-lago}"
    local ram_threshold=15000000
    local avail_shm=$(df --output=avail /dev/shm | sed 1d)
    [[ "$avail_shm" -ge "$ram_threshold" ]] && \
        mkdir -p "/dev/shm/ost" && \
        echo "/dev/shm/ost/deployment-$suffix" || \
        echo "$PWD/deployment-$suffix"
}
# Gather VM logs (via `lago collect`), every lago.log under the run path,
# and ansible.log into exported-artifacts/.  `|| :` keeps the lago step
# best-effort so log collection never aborts the caller.
collect_logs() {
    local run_path="$1"
    local artifacts_dir="exported-artifacts"
    local vms_logs="${artifacts_dir}/vms_logs"
    mkdir -p "$vms_logs"
    lago \
        --workdir "$run_path" \
        collect \
        --output "$vms_logs" \
        || :
    find "$run_path" \
        -name lago.log \
        -exec cp {} "$artifacts_dir" \;
    cp ansible.log "$artifacts_dir"
}
# EXIT-trap handler: collect logs, then destroy the lago environment,
# falling back to raw libvirt cleanup if lago fails.  `set +e` ensures
# cleanup runs to completion even when individual steps fail.
cleanup() {
    set +e
    local run_path="$1"
    collect_logs "$run_path"
    lago --workdir "$run_path" destroy --yes \
        || force_cleanup
}
# Fallback cleanup: destroy lago-created libvirt domains and networks
# directly.  Domains are matched by the lago-master/lago-node naming
# pattern; networks by lago's 4-char hash prefix, excluding the
# vdsm-ovirtmgmt management network.
force_cleanup() {
    echo "Cleaning with libvirt"
    local domains=($( \
        virsh -c qemu:///system list --all --name \
        | egrep -w "lago-master[0-9]*|lago-node[0-9]*"
    ))
    local nets=($( \
        virsh -c qemu:///system net-list --all \
        | egrep -w "[[:alnum:]]{4}-.*" \
        | egrep -v "vdsm-ovirtmgmt" \
        | awk '{print $1;}' \
    ))
    for domain in "${domains[@]}"; do
        virsh -c qemu:///system destroy "$domain"
    done
    for net in "${nets[@]}"; do
        virsh -c qemu:///system net-destroy "$net"
    done
    echo "Cleaning with libvirt Done"
}
# Environment setup for running lago inside a chroot: force the direct
# libguestfs backend and make sure the /dev/kvm device node exists.
set_params() {
    # needed to run lago inside chroot
    # TO-DO: use libvirt backend instead
    export LIBGUESTFS_BACKEND=direct
    # uncomment the next lines for extra verbose output
    #export LIBGUESTFS_DEBUG=1 LIBGUESTFS_TRACE=1
    # ensure /dev/kvm exists, otherwise it will still use
    # direct backend, but without KVM(much slower).
    if [[ ! -c "/dev/kvm" ]]; then
        mknod /dev/kvm c 10 232
    fi
}
# Install the Ansible role dependencies listed in requirements.yml.
install_requirements() {
    ansible-galaxy install -r requirements.yml
}
# Entry point: provision a cluster (openshift or kubernetes) with
# ansible-playbook, driven by the CLUSTER_TYPE / MODE / PROVIDER
# environment variables.  Cleanup is guaranteed via an EXIT trap.
main() {
    # cluster_type: Openshift or Kubernetes
    # mode:
    #   release - install kubevirt with kubevirt.yaml,
    #             and fetch kubevirt's containers from docker hub
    #
    #   dev - install kubevirt with the dev manifests, and
    #         build kubevirt's containers on the vms
    local cluster_type="${CLUSTER_TYPE:-openshift}"
    local mode="${MODE:-release}"
    local provider="${PROVIDER:-lago}"
    local run_path="$(get_run_path "$cluster_type")"
    local args=("prefix=$run_path")
    local inventory_file="$(realpath inventory)"
    # Collect logs and tear down VMs however we exit.
    trap "cleanup $run_path" EXIT
    set_params
    install_requirements
    if [[ "$cluster_type" == "openshift" ]]; then
        [[ -e openshift-ansible ]] || \
            git clone -b release-3.7 https://github.com/openshift/openshift-ansible
        args+=(
            "openshift_ansible_dir=$(realpath openshift-ansible)"
            "cluster_type=openshift"
        )
    elif [[ "$cluster_type" == "kubernetes" ]]; then
        args+=("cluster_type=kubernetes")
    else
        # Fixed typo in the error message ("unkown" -> "unknown").
        echo "$cluster_type unknown cluster type"
        exit 1
    fi
    args+=(
        "mode=$mode"
        "provider=$provider"
        "inventory_file=$inventory_file"
    )
    ansible-playbook \
        -u root \
        -i "$inventory_file" \
        -v \
        -e "${args[*]}" \
        control.yml
}
# Run main only when this script is executed directly (not when sourced).
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
    main "$@"
fi
|
#!/usr/bin/env bash
# Wait up to 2 minutes (24 attempts x 5s) for the Cilium daemon to report
# healthy; on timeout, dump the service journal since its last start and
# exit non-zero.
main() {
    local cilium_started
    cilium_started=false
    for ((i = 0 ; i < 24; i++)); do
        if cilium status --brief > /dev/null 2>&1; then
            cilium_started=true
            break
        fi
        sleep 5s
        echo "Waiting for Cilium daemon to come up..."
    done
    if [ "$cilium_started" = true ] ; then
        echo 'Cilium successfully started!'
    else
        >&2 echo 'Timeout waiting for Cilium to start...'
        # Fixed: awk '{print $2 $3}' concatenated the date and time fields
        # with no separator, and the unquoted $(...) word-split the result,
        # producing an invalid --since timestamp.  Join with a space and
        # quote the whole substitution.
        journalctl -u cilium.service --since "$(systemctl show -p ActiveEnterTimestamp cilium.service | awk '{print $2" "$3}')"
        >&2 echo 'Cilium failed to start'
        exit 1
    fi
}
main "$@"
|
import java.io.*;
/**
 * Checker fixture exercising java.io.File methods whose boolean return
 * value is ignored.  The inline comments mark the calls a result-ignored
 * checker should NOT flag; do not "fix" these calls -- the exact shape of
 * this code is the point of the fixture.
 */
public class Test {
    public static void main(String[] args) throws IOException {
        new File("foo").createNewFile();
        new File("foo").delete(); // Don't flag: there's usually nothing to do
        new File("foo").mkdir();
        new File("foo").mkdirs(); // Don't flag: the return value is uninformative/misleading
        new File("foo").renameTo(new File("bar"));
        new File("foo").setLastModified(0L);
        new File("foo").setReadOnly();
        new File("foo").setWritable(true);
    }
}
|
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
# "npm run compile-install-script" to compile install.sh
# The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
# --- Banner and installation-method selection -------------------------------
# NOTE: this script is generated by the batsh compiler (see header); the
# quoted builtin names ("echo", "read", ...) are compiler output, not style.
"echo" "-e" "====================="
"echo" "-e" "Uptime Ron Installer"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Ron in your current machine with git, Node.js 14 and pm2"
"echo" "-e" "Docker - Install Uptime Ron Docker container"
"echo" "-e" ""
# Installation method comes from $1 when provided, otherwise prompt.
if [ "$1" != "" ]; then
    type="$1"
else
    "read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
fi
defaultPort="3001"
# Verify the installed Node.js major version is new enough, warning on v12.
# NOTE(review): the numeric threshold is 12 but the error message says
# "Required Node.js 14" -- generated code; confirm the intended minimum
# before changing either value.
function checkNode {
    local _0
    nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
    "echo" "-e" "Node Version: ""$nodeVersion"
    _0="12"
    if [ $(($nodeVersion < $_0)) == 1 ]; then
        "echo" "-e" "Error: Required Node.js 14"
        "exit" "1"
    fi
    if [ "$nodeVersion" == "12" ]; then
        "echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
    fi
}
# Debian/Ubuntu path: ensure Node.js 14 (via the nodesource setup script),
# curl, and git are installed using apt.
function deb {
    nodeCheck=$(node -v)
    apt --yes update
    if [ "$nodeCheck" != "" ]; then
        "checkNode"
    else
        # Old nodejs binary name is "nodejs"
        check=$(nodejs --version)
        if [ "$check" != "" ]; then
            "echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
            exit 1
        fi
        curlCheck=$(curl --version)
        if [ "$curlCheck" == "" ]; then
            "echo" "-e" "Installing Curl"
            apt --yes install curl
        fi
        "echo" "-e" "Installing Node.js 14"
        curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
        apt --yes install nodejs
        node -v
        nodeCheckAgain=$(node -v)
        if [ "$nodeCheckAgain" == "" ]; then
            "echo" "-e" "Error during Node.js installation"
            exit 1
        fi
    fi
    check=$(git --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Installing Git"
        apt --yes install git
    fi
}
# --- Main dispatch: "local" (bare-metal) install vs Docker install ----------
if [ "$type" == "local" ]; then
    defaultInstallPath="/opt/uptime-kuma"
    # Detect the distribution: RHEL-family via /etc/redhat-release,
    # Ubuntu/Debian via the first word of /etc/issue.
    if [ -e "/etc/redhat-release" ]; then
        os=$("cat" "/etc/redhat-release")
        distribution="rhel"
    else
        if [ -e "/etc/issue" ]; then
            os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
            if [ "$os" == "Ubuntu" ]; then
                distribution="ubuntu"
            fi
            if [ "$os" == "Debian" ]; then
                distribution="debian"
            fi
        fi
    fi
    arch=$(uname -i)
    "echo" "-e" "Your OS: ""$os"
    "echo" "-e" "Distribution: ""$distribution"
    "echo" "-e" "Arch: ""$arch"
    # Port from $3 or prompt; install path from $2 or prompt.
    if [ "$3" != "" ]; then
        port="$3"
    else
        "read" "-p" "Listening Port [$defaultPort]: " "port"
        if [ "$port" == "" ]; then
            port="$defaultPort"
        fi
    fi
    if [ "$2" != "" ]; then
        installPath="$2"
    else
        "read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
        if [ "$installPath" == "" ]; then
            installPath="$defaultInstallPath"
        fi
    fi
    # CentOS
    if [ "$distribution" == "rhel" ]; then
        nodeCheck=$(node -v)
        if [ "$nodeCheck" != "" ]; then
            "checkNode"
        else
            curlCheck=$(curl --version)
            if [ "$curlCheck" == "" ]; then
                "echo" "-e" "Installing Curl"
                yum -y -q install curl
            fi
            "echo" "-e" "Installing Node.js 14"
            curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
            yum install -y -q nodejs
            node -v
            nodeCheckAgain=$(node -v)
            if [ "$nodeCheckAgain" == "" ]; then
                "echo" "-e" "Error during Node.js installation"
                exit 1
            fi
        fi
        check=$(git --version)
        if [ "$check" == "" ]; then
            "echo" "-e" "Installing Git"
            yum -y -q install git
        fi
    # Ubuntu
    else
        if [ "$distribution" == "ubuntu" ]; then
            "deb"
        # Debian
        else
            if [ "$distribution" == "debian" ]; then
                "deb"
            else
                # Unknown distribution
                error=$((0))
                check=$(git --version)
                if [ "$check" == "" ]; then
                    error=$((1))
                    "echo" "-e" "Error: git is missing"
                fi
                check=$(node -v)
                if [ "$check" == "" ]; then
                    error=$((1))
                    "echo" "-e" "Error: node is missing"
                fi
                if [ $(($error > 0)) == 1 ]; then
                    "echo" "-e" "Please install above missing software"
                    exit 1
                fi
            fi
        fi
    fi
    # Ensure pm2 (process manager) is available, then clone and start.
    check=$(pm2 --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Installing PM2"
        npm install pm2 -g
        pm2 startup
    fi
    mkdir -p $installPath
    cd $installPath
    git clone https://github.com/louislam/uptime-kuma.git .
    npm run setup
    pm2 start server/server.js --name uptime-kuma -- --port=$port
else
    # --- Docker install path ---
    defaultVolume="uptime-kuma"
    check=$(docker -v)
    if [ "$check" == "" ]; then
        "echo" "-e" "Error: docker is not found!"
        exit 1
    fi
    check=$(docker info)
    if [[ "$check" == *"Is the docker daemon running"* ]]; then
        "echo" "Error: docker is not running"
        "exit" "1"
    fi
    if [ "$3" != "" ]; then
        port="$3"
    else
        "read" "-p" "Expose Port [$defaultPort]: " "port"
        if [ "$port" == "" ]; then
            port="$defaultPort"
        fi
    fi
    if [ "$2" != "" ]; then
        volume="$2"
    else
        "read" "-p" "Volume Name [$defaultVolume]: " "volume"
        if [ "$volume" == "" ]; then
            volume="$defaultVolume"
        fi
    fi
    "echo" "-e" "Port: $port"
    "echo" "-e" "Volume: $volume"
    docker volume create $volume
    docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
fi
"echo" "-e" "http://localhost:$port"
|
#!/bin/bash
# Build Google Cartographer for ROS from source (using catkin_ws) on Ubuntu 18.04
# TODO fix build failing
# NOTE: shebang changed from /bin/sh to /bin/bash because the script uses
# the bash builtin `source`.
sudo apt-get update
sudo apt-get install -y python-wstool python-rosdep ninja-build stow
# Fixed: the original tested [ ! -d "~/catkin_ws" ] -- a tilde inside quotes
# is never expanded, so the directory check always succeeded and the
# workspace was re-initialized on every run.  Use $HOME instead.
if [ ! -d "$HOME/catkin_ws" ]
then
    mkdir -p "$HOME/catkin_ws/src"
    cd "$HOME/catkin_ws"
    catkin_make
    sudo rosdep init
fi
cd "$HOME/catkin_ws"
wstool init src
wstool merge -t src https://raw.githubusercontent.com/cartographer-project/cartographer_ros/master/cartographer_ros.rosinstall
wstool update -t src
rosdep update
rosdep install --from-paths src --ignore-src --rosdistro=${ROS_DISTRO} -y
source "$HOME/catkin_ws/src/cartographer/scripts/install_abseil.sh"
# sudo apt-get remove ros-${ROS_DISTRO}-abseil-cpp
catkin_make_isolated --install --use-ninja
echo 'source ~/catkin_ws/install_isolated/setup.bash' >> ~/.bashrc
|
// By KRT girl xiplus
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
int main(){
    // ios::sync_with_stdio(false);
    // cin.tie(0);
    // N item positions on a track of length L; up to K items can be carried
    // per trip from either end.  NOTE(review): assumes contest input is
    // given with positions in [0, L] -- confirm against the problem statement.
    int N,K,L;
    cin>>N>>K>>L;
    vector<int> left,right;
    int t;
    for(int q=0;q<N;q++){
        cin>>t;
        if(t==0);                       // items exactly at 0 cost nothing; skip
        else if(t<=L/2)left.push_back(t);    // nearer the left end: keep distance from 0
        else right.push_back(L-t);           // nearer the right end: keep distance from L
    }
    // right was filled in decreasing distance order; reverse so both sides
    // are ascending by distance.
    reverse(right.begin(),right.end());
    int lsz=left.size(),rsz=right.size();
    long long ans=0;
    // Baseline: handle each side independently, grabbing K items per trip
    // starting from the farthest; each trip costs twice the farthest distance.
    for(int q=lsz-1;q>=0;q-=K)ans+=left[q]*2;
    for(int q=rsz-1;q>=0;q-=K)ans+=right[q]*2;
    // Alternative: one full crossing of length L that carries q items from
    // the left and up to K-q from the right; remaining items fall back to
    // round trips as above.  Take the cheapest option.
    for(int q=0;q<=K&&lsz-q>=0;q++){
        int w=min(K-q,rsz);
        long long tans=L;
        for(int e=lsz-1-q;e>=0;e-=K)tans+=left[e]*2;
        for(int e=rsz-1-w;e>=0;e-=K)tans+=right[e]*2;
        ans=min(ans,tans);
    }
    cout<<ans<<endl;
}
|
/**
* Contains the classes used to migrate old ArcGIS layers to the new one.
*/
package io.opensphere.arcgis2.migration;
|
#!/bin/bash
# Lint all PHP files outside vendor/: first a syntax check (php -l), then
# PEAR coding-standard checks via phpcs, skipping the vendored
# easybitcoin.php.  Exits non-zero on the first failure.
composer install
PHP_FILES=$(find . -path ./vendor -prune -o -type f -iname "*.php" -print)
echo "--- PHP Syntax"
# NOTE: the unquoted ${PHP_FILES} in the `for` list intentionally
# word-splits the find output; paths with whitespace are not supported.
# The per-file expansions below are now quoted so each filename reaches
# php/phpcs/basename as a single argument.
for PHP_FILE in ${PHP_FILES}; do
    php -l "${PHP_FILE}"
    if [ $? -ne 0 ]; then
        exit 1
    fi
done
echo "--- PHP Standards"
for PHP_FILE in ${PHP_FILES}; do
    BASENAME=$(basename "${PHP_FILE}")
    if [ "${BASENAME}" != "easybitcoin.php" ]; then
        ./vendor/bin/phpcs --colors -n "${PHP_FILE}"
        if [ $? -ne 0 ]; then
            exit 1
        else
            echo "No PEAR standards failures in ${PHP_FILE}"
        fi
    fi
done
|
#! /usr/bin/env bash
# This file is part of the Hipace++ test suite.
# It runs a Hipace simulation for a can beam, and compares the result
# of the simulation to a benchmark.
# abort on first encountered error
set -eu -o pipefail
# Read input parameters
HIPACE_EXECUTABLE=$1
HIPACE_SOURCE_DIR=$2
HIPACE_EXAMPLE_DIR=${HIPACE_SOURCE_DIR}/examples/blowout_wake
HIPACE_TEST_DIR=${HIPACE_SOURCE_DIR}/tests
# The test name is this script's basename without its extension.
FILE_NAME=`basename "$0"`
TEST_NAME="${FILE_NAME%.*}"
# Run the simulation
mpiexec -n 2 $HIPACE_EXECUTABLE $HIPACE_EXAMPLE_DIR/inputs_normalized max_step=2 \
    hipace.file_prefix=$TEST_NAME
# Compare the results with checksum benchmark
$HIPACE_TEST_DIR/checksum/checksumAPI.py \
    --evaluate \
    --file_name $TEST_NAME \
    --test-name $TEST_NAME \
    --skip "{'beam': 'id'}"
|
<reponame>coding200/quizzapp<gh_stars>0
import {
BaseEntity,
PrimaryGeneratedColumn,
Column,
Entity,
Unique,
OneToMany,
Timestamp,
CreateDateColumn,
UpdateDateColumn,
} from 'typeorm';
import * as bcrypt from 'bcrypt';
// import { Task } from '../../task/Entities/task.entity';
@Entity('users')
@Unique(['username'])
// @Unique(['phone'])
// @Unique(['email'])
/**
 * TypeORM entity backing the `users` table. Usernames are unique; the
 * password column stores a bcrypt hash (compared via validatePassword).
 */
export class User extends BaseEntity {
  @PrimaryGeneratedColumn('uuid')
  id: string;
  @Column()
  first_name: string;
  @Column()
  last_name: string;
  @Column()
  phone: string;
  @Column()
  email: string;
  @Column()
  username: string;
  @Column()
  password: string;
  @Column({ default: true })
  is_active: boolean;
  // @Column()
  // salt: string;
  // @CreateDateColumn({
  //   type: 'Timestamp',
  //   default: () => 'CURRENT_TIMESTAMP(6)',
  // })
  // created_at: Date;
  // @UpdateDateColumn()
  // updated_at: Timestamp;
  // @Column()
  // salt: string;
  // @Column({ nullable: true })
  // role: UserRole;
  // @OneToMany(() => Task, (task) => task.user)
  // tasks: Task[];
  /**
   * Compares a plaintext candidate password against the stored bcrypt hash.
   * @param password - plaintext password to check
   * @returns true when the password matches the stored hash
   */
  async validatePassword(password: string): Promise<boolean> {
    // bcrypt.compare already resolves to a boolean; return it directly
    // instead of the original redundant loose comparison (`match == true`).
    return bcrypt.compare(password, this.password);
  }
}
|
<gh_stars>0
# NOTE: Python 2 script (print statements, %-formatting) -- keep as-is.
# Assign a string to variable x. Formatter d means a decimal integer
x = "There are %d types of people." % 10
binary = "binary"
do_not = "don't"
# Formatter s means str() of the variables (already strings in this case)
y = "Those who know %s and those who %s." % (binary, do_not)
print x
print y
# Formatter r means repr() of the variable and usually makes Python show quotes
# One exception is the repr(Boolean) which returns True/False without quotes
print "I said: %r." % x
print "I also said: '%s'." % y
hilarious = False
# This variable is a formatted string, but not yet evaluated/finalized!
joke_evaluation = "Isn't that joke funny?! %r"
# The %r placeholder is filled in here, at print time.
print joke_evaluation % hilarious
w = "This is the left side of..."
e = "a string with a right side."
# Plain string concatenation with +.
print w + e
|
<filename>hub-detect/src/main/groovy/com/blackducksoftware/integration/hub/detect/workflow/DetectConfigurationFactory.java<gh_stars>0
/**
* hub-detect
*
* Copyright (C) 2018 Black Duck Software, Inc.
* http://www.blackducksoftware.com/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.blackducksoftware.integration.hub.detect.workflow;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import com.blackducksoftware.integration.hub.detect.configuration.DetectConfiguration;
import com.blackducksoftware.integration.hub.detect.configuration.DetectProperty;
import com.blackducksoftware.integration.hub.detect.configuration.PropertyAuthority;
import com.blackducksoftware.integration.hub.detect.lifecycle.run.RunOptions;
import com.blackducksoftware.integration.hub.detect.tool.signaturescanner.BlackDuckSignatureScannerOptions;
import com.blackducksoftware.integration.hub.detect.util.filter.DetectOverrideableFilter;
import com.blackducksoftware.integration.hub.detect.workflow.bdio.BdioOptions;
import com.blackducksoftware.integration.hub.detect.workflow.file.AirGapOptions;
import com.blackducksoftware.integration.hub.detect.workflow.file.DirectoryOptions;
import com.blackducksoftware.integration.hub.detect.workflow.hub.DetectProjectServiceOptions;
import com.blackducksoftware.integration.hub.detect.workflow.project.ProjectNameVersionOptions;
import com.blackducksoftware.integration.hub.detect.workflow.search.SearchOptions;
/**
 * Translates raw {@link DetectConfiguration} property values into the typed
 * option objects consumed by the rest of the workflow.  Purely a read-and-
 * repackage layer: no validation or defaulting happens here beyond what the
 * configuration itself provides.
 */
public class DetectConfigurationFactory {
    DetectConfiguration detectConfiguration;

    public DetectConfigurationFactory(DetectConfiguration detectConfiguration) {
        this.detectConfiguration = detectConfiguration;
    }

    /**
     * Builds the top-level run options, resolving the doubly-deprecated
     * signature-scanner-disabled flags and the polaris (swip) toggle.
     */
    public RunOptions createRunOptions() {
        Optional<Boolean> sigScanDisabled = Optional.empty();
        //TODO: Fix this when deprecated properties are removed
        //This is because it is double deprecated so we must check if either property is set.
        boolean originalPropertySet = detectConfiguration.wasPropertyActuallySet(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_DISABLED);
        boolean newPropertySet = detectConfiguration.wasPropertyActuallySet(DetectProperty.DETECT_HUB_SIGNATURE_SCANNER_DISABLED);
        if (originalPropertySet || newPropertySet) {
            sigScanDisabled = Optional.of(detectConfiguration.getBooleanProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_DISABLED, PropertyAuthority.None));
        }
        Optional<Boolean> polarisEnabled = Optional.empty();
        if (detectConfiguration.wasPropertyActuallySet(DetectProperty.DETECT_SWIP_ENABLED)) {
            polarisEnabled = Optional.of(detectConfiguration.getBooleanProperty(DetectProperty.DETECT_SWIP_ENABLED, PropertyAuthority.None));
        }
        String includedTools = detectConfiguration.getProperty(DetectProperty.DETECT_TOOLS, PropertyAuthority.None);
        String excludedTools = detectConfiguration.getProperty(DetectProperty.DETECT_TOOLS_EXCLUDED, PropertyAuthority.None);
        DetectToolFilter detectToolFilter = new DetectToolFilter(excludedTools, includedTools, sigScanDisabled, polarisEnabled);
        boolean unmapCodeLocations = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_PROJECT_CODELOCATION_UNMAP, PropertyAuthority.None);
        String aggregateName = detectConfiguration.getProperty(DetectProperty.DETECT_BOM_AGGREGATE_NAME, PropertyAuthority.None);
        String preferredTools = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_TOOL, PropertyAuthority.None);
        return new RunOptions(unmapCodeLocations, aggregateName, preferredTools, detectToolFilter);
    }

    /** Collects the source/output/bdio/scan directory paths. */
    public DirectoryOptions createDirectoryOptions() {
        String sourcePath = detectConfiguration.getProperty(DetectProperty.DETECT_SOURCE_PATH, PropertyAuthority.DirectoryManager);
        String outputPath = detectConfiguration.getProperty(DetectProperty.DETECT_OUTPUT_PATH, PropertyAuthority.DirectoryManager);
        String bdioPath = detectConfiguration.getProperty(DetectProperty.DETECT_BDIO_OUTPUT_PATH, PropertyAuthority.DirectoryManager);
        String scanPath = detectConfiguration.getProperty(DetectProperty.DETECT_SCAN_OUTPUT_PATH, PropertyAuthority.DirectoryManager);
        return new DirectoryOptions(sourcePath, outputPath, bdioPath, scanPath);
    }

    /** Collects the air-gap override paths for the gradle/nuget/docker inspectors. */
    public AirGapOptions createAirGapOptions() {
        String gradleOverride = detectConfiguration.getProperty(DetectProperty.DETECT_GRADLE_INSPECTOR_AIR_GAP_PATH, PropertyAuthority.AirGapManager);
        String nugetOverride = detectConfiguration.getProperty(DetectProperty.DETECT_NUGET_INSPECTOR_AIR_GAP_PATH, PropertyAuthority.AirGapManager);
        String dockerOverride = detectConfiguration.getProperty(DetectProperty.DETECT_DOCKER_INSPECTOR_AIR_GAP_PATH, PropertyAuthority.AirGapManager);
        return new AirGapOptions(dockerOverride, gradleOverride, nugetOverride);
    }

    /** Collects detector-search options rooted at the given directory. */
    public SearchOptions createSearchOptions(File directory) {
        List<String> excludedDirectories = Arrays.asList(detectConfiguration.getStringArrayProperty(DetectProperty.DETECT_DETECTOR_SEARCH_EXCLUSION, PropertyAuthority.None));
        boolean forceNestedSearch = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_DETECTOR_SEARCH_CONTINUE, PropertyAuthority.None);
        int maxDepth = detectConfiguration.getIntegerProperty(DetectProperty.DETECT_DETECTOR_SEARCH_DEPTH, PropertyAuthority.None);
        // Detector type names are matched case-insensitively by upper-casing here.
        String excluded = detectConfiguration.getProperty(DetectProperty.DETECT_EXCLUDED_DETECTOR_TYPES, PropertyAuthority.None).toUpperCase();
        String included = detectConfiguration.getProperty(DetectProperty.DETECT_INCLUDED_DETECTOR_TYPES, PropertyAuthority.None).toUpperCase();
        DetectOverrideableFilter bomToolFilter = new DetectOverrideableFilter(excluded, included);
        return new SearchOptions(directory, excludedDirectories, forceNestedSearch, maxDepth, bomToolFilter);
    }

    /** Collects BDIO output options (currently just the aggregate name). */
    public BdioOptions createBdioOptions() {
        String aggregateName = detectConfiguration.getProperty(DetectProperty.DETECT_BOM_AGGREGATE_NAME, PropertyAuthority.None);
        return new BdioOptions(aggregateName);
    }

    /** Collects project naming and version-defaulting options. */
    public ProjectNameVersionOptions createProjectNameVersionOptions(String sourceDirectoryName) {
        // NOTE(review): projectBomTool reads DETECT_PROJECT_NAME, the same
        // property as overrideProjectName below.  This looks like it should
        // read a project-detector/tool property instead -- confirm the
        // intended DetectProperty before changing.
        String projectBomTool = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_NAME, PropertyAuthority.None);
        String overrideProjectName = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_NAME, PropertyAuthority.None);
        String overrideProjectVersionName = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_VERSION_NAME, PropertyAuthority.None);
        String defaultProjectVersionText = detectConfiguration.getProperty(DetectProperty.DETECT_DEFAULT_PROJECT_VERSION_TEXT, PropertyAuthority.None);
        String defaultProjectVersionScheme = detectConfiguration.getProperty(DetectProperty.DETECT_DEFAULT_PROJECT_VERSION_SCHEME, PropertyAuthority.None);
        String defaultProjectVersionFormat = detectConfiguration.getProperty(DetectProperty.DETECT_DEFAULT_PROJECT_VERSION_TIMEFORMAT, PropertyAuthority.None);
        return new ProjectNameVersionOptions(sourceDirectoryName, projectBomTool, overrideProjectName, overrideProjectVersionName, defaultProjectVersionText, defaultProjectVersionScheme, defaultProjectVersionFormat);
    }

    /** Collects options for creating/updating the project on the server. */
    public DetectProjectServiceOptions createDetectProjectServiceOptions() {
        final String projectVersionPhase = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_VERSION_PHASE, PropertyAuthority.None);
        final String projectVersionDistribution = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_VERSION_DISTRIBUTION, PropertyAuthority.None);
        final Integer projectTier = detectConfiguration.getIntegerProperty(DetectProperty.DETECT_PROJECT_TIER, PropertyAuthority.None);
        final String projectDescription = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_DESCRIPTION, PropertyAuthority.None);
        final String projectVersionNotes = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_VERSION_NOTES, PropertyAuthority.None);
        final String[] cloneCategories = detectConfiguration.getStringArrayProperty(DetectProperty.DETECT_PROJECT_CLONE_CATEGORIES, PropertyAuthority.None);
        final Boolean projectLevelAdjustments = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_PROJECT_LEVEL_ADJUSTMENTS, PropertyAuthority.None);
        final Boolean forceProjectVersionUpdate = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_PROJECT_VERSION_UPDATE, PropertyAuthority.None);
        final String cloneVersionName = detectConfiguration.getProperty(DetectProperty.DETECT_CLONE_PROJECT_VERSION_NAME, PropertyAuthority.None);
        return new DetectProjectServiceOptions(projectVersionPhase, projectVersionDistribution, projectTier, projectDescription, projectVersionNotes, cloneCategories, projectLevelAdjustments, forceProjectVersionUpdate, cloneVersionName);
    }

    /** Collects signature-scanner options (renamed misspelled local: parallelProcessors). */
    public BlackDuckSignatureScannerOptions createBlackDuckSignatureScannerOptions() {
        final String[] signatureScannerPaths = detectConfiguration.getStringArrayProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_PATHS, PropertyAuthority.None);
        final String[] exclusionPatterns = detectConfiguration.getStringArrayProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_EXCLUSION_PATTERNS, PropertyAuthority.None);
        final String[] exclusionNamePatterns = detectConfiguration.getStringArrayProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_EXCLUSION_NAME_PATTERNS, PropertyAuthority.None);
        final Integer scanMemory = detectConfiguration.getIntegerProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_MEMORY, PropertyAuthority.None);
        final Integer parallelProcessors = detectConfiguration.getIntegerProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_PARALLEL_PROCESSORS, PropertyAuthority.None);
        final Boolean cleanupOutput = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_CLEANUP, PropertyAuthority.None);
        final Boolean dryRun = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_DRY_RUN, PropertyAuthority.None);
        final Boolean snippetMatching = detectConfiguration.getBooleanProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_SNIPPET_MODE, PropertyAuthority.None);
        final String codeLocationPrefix = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_CODELOCATION_PREFIX, PropertyAuthority.None);
        final String codeLocationSuffix = detectConfiguration.getProperty(DetectProperty.DETECT_PROJECT_CODELOCATION_SUFFIX, PropertyAuthority.None);
        final String additionalArguments = detectConfiguration.getProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_ARGUMENTS, PropertyAuthority.None);
        final Integer maxDepth = detectConfiguration.getIntegerProperty(DetectProperty.DETECT_BLACKDUCK_SIGNATURE_SCANNER_EXCLUSION_PATTERN_SEARCH_DEPTH, PropertyAuthority.None);
        return new BlackDuckSignatureScannerOptions(signatureScannerPaths, exclusionPatterns, exclusionNamePatterns, scanMemory, parallelProcessors, cleanupOutput, dryRun,
            snippetMatching, codeLocationPrefix, codeLocationSuffix, additionalArguments, maxDepth);
    }
}
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 TH<NAME> Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from pipeline.engine import states
from pipeline.engine.core import runtime
from pipeline.engine.models import FunctionSwitch, NodeRelationship, Status
from pipeline.tests.engine.mock import * # noqa
from pipeline.tests.mock_settings import * # noqa
# Dotted import paths of engine internals patched/mocked by the tests below.
PIPELINE_BUILD_RELATIONSHIP = "pipeline.engine.models.NodeRelationship.objects.build_relationship"
PIPELINE_STATUS_TRANSIT = "pipeline.engine.models.Status.objects.transit"
PIPELINE_ENGINE_IS_FROZEN = "pipeline.engine.models.FunctionSwitch.objects.is_frozen"
# Patch target for the engine's re-run limit setting (value supplied per-test).
PIPELINE_SETTING_RERUN_MAX_LIMIT = "pipeline.engine.core.runtime.RERUN_MAX_LIMIT"
class RuntimeTestCase(TestCase):
def test_runtime_exception_handler(self):
process = MockPipelineProcess()
process.exit_gracefully = MagicMock()
e = Exception()
# raise case
with runtime.runtime_exception_handler(process):
raise e
process.exit_gracefully.assert_called_with(e)
process.exit_gracefully.reset_mock()
# normal case
with runtime.runtime_exception_handler(process):
pass
process.exit_gracefully.assert_not_called()
@patch(PIPELINE_BUILD_RELATIONSHIP, MagicMock())
@patch(PIPELINE_ENGINE_IS_FROZEN, MagicMock(return_value=False))
@patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True)))
@patch(PIPELINE_SETTING_RERUN_MAX_LIMIT, 0)
def test_run_loop(self):
# 1. test child meet destination
destination_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=destination_node),
destination_id=destination_node.id,
current_node_id=destination_node.id,
)
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_called_with(destination_node.id)
process.root_sleep_check.assert_not_called()
process.sleep.assert_not_called()
process.subproc_sleep_check.assert_not_called()
FunctionSwitch.objects.is_frozen.assert_not_called()
process.freeze.assert_not_called()
Status.objects.transit.assert_not_called()
process.refresh_current_node.assert_not_called()
NodeRelationship.objects.build_relationship.assert_not_called()
self.assertEqual(process.current_node_id, destination_node.id)
# 2. test root sleep check return true、
# 2.1. root pipeline is revoke
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(True, states.REVOKED))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.sleep.assert_called_once()
process.sleep.assert_called_with(do_not_save=True)
process.subproc_sleep_check.assert_not_called()
FunctionSwitch.objects.is_frozen.assert_not_called()
process.freeze.assert_not_called()
Status.objects.transit.assert_not_called()
process.refresh_current_node.assert_not_called()
NodeRelationship.objects.build_relationship.assert_not_called()
self.assertEqual(process.current_node_id, current_node.id)
# 2.2. root pipeline is not revoke
for state in states.SLEEP_STATES.difference({states.REVOKED}):
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(True, state))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.sleep.assert_called_once_with(do_not_save=False)
process.subproc_sleep_check.assert_not_called()
FunctionSwitch.objects.is_frozen.assert_not_called()
process.freeze.assert_not_called()
Status.objects.transit.assert_not_called()
process.refresh_current_node.assert_not_called()
NodeRelationship.objects.build_relationship.assert_not_called()
self.assertEqual(process.current_node_id, current_node.id)
# 3. test sub process sleep check return true
current_node = IdentifyObject()
subproc_above = uniqid()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(True, subproc_above))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.subproc_sleep_check.assert_called()
process.sleep.assert_called_once_with(adjust_status=True, adjust_scope=subproc_above)
FunctionSwitch.objects.is_frozen.assert_not_called()
process.freeze.assert_not_called()
Status.objects.transit.assert_not_called()
process.refresh_current_node.assert_not_called()
NodeRelationship.objects.build_relationship.assert_not_called()
self.assertEqual(process.current_node_id, current_node.id)
# 4. test engine is frozen
with patch(PIPELINE_ENGINE_IS_FROZEN, MagicMock(return_value=True)):
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(False, []))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.subproc_sleep_check.assert_called()
process.sleep.assert_not_called()
FunctionSwitch.objects.is_frozen.assert_called_once()
process.freeze.assert_called_once()
Status.objects.transit.assert_not_called()
process.refresh_current_node.assert_not_called()
NodeRelationship.objects.build_relationship.assert_not_called()
self.assertEqual(process.current_node_id, current_node.id)
FunctionSwitch.objects.is_frozen.reset_mock()
# 5. test transit fail
with patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False))):
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(False, []))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.subproc_sleep_check.assert_called()
FunctionSwitch.objects.is_frozen.assert_called_once()
process.freeze.assert_not_called()
Status.objects.transit.assert_called_with(
id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)
)
process.sleep.assert_called_once_with(adjust_status=True)
process.refresh_current_node.assert_not_called()
NodeRelationship.objects.build_relationship.assert_not_called()
self.assertEqual(process.current_node_id, current_node.id)
FunctionSwitch.objects.is_frozen.reset_mock()
Status.objects.transit.reset_mock()
# 6. test normal
hdl = MagicMock(return_value=MockHandlerResult(should_return=True, should_sleep=False))
with patch("pipeline.engine.core.runtime.HandlersFactory.handlers_for", MagicMock(return_value=hdl)):
# 6.1. test should return
current_node = IdentifyObject(name="name")
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(False, []))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.subproc_sleep_check.assert_called()
FunctionSwitch.objects.is_frozen.assert_called_once()
process.freeze.assert_not_called()
Status.objects.transit.assert_called_with(
id=current_node.id, to_state=states.RUNNING, start=True, name=current_node.name
)
process.refresh_current_node.assert_called_once_with(current_node.id)
NodeRelationship.objects.build_relationship.assert_called_once_with(
process.top_pipeline.id, current_node.id
)
hdl.assert_called_once_with(process, current_node, None)
process.sleep.assert_not_called()
self.assertEqual(process.current_node_id, current_node.id)
FunctionSwitch.objects.is_frozen.reset_mock()
Status.objects.transit.reset_mock()
NodeRelationship.objects.build_relationship.reset_mock()
hdl.reset_mock()
# 6.2. test should sleep
for should_return in (False, True):
hdl.return_value = MockHandlerResult(
should_return=should_return,
should_sleep=True,
after_sleep_call=MagicMock(),
args=["token1", "token2"],
kwargs={"kwargs": "token3"},
)
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node),
destination_id=uniqid(),
current_node_id=current_node.id,
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(False, []))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.subproc_sleep_check.assert_called()
FunctionSwitch.objects.is_frozen.assert_called_once()
process.freeze.assert_not_called()
Status.objects.transit.assert_called_with(
id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)
)
process.refresh_current_node.assert_called_once_with(current_node.id)
NodeRelationship.objects.build_relationship.assert_called_once_with(
process.top_pipeline.id, current_node.id
)
hdl.assert_called_once_with(process, current_node, None)
process.sleep.assert_called_once_with(adjust_status=True)
hdl.return_value.after_sleep_call.assert_called_once_with("token1", "token2", kwargs="token3")
self.assertEqual(process.current_node_id, current_node.id)
FunctionSwitch.objects.is_frozen.reset_mock()
Status.objects.transit.reset_mock()
NodeRelationship.objects.build_relationship.reset_mock()
hdl.reset_mock()
# 6.3. test execute 3 node and return
nodes = [IdentifyObject(), IdentifyObject(), IdentifyObject()]
hdl.return_value = None
hdl.side_effect = [
MockHandlerResult(should_return=False, should_sleep=False, next_node=nodes[0]),
MockHandlerResult(should_return=False, should_sleep=False, next_node=nodes[1]),
MockHandlerResult(should_return=True, should_sleep=True, next_node=nodes[2]),
]
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(
nodes={
current_node.id: current_node,
nodes[0].id: nodes[0],
nodes[1].id: nodes[1],
nodes[2].id: nodes[2],
}
),
destination_id=uniqid(),
current_node_id=current_node.id,
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(False, []))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_has_calls([mock.call(), mock.call(), mock.call()])
process.subproc_sleep_check.assert_has_calls([mock.call(), mock.call(), mock.call()])
FunctionSwitch.objects.is_frozen.assert_has_calls([mock.call(), mock.call(), mock.call()])
process.freeze.assert_not_called()
Status.objects.transit.assert_has_calls(
[
mock.call(
id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)
),
mock.call(id=nodes[0].id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)),
mock.call(id=nodes[1].id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)),
]
)
process.refresh_current_node.assert_has_calls(
[mock.call(current_node.id), mock.call(nodes[0].id), mock.call(nodes[1].id)]
)
NodeRelationship.objects.build_relationship.assert_has_calls(
[
mock.call(process.top_pipeline.id, current_node.id),
mock.call(process.top_pipeline.id, nodes[0].id),
mock.call(process.top_pipeline.id, nodes[1].id),
]
)
hdl.assert_has_calls(
[
mock.call(process, current_node, None),
mock.call(process, nodes[0], None),
mock.call(process, nodes[1], None),
]
)
process.sleep.assert_called_once_with(adjust_status=True)
self.assertEqual(process.current_node_id, nodes[1].id)
@patch(PIPELINE_BUILD_RELATIONSHIP, MagicMock())
@patch(PIPELINE_ENGINE_IS_FROZEN, MagicMock(return_value=False))
@patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True, extra=MockStatus(loop=11))))
@patch(PIPELINE_STATUS_FAIL, MagicMock())
def __fail_with_node_reach_run_limit(self):
with patch(PIPELINE_SETTING_RERUN_MAX_LIMIT, 10):
current_node = IdentifyObject()
process = MockPipelineProcess(
top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id
)
process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING))
process.subproc_sleep_check = MagicMock(return_value=(False, []))
runtime.run_loop(process)
process.destroy_and_wake_up_parent.assert_not_called()
process.root_sleep_check.assert_called()
process.subproc_sleep_check.assert_called()
FunctionSwitch.objects.is_frozen.assert_called_once()
process.freeze.assert_not_called()
Status.objects.transit.assert_called_with(
id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)
)
Status.objects.fail.assert_called_once_with(current_node, "rerun times exceed max limit: 10")
process.sleep.assert_called_once_with(adjust_status=True)
process.refresh_current_node.assert_not_called()
|
package menu
type ItemId int
const (
MenuItemEasy ItemId = iota
MenuItemNormal
MenuItemHard
MenuItemHighscores
MenuItemExit
)
type Item struct {
isSelected bool
id ItemId
}
func (mi *Item) IsSelected() bool {
return mi.isSelected
}
func (mi *Item) GetId() ItemId {
return mi.id
}
type Menu struct {
items []Item
}
func New() *Menu {
return &Menu{
items: []Item{
Item{isSelected: false, id: MenuItemEasy},
Item{isSelected: true, id: MenuItemNormal},
Item{isSelected: false, id: MenuItemHard},
Item{isSelected: false, id: MenuItemHighscores},
Item{isSelected: false, id: MenuItemExit}}}
}
func (m *Menu) GetItems() []Item {
return m.items
}
func (m *Menu) GetSelectedItemId() ItemId {
var selectedId ItemId
for i := 0; i < len(m.items); i++ {
if m.items[i].isSelected {
selectedId = m.items[i].id
}
}
return selectedId
}
func (m *Menu) SelectPrevItem() {
var selectedMenuItemIndex int
for i := 0; i < len(m.items); i++ {
if m.items[i].isSelected {
m.items[i].isSelected = false
selectedMenuItemIndex = i
break
}
}
if selectedMenuItemIndex == 0 {
m.items[len(m.items)-1].isSelected = true
} else {
m.items[selectedMenuItemIndex-1].isSelected = true
}
}
func (m *Menu) SelectNextItem() {
var selectedMenuItemIndex int
for i := 0; i < len(m.items); i++ {
if m.items[i].isSelected {
m.items[i].isSelected = false
selectedMenuItemIndex = i
break
}
}
if selectedMenuItemIndex == len(m.items)-1 {
m.items[0].isSelected = true
} else {
m.items[selectedMenuItemIndex+1].isSelected = true
}
}
|
<gh_stars>0
export function openFileDialog(accept: string, multiple: boolean, callback: (arg: Event) => void, filePickerRef: any) {
// this function must be called from a user
// activation event (ie an onclick event)
console.log(filePickerRef.current);
filePickerRef.current.type = 'file';
filePickerRef.current.accept = accept;
filePickerRef.current.multiple = multiple;
filePickerRef.current.addEventListener('change', callback);
filePickerRef.current.addEventListener('blur', callback);
filePickerRef.current.dispatchEvent(new MouseEvent('click'));
// Create an input element
// var inputElement = document.createElement('input');
// // Set its type to file
// inputElement.type = 'file';
// // Set accept to the file types you want the user to select.
// // Include both the file extension and the mime type
// inputElement.accept = accept;
// // Accept multiple files
// inputElement.multiple = multiple;
// // set onchange event to call callback when user has selected file
// inputElement.addEventListener('change', callback);
// // set onblur event to call callback when user has selected file on Safari
// inputElement.addEventListener('blur', callback);
// // dispatch a click event to open the file dialog
// inputElement.dispatchEvent(new MouseEvent('click'));
}
|
#!/bin/sh
cd api-gateway; ./gradlew clean build; cd ..
cd auth-server; ./gradlew clean build; cd ..
cd config-server; ./gradlew clean build; cd ..
cd task-webservice; ./gradlew clean build; cd ..
cd user-webservice; ./gradlew clean build; cd ..
cd webservice-registry; ./gradlew clean build; cd ..
cd comments-webservice; ./gradlew clean build; cd ..
|
<filename>ui.apps/src/main/content/jcr_root/apps/__appsFolderName__/components/webpack.resolve/js/utils.js
/**
* Checks if object has any key-value pairs.
*
* @param object
*
* @returns {boolean}
*/
export function isEmpty(object) {
return Object.keys(object).length === 0;
}
export const helloWorld = () => 'Hello world!';
|
name 'delivery-base-build-cookbook'
maintainer '<NAME>'
maintainer_email '<EMAIL>'
license 'Apache 2.0'
description 'Build the delivery-base cookbook'
version '0.1.0'
depends 'delivery-truck'
|
package main
import(
".."
"fmt"
)
func main() {
client := scanpay.NewClient("1153:YHZIUGQw6NkCIYa3mG6CWcgShnl13xuI7ODFUYuMy0j790Q6ThwBEjxfWFXwJZ0W")
client.SetHost("api.test.scanpay.dk") /* Connect to the test-environment instead of production */
data := scanpay.PaymentURLData {
OrderId: "a766409",
Language: "da",
SuccessURL: "https://insertyoursuccesspage.dk",
Items: []scanpay.Item {
{
Name: "Pink Floyd: The Dark Side Of The Moon",
Quantity: 2,
Total: "199.98 DKK",
SKU: "fadf23",
},
{
Name: "巨人宏偉的帽子",
Quantity: 2,
Total: "840 DKK",
SKU: "124",
},
},
Billing: scanpay.Billing{
Name: "<NAME>",
Company: "The Shop A/S",
Email: "<EMAIL>",
Phone: "+4512345678",
Address: []string{"Langgade 23, 2. th"},
City: "Havneby",
Zip: "1234",
State: "",
Country: "DK",
VATIN: "35413308",
GLN: "7495563456235",
},
Shipping: scanpay.Shipping{
Name: "<NAME>",
Company: "The Choppa A/S",
Email: "<EMAIL>",
Phone: "+4587654321",
Address: []string{"Langgade 23, 1. th", "C/O The Choppa"},
City: "Haveby",
Zip: "1235",
State: "",
Country: "DK",
},
}
opts := scanpay.Options{
Headers: map[string]string{
"X-Cardholder-Ip": "192.168.3.11",
},
}
url, err := client.NewURL(&data, &opts)
if err != nil {
fmt.Println("Error:", err)
return
}
fmt.Println(url)
}
|
sap.ui.define([
"com/sap/gtt/app/sample/pof/controller/deliveryItem/TrackingTimeline.controller",
], function (TrackingTimeline) {
"use strict";
var sandbox = sinon.createSandbox();
function stub(object, method, func) {
if (!(method in object)) {
object[method] = function () {};
}
var stubbed = sandbox.stub(object, method);
if (typeof func === "function") {
return stubbed.callsFake(func);
}
return stubbed;
}
QUnit.module("com.sap.gtt.app.pof.controller.deliveryItem.TrackingTimeline", {
beforeEach: function () {
this.oTrackingTimeline = new TrackingTimeline();
this.oTrackingTimeline._oMap = {};
this.oTrackingTimeline.oMapModel = {};
},
afterEach: function () {
sandbox.restore();
this.oTrackingTimeline.destroy();
},
});
QUnit.test("onTimelineItemSelect - select timeline event", function (assert) {
// Arrange
var oFakeEvent = {},
oFakeParameter = {},
oFakeCtx = {},
oFakeData = {};
stub(oFakeEvent, "getParameter").returns(oFakeParameter);
stub(oFakeParameter, "getBindingContext").returns(oFakeCtx);
stub(this.oTrackingTimeline, "_getSelectedStop").returns();
stub(oFakeCtx, "getObject").returns(oFakeData);
// Act
var oResult = this.oTrackingTimeline.onTimelineItemSelect(oFakeEvent);
// Assert
assert.ok(!oResult, "The function doesn't return anything.");
});
QUnit.test("onTimelineItemSelect - select timeline event that exists on the Map", function (assert) {
// Arrange
var oFakeEvent = {},
oFakeParameter = {},
oFakeCtx = {};
var aFakeStops = [
{plannedEventId: "123", eventId: "123", longitude: 55},
];
var oController = this.oTrackingTimeline;
stub(oController._oMap, "setCenterPosition").returns();
stub(oController, "_getSelectedStop").returns({});
stub(oController.oMapModel, "getProperty").returns(aFakeStops);
stub(oFakeEvent, "getParameter").returns(oFakeParameter);
stub(oFakeParameter, "getBindingContext").returns(oFakeCtx);
stub(oFakeCtx, "getObject").returns({});
// Act
var oResult = oController.onTimelineItemSelect(oFakeEvent);
// Assert
assert.ok(!oResult, "The function doesn't return anything.");
assert.ok(oController._oMap.setCenterPosition.calledOnce, "'setCenterPosition' was caalled once");
});
QUnit.test("_getSelectedStop - return selected stop: no plannedEventId", function (assert) {
// Arrange
var oController = this.oTrackingTimeline;
var oFakeData = {
plannedEventId: null,
actualEventId: "123",
};
var aFakeStops = [
{plannedEventId: "123", eventId: "123", longitude: 55},
];
stub(oController.oMapModel, "getProperty").returns(aFakeStops);
// Act
var oSelectedStop = oController._getSelectedStop(oFakeData);
// Assert
assert.ok(!!oSelectedStop, "The function returns anything.");
assert.ok(oSelectedStop.longitude === 55, "Check longitude of event");
});
QUnit.test("_getSelectedStop - return selected stop: has plannedEventId and actualEventId", function (assert) {
// Arrange
var oController = this.oTrackingTimeline;
var oFakeData = {
plannedEventId: "890",
actualEventId: "123",
};
var aFakeStops = [
{plannedEventId: "123", eventId: "123", longitude: 55},
{plannedEventId: "123", eventId: "890", longitude: 48},
];
stub(oController.oMapModel, "getProperty").returns(aFakeStops);
// Act
var oSelectedStop = oController._getSelectedStop(oFakeData);
// Assert
assert.ok(!!oSelectedStop, "The function returns anything.");
assert.ok(oSelectedStop.longitude === 48, "Check longitude of event");
});
QUnit.test("_getSelectedStop - return selected stop: has plannedEventId and doesn't have actualEventId", function (assert) {
// Arrange
var oController = this.oTrackingTimeline;
var oFakeData = {
plannedEventId: "654",
actualEventId: null,
};
var aFakeStops = [
{plannedEventId: "123", eventId: "123", longitude: 55},
{plannedEventId: "123", eventId: "890", longitude: 48},
{plannedEventId: "654", eventId: "345", longitude: 90},
];
stub(oController.oMapModel, "getProperty").returns(aFakeStops);
// Act
var oSelectedStop = oController._getSelectedStop(oFakeData);
// Assert
assert.ok(!!oSelectedStop, "The function returns anything.");
assert.ok(oSelectedStop.eventId === "345", "Check eventId of event");
});
});
|
import requests
from bs4 import BeautifulSoup
def parse_iana_registry(url):
response = requests.get(url)
if response.status_code == 200:
soup = BeautifulSoup(response.content, 'html.parser')
table = soup.find('table', {'class': 'alt'})
if table:
rows = table.find_all('tr')
registry = {}
for row in rows[1:]: # Skip the header row
columns = row.find_all('td')
if len(columns) >= 3:
enterprise_number = columns[0].text.strip()
organization_name = columns[1].text.strip()
description = columns[2].text.strip()
registry[enterprise_number] = {'organization_name': organization_name, 'description': description}
return registry
return None
def get_enterprise_info(enterprise_number, registry):
if enterprise_number in registry:
return registry[enterprise_number]
else:
return {'organization_name': 'Unassigned', 'description': 'This enterprise number is unassigned.'}
def main():
url = 'https://www.iana.org/assignments/enterprise-numbers/enterprise-numbers'
registry = parse_iana_registry(url)
if registry:
enterprise_number = input('Enter the enterprise number: ')
enterprise_info = get_enterprise_info(enterprise_number, registry)
print(f'Organization Name: {enterprise_info["organization_name"]}')
print(f'Description: {enterprise_info["description"]}')
else:
print('Failed to retrieve the IANA Private Enterprise Numbers registry.')
if __name__ == "__main__":
main()
|
//
// Copyright 2016 Kary Foundation, Inc.
// Author: <NAME> <<EMAIL>>
//
namespace KaryGraph {
//
// ─── ADD EVENT TO SVG ───────────────────────────────────────────────────────────
//
/** Adds an ***event*** to the ***element*** */
export function AddEventOnClick ( element: ISnapObject, event: string ) {
document.getElementById( element.id ).setAttribute( 'onClick', event );
}
// ────────────────────────────────────────────────────────────────────────────────
}
|
<filename>pwcracker-worker/src/main/scala/Permutator.scala
object Permutator {
import scala.collection.mutable.ArrayBuffer
val combine: Array[Byte] = (' ' +: (('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9'))).toArray.map(_.toByte)
val ncombine: Int = combine.length
/**
* find the next string in the sequence the non functional way
* but this is Amotarized O(1) the list does not need to be reverse initially also
* I am too lazy to proof this but once every 62 times the number of n will be n+1 will increase
* however to fully run n times is (1/62) to the power of n
* @param st
* @return byte array
*/
def increment(st: Array[Byte]): Array[Byte] = {
val n = st.length - 1
var i = n
var isValid = true
while (i >= 0 && (isValid || i == n) ){
if(st(i) == '9') isValid = true else isValid = false
st(i) = if (nextChar(st(i)) == ' ') nextChar(nextChar(st(i))) else nextChar(st(i))
i = i-1
}
if(isValid) Array('A'.toByte) ++ st else st
}
/**
* Move to the next character
* @param ch
* @return the next character in our permute system
*/
def nextChar(ch:Byte): Byte = {combine((combine.indexOf(ch) + 1) % combine.length)}
/**
* Create a sequence of range of permutated string within start to end
* @param start
* @param end
* @return seq of permutated string
*/
def permuteRange(start: String, end: String, incr: Array[Byte] => Array[Byte]): Seq[String] = {
var ar: ArrayBuffer[String] = ArrayBuffer[String]()
val endAr = end.toCharArray
var current:Array[Byte] = start.getBytes()
while(!(current sameElements endAr)){
ar += current.mkString("")
current = increment(current)
}
ar += current.mkString("")
ar
}
/**
* convert string to a decimal number
* @param st
* @return a decimal number that can be converted back to a string
*/
def convertStToDec(st: String): Long = st.reverse.foldLeft((0, 0:Long)){
case ((i, sum), elt) => (i+1, sum + (combine.indexOf(elt) * math.pow(ncombine, i).toLong))
}._2
/**
* convert a decimal back to a string in our base 62 system
* warning if n%63 == 0 a rubbish will be generated I suggest you skip
* to the next n if that happens
* @param n
* @return a string within our base 62 system or a rubbish string if n%63 == 0
*/
def convertDecToByte(n: Long): Vector[Byte] = {
if (n == 0){
Vector[Byte]()
}else{
convertDecToByte(n / ncombine) :+ combine((n % ncombine).toInt)
}
}
def convertDecToSt(n: Long): String = {
convertDecToByte(n).map(_.toChar).mkString("")
}
}
|
export enum KeyCodes {
BACKSPACE = 'Backspace',
TAB = 'Tab',
RETURN = 'Enter',
ESC = 'Escape',
SPACE = ' ',
PAGE_UP = 'PageUp',
PAGE_DOWN = 'PageDown',
END = 'End',
HOME = 'Home',
LEFT = 'ArrowLeft',
UP = 'ArrowUp',
RIGHT = 'ArrowRight',
DOWN = 'ArrowDown',
DELETE = 'Delete'
}
|
<reponame>Hannah-Abi/python-pro-21<filename>intro/part04-28_distinct_numbers/test/test_distinct_numbers.py
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import textwrap
exercise = 'src.distinct_numbers'
function = 'distinct_numbers'
def get_correct(test_case: list) -> list:
pass
@points('4.distinct_numbers')
class DistinctNumbersTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
cls.module = load_module(exercise, 'en')
def test_0_main_program_ok(self):
ok, line = check_source(self.module)
message = """The code for testing the functions should be placed inside
if __name__ == "__main__":
block. The following row should be moved:
"""
self.assertTrue(ok, message+line)
def test_1_function_exists(self):
try:
from src.distinct_numbers import distinct_numbers
except:
self.assertTrue(False, 'Your code should contain function named as distinct_numbers(my_list: list)')
try:
distinct_numbers = load(exercise, function, 'en')
distinct_numbers([1,2])
except:
self.assertTrue(False, 'Test function call\ndistinct_numbers([1,2])')
def test_2_type_of_return_value(self):
distinct_numbers = load(exercise, function, 'en')
val = distinct_numbers([1,2])
self.assertTrue(type(val) == list, f"Function {function} does not return list with parameter value [1,2].")
def test_3_numbers_1(self):
test_cases = {(1,2,3,1,2,3): [1,2,3],
(5,6,7,8,8,9,9,5): [5,6,7,8,9],
(1,10,1,100,1,1000): [1,10,100,1000]}
for test_case in test_cases:
with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
reload_module(self.module)
output_at_start = get_stdout()
longest_ones = load(exercise, function, 'en')
correct = test_cases[test_case]
test_case2 = test_case[:]
test_result = longest_ones(list(test_case))
self.assertEqual(correct, test_result, f"The result {test_result} does not match with the expected resul {correct} when calling distinct_numbers({test_case2})")
self.assertEqual(test_case, test_case2, f"Function should not change the original list. The list should be {list(test_case2)} but it is {list(test_case)}.")
def test_4_numbers_2(self):
test_cases = {(3,2,1,3,2,1,3,2,1): [1,2,3],
(9,8,7,6,9,8,7,6,10,3,3,3,3,1): [1,3,6,7,8,9,10],
(-1,-2,-1,-2,-3,-3,-3,0,0): [-3,-2,-1,0]}
for test_case in test_cases:
with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
reload_module(self.module)
output_at_start = get_stdout()
longest_ones = load(exercise, function, 'en')
correct = test_cases[test_case]
test_case2 = test_case[:]
test_result = longest_ones(list(test_case))
self.assertEqual(correct, test_result, f"The result {test_result} does not match with the expected resul {correct} when calling distinct_numbers({test_case2})")
self.assertEqual(test_case, test_case2, f"Function should not change the original list. The list should be {list(test_case2)} but it is {list(test_case)}.")
if __name__ == '__main__':
unittest.main()
|
<gh_stars>10-100
/**
* @module
*/
/**
* Protège les caractères spéciaux d'une chaine de caractères pour les
* expressions rationnelles.
*
* @param {string} pattern La chaine de caractères.
* @returns {string} La chaine de caractères avec les caractères spéciaux
* protégés.
*/
export const quote = function (pattern) {
return pattern.replace(/[$()*+.?[\\\]^{|}]/gu, "\\$&");
};
/**
* Enlève les balises utilisées par Kodi pour mettre en forme des textes.
*
* @param {string} text Le texte qui sera nettoyé.
* @returns {string} Le texte nettoyé.
*/
export const strip = function (text) {
return text.replaceAll(/\[B\](?<t>.*?)\[\/B\]/gu, "$<t>")
.replaceAll(/\[I\](?<t>.*?)\[\/I\]/gu, "$<t>")
.replaceAll(/\[LIGHT\](?<t>.*?)\[\/LIGHT\]/gu, "$<t>")
.replaceAll(/\[COLOR [^\]]+\](?<t>.*?)\[\/COLOR\]/gu, "$<t>")
.replaceAll(/\[UPPERCASE\](?<t>.*?)\[\/UPPERCASE\]/gu, "$<t>")
.replaceAll(/\[LOWERCASE\](?<t>.*?)\[\/LOWERCASE\]/gu, "$<t>")
.replaceAll(/\[CAPITALIZE\](?<t>.*?)\[\/CAPITALIZE\]/gu, "$<t>")
.replaceAll("[CR]", " ")
.trim();
};
|
<reponame>rjointer2/EMS
import * as actionTypes from '../constants/loginConstants';
// the action will be dispatched from the stre
// the reducer will take a state that will be an empty array and an action
export const loginRequestReducer = ( state = { userLoggedIn: []}, action ) => {
// checking the actions's request
switch(action.type){
case actionTypes.GET_LOGIN_REQUEST:
return {
// we send a property of loading as true and a empty
loading: true,
userLoggedIn: []
}
case actionTypes.GET_LOGIN_SUCCESS:
return {
loading: false,
// from our action function the action's payload return is a user
userLoggedIn: action.payload
}
case actionTypes.GET_LOGIN_REJECT:
return {
loading: false,
// from our action function the action's payload return is a user
error: action.payload
}
// If not a user request then the array is not empty object
default:
return state;
}
}
|
#include "systems/CleanupSystem.hh"
#include <queue>
#include "advanced/transform.hh"
#include "glow/common/log.hh"
void gamedev::CleanupSystem::AddEntity(InstanceHandle& handle, Signature entitySignature) { mEntities.insert(handle); }
void gamedev::CleanupSystem::RemoveEntity(InstanceHandle& handle, Signature entitySignature) { mEntities.erase(handle); }
void gamedev::CleanupSystem::RemoveEntity(InstanceHandle& handle) { mEntities.erase(handle); }
void gamedev::CleanupSystem::RemoveAllEntities() { mEntities.clear(); }
void gamedev::CleanupSystem::Init(std::shared_ptr<EngineECS>& ecs) {
mECS = ecs;
}
int gamedev::CleanupSystem::Update()
{
auto t0 = std::chrono::steady_clock::now();
std::vector<InstanceHandle> destroy;
for (const auto& handle : mEntities)
{
if (mECS->GetInstance(handle).destroy)
{
destroy.push_back(handle);
}
}
for (auto& handle : destroy)
{
mECS->DestroyInstance(handle);
}
auto tn = std::chrono::steady_clock::now();
return std::chrono::duration_cast<std::chrono::microseconds>(tn - t0).count();
}
|
class CarsUnder20k::Car
attr_accessor :name ,:price, :gasmileage, :url
def self.thisyear
self.scrape_cars
end
#scape kbb and then return info based on that data
def self.scrape_cars
cars = []
cars << self.scrape_autotrader
#go to kbb, find the car
#extract the properties
#instantiate a car
cars
end
def self.scrape_autotrader
doc = Nokogiri::HTML(open("https://www.autotrader.com/cars-for-sale/cars+under+20000"))
binding.pry
end
end
#car_1 = self.new
#car_1.name = "Hyundai Accent"
#car_1.price = "$15,915"
#car_1.gasmileage = "28 City / 37 Highway"
#car_1.url = "https://www.kbb.com/hyundai/accent/2019/"
#car_2 = self.new
#car_2.name = "<NAME>"
#car_2.price = "$16,380"
#car_2.gasmileage = "30 City / 39 Highway"
#car_2.url = "https://www.kbb.com/toyota/yaris/2019/"
#car_3 = self.new
#car_3.name = "<NAME> 5-Door"
#car_3.price = "$19,980"
#car_3.gasmileage = "24 City / 32 Highway"
#car_3.url = "https://www.kbb.com/subaru/impreza/2019/"
#car_4 = self.new
#car_4.name = "<NAME>"
#car_4.price = "$19,640"
#car_4.gasmileage = "30 City / 40 Highway"
#car_4.url = "https://www.kbb.com/volkswagen/jetta/2019/"
#car_5 = self.new
#car_5.name = "<NAME>"
#car_5.price = "$18,870"
#car_5.gasmileage = "28 City / 38 Highway"
#car_5.url = "https://www.kbb.com/chevrolet/cruze/2019/"
#[car_1, car_2, car_3, car_4, car_5]
|
#!/bin/sh
# Start the QED server from inside its own directory.
# Abort if the directory is missing so the jar is never launched from the
# wrong working directory (unchecked `cd` would silently fall through).
cd qedserver || exit 1
java -jar start.jar
cd ..
|
<reponame>GergoHong/cruise-control<filename>cruise-control/src/main/java/com/linkedin/kafka/cruisecontrol/monitor/sampling/aggregator/MetricCompletenessChecker.java
/*
* Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
*/
package com.linkedin.kafka.cruisecontrol.monitor.sampling.aggregator;
import com.linkedin.kafka.cruisecontrol.monitor.ModelGeneration;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
/**
 * A class that helps compute the completeness of the metrics in the {@link MetricSampleAggregator}.
 * It tracks, per snapshot window, which partitions of which topics have valid metric samples, and
 * derives from that how many recent windows meet a minimum monitored-partition threshold.
 */
public class MetricCompletenessChecker {
  private static final Logger LOG = org.slf4j.LoggerFactory.getLogger(MetricCompletenessChecker.class);
  // The following two data structures help us to quickly identify how many valid partitions are there in each window.
  // Window start time -> (topic -> set of partition ids with valid metrics). Ordered newest window first.
  private final ConcurrentSkipListMap<Long, Map<String, Set<Integer>>> _validPartitionsPerTopicByWindows;
  // Window start time -> total number of valid partitions. Derived lazily from the map above. Newest first.
  private final SortedMap<Long, Integer> _validPartitionsByWindows;
  // Upper bound on the number of windows reported by numValidWindows().
  private final int _maxNumSnapshots;
  // Model generation for which _validPartitionsByWindows is current; null forces a recompute.
  private volatile ModelGeneration _modelGeneration;
  // The currently active (still collecting) snapshot window; excluded from valid-window counting.
  private volatile long _activeSnapshotWindow;
  public MetricCompletenessChecker(int maxNumSnapshots) {
    _validPartitionsPerTopicByWindows = new ConcurrentSkipListMap<>(Comparator.reverseOrder());
    _validPartitionsByWindows = new TreeMap<>(Comparator.reverseOrder());
    _modelGeneration = null;
    _maxNumSnapshots = maxNumSnapshots;
  }
  /**
   * Get the number of valid windows that meets the minimum monitored partitions percentage requirement.
   *
   * @param minMonitoredPartitionsPercentage the minimum monitored partitions percentage.
   * @param totalNumPartitions the total number of partitions.
   * @return the number of the most recent valid windows.
   */
  synchronized public int numValidWindows(ModelGeneration modelGeneration,
                                          Cluster cluster,
                                          double minMonitoredPartitionsPercentage,
                                          int totalNumPartitions) {
    updateMetricCompleteness(cluster, modelGeneration);
    int i = 0;
    double minMonitoredNumPartitions = totalNumPartitions * minMonitoredPartitionsPercentage;
    // Windows are iterated newest first; stop at the first non-active window below the
    // threshold, or once _maxNumSnapshots windows have been counted.
    for (Map.Entry<Long, Integer> entry : _validPartitionsByWindows.entrySet()) {
      long window = entry.getKey();
      if ((entry.getValue() < minMonitoredNumPartitions && window != _activeSnapshotWindow) || i == _maxNumSnapshots) {
        break;
      }
      // The active window is skipped (not counted) but does not terminate the scan.
      if (window != _activeSnapshotWindow) {
        i++;
      }
    }
    return i;
  }
  /**
   * Get the monitored-partition fraction of each window, keyed by window start time.
   */
  synchronized public SortedMap<Long, Double> monitoredPercentages(ModelGeneration modelGeneration,
                                                                   Cluster cluster,
                                                                   int totalNumPartitions) {
    updateMetricCompleteness(cluster, modelGeneration);
    TreeMap<Long, Double> percentages = new TreeMap<>();
    for (Map.Entry<Long, Integer> entry : _validPartitionsByWindows.entrySet()) {
      percentages.put(entry.getKey(), (double) entry.getValue() / totalNumPartitions);
    }
    return percentages;
  }
  /**
   * Get number of snapshot windows in a period.
   */
  synchronized public int numWindows(long from, long to) {
    int i = 0;
    // Read the volatile once so the comparison is stable for the whole scan.
    long activeSnapshotWindow = _activeSnapshotWindow;
    for (long window : _validPartitionsByWindows.keySet()) {
      // Exclude the active window.
      if (window > from && window <= to && window != activeSnapshotWindow) {
        i++;
      }
    }
    return i;
  }
  /**
   * Rebuild partition completeness for all the given windows and partitions from scratch.
   */
  synchronized void refreshAllPartitionCompleteness(MetricSampleAggregator aggregator,
                                                    Set<Long> windows,
                                                    Set<TopicPartition> partitions) {
    _validPartitionsPerTopicByWindows.clear();
    for (long window : windows) {
      for (TopicPartition tp : partitions) {
        updatePartitionCompleteness(aggregator, window, tp);
      }
    }
    // We need to reset the model generation here. This is because previously we did not populate the partition completeness
    // map and user may have queried and set the model generation to be up to date.
    _modelGeneration = null;
  }
  /**
   * Remove a snapshot window that has been evicted from the metric sample aggregator.
   */
  void removeWindow(long snapshotWindow) {
    _validPartitionsPerTopicByWindows.remove(snapshotWindow);
  }
  /**
   * Update the valid partition number of a topic for a window.
   */
  void updatePartitionCompleteness(MetricSampleAggregator aggregator,
                                   long window,
                                   TopicPartition tp) {
    _activeSnapshotWindow = aggregator.activeSnapshotWindow();
    _validPartitionsPerTopicByWindows.computeIfAbsent(window, w -> new ConcurrentHashMap<>())
                                     .compute(tp.topic(), (t, set) -> {
                                       Set<Integer> s = set == null ? new HashSet<>() : set;
                                       // A partition counts as valid unless validation found no valid imputation at all.
                                       MetricSampleAggregationResult.Imputation imputation = aggregator.validatePartitions(window, tp);
                                       if (imputation != MetricSampleAggregationResult.Imputation.NO_VALID_IMPUTATION) {
                                         LOG.debug("Added partition {} to valid partition set for window {} with imputation {}",
                                                   tp, window, imputation);
                                         // HashSet is not thread-safe; synchronize additions to the shared set.
                                         synchronized (s) {
                                           s.add(tp.partition());
                                         }
                                       }
                                       return s;
                                     });
  }
  // Recompute _validPartitionsByWindows when the model generation has changed since the last call.
  private void updateMetricCompleteness(Cluster cluster,
                                        ModelGeneration modelGeneration) {
    if (_modelGeneration == null || !_modelGeneration.equals(modelGeneration)) {
      _modelGeneration = modelGeneration;
      _validPartitionsByWindows.clear();
      for (Map.Entry<Long, Map<String, Set<Integer>>> entry : _validPartitionsPerTopicByWindows.entrySet()) {
        long window = entry.getKey();
        for (String topic : entry.getValue().keySet()) {
          updateWindowMetricCompleteness(cluster, window, topic);
        }
      }
    }
  }
  // Add a topic's partitions to the window's valid-partition count. A topic contributes only
  // when every one of its partitions is valid in that window (all-or-nothing per topic).
  private void updateWindowMetricCompleteness(Cluster cluster, long window, String topic) {
    int numValidPartitions = _validPartitionsPerTopicByWindows.get(window).get(topic).size();
    List<PartitionInfo> partitions = cluster.partitionsForTopic(topic);
    // The topic may have been deleted so the cluster does not have it.
    if (partitions != null) {
      int numPartitions = partitions.size();
      _validPartitionsByWindows.compute(window, (w, v) -> {
        int newValue = (v == null ? 0 : v);
        return numValidPartitions == numPartitions ? newValue + numPartitions : newValue;
      });
    }
  }
}
|
# This file should be sourced before using go commands
# it ensures that bazel's version of go is used
EXEC_ROOT="$(bazel info execution_root)"
if [[ ! -e ${EXEC_ROOT} ]]; then
    echo "*** ${EXEC_ROOT} does not exist - did you forget to bazel build ... ?"
    # This file is meant to be sourced: 'exit' here would terminate the caller's
    # shell. 'return' works when sourced; fall back to 'exit' when executed directly.
    return 1 2>/dev/null || exit 1
fi
# Point GOROOT at the Go toolchain that bazel downloaded into its execution root.
export GOROOT="$(find ${EXEC_ROOT}/external -type d -name 'go1_*')"
export PATH=${GOROOT}/bin:${PATH}
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    echo "*** Calling ${BASH_SOURCE[0]} directly has no effect. It should be sourced."
    echo "Using GOROOT: ${GOROOT}"
    go version
fi
|
#!/usr/bin/env bash
# End-to-end OTA test: build chip-tool plus OTA provider/requestor apps,
# commission both, trigger an update, and verify the downloaded image matches.
PASSCODE=${1:-20202021}
DISCRIMINATOR=${2:-42}
UDP_PORT=${3:-5560}
OTA_DOWNLOAD_PATH=${4:-"/tmp/test.bin"}
FIRMWARE_BIN="my-firmware.bin"
FIRMWARE_OTA="my-firmware.ota"
OTA_PROVIDER_APP="chip-ota-provider-app"
OTA_PROVIDER_FOLDER="out/ota_provider_debug"
OTA_REQUESTOR_APP="chip-ota-requestor-app"
OTA_REQUESTOR_FOLDER="out/ota_requestor_debug"
CHIP_TOOL_APP="chip-tool"
CHIP_TOOL_FOLDER="out"
# Clean up leftovers from previous runs.
killall -e "$OTA_PROVIDER_APP" "$OTA_REQUESTOR_APP"
rm -f "$FIRMWARE_OTA" "$FIRMWARE_BIN" "$OTA_DOWNLOAD_PATH"
# The log directory must exist before tee writes into it.
mkdir -p /tmp/ota
scripts/examples/gn_build_example.sh examples/chip-tool "$CHIP_TOOL_FOLDER"
scripts/examples/gn_build_example.sh examples/ota-requestor-app/linux "$OTA_REQUESTOR_FOLDER" chip_config_network_layer_ble=false
scripts/examples/gn_build_example.sh examples/ota-provider-app/linux "$OTA_PROVIDER_FOLDER" chip_config_network_layer_ble=false
echo "Test" >"$FIRMWARE_BIN"
# -f: do not fail/complain when no KVS files are left over from a previous run.
rm -f /tmp/chip_*
./src/app/ota_image_tool.py create -v 0xDEAD -p 0xBEEF -vn 1 -vs "1.0" -da sha256 "$FIRMWARE_BIN" "$FIRMWARE_OTA"
if [ ! -f "$FIRMWARE_OTA" ]; then
    exit 1
fi
./"$OTA_PROVIDER_FOLDER"/"$OTA_PROVIDER_APP" -f "$FIRMWARE_OTA" | tee /tmp/ota/provider-log.txt &
echo "Commissioning Provider"
./"$CHIP_TOOL_FOLDER"/"$CHIP_TOOL_APP" pairing onnetwork 1 "$PASSCODE" | tee /tmp/ota/chip-tool-commission-provider.txt
if grep "Device commissioning completed with success" /tmp/ota/chip-tool-commission-provider.txt; then
    echo Provider Commissioned
else
    echo Provider not commissioned properly
fi
# Grant the requestor (any node on the fabric) operate privilege on the provider.
./"$CHIP_TOOL_FOLDER"/"$CHIP_TOOL_APP" accesscontrol write acl '[{"fabricIndex": 1, "privilege": 5, "authMode": 2, "subjects": [112233], "targets": null}, {"fabricIndex": 1, "privilege": 3, "authMode": 2, "subjects": null, "targets": null}]' 1 0
stdbuf -o0 ./"$OTA_REQUESTOR_FOLDER"/"$OTA_REQUESTOR_APP" --discriminator "$DISCRIMINATOR" --secured-device-port "$UDP_PORT" --KVS /tmp/chip_kvs_requestor --otaDownloadPath "$OTA_DOWNLOAD_PATH" | tee /tmp/ota/requestor-log.txt &
echo "Commissioning Requestor"
./"$CHIP_TOOL_FOLDER"/"$CHIP_TOOL_APP" pairing onnetwork-long 2 "$PASSCODE" "$DISCRIMINATOR" | tee /tmp/ota/chip-tool-commission-requestor.txt
if grep "Device commissioning completed with success" /tmp/ota/chip-tool-commission-requestor.txt; then
    echo Requestor Commissioned
else
    echo Requestor not commissioned properly
fi
echo "Sending announce-ota-provider"
./"$CHIP_TOOL_FOLDER"/"$CHIP_TOOL_APP" otasoftwareupdaterequestor announce-ota-provider 1 0 0 0 2 0 | tee /tmp/ota/chip-tool-announce-ota.txt
# Wait (max 30s) for the requestor to report the completed download.
timeout 30 grep -q "OTA image downloaded to" <(tail -n0 -f /tmp/ota/requestor-log.txt)
echo "Exiting, logs are in /tmp/ota/"
killall -e "$OTA_PROVIDER_APP" "$OTA_REQUESTOR_APP"
# The test passes when the downloaded image is byte-identical to the source firmware.
if cmp "$OTA_DOWNLOAD_PATH" "$FIRMWARE_BIN"; then
    echo Test passed && exit 0
else
    echo Test failed && exit 1
fi
|
<gh_stars>1-10
import { Component, OnInit } from '@angular/core';
import { ContactService } from '../services/contact.service';
import { FormControl, FormGroup, FormBuilder, Validators } from '@angular/forms';
// Custom Angular validator: rejects values containing characters usable for
// injection ('\', '<', '>', '&'). Per the validator contract it returns an
// error object when the value is invalid and null when it is acceptable.
// Fixes: the original crashed on a null control value (null.indexOf), and
// iterated the array with for...in (indices) instead of for...of (values).
function containsValidCharacters(c: FormControl) {
  const specialChars = ['\\', '<', '>', '&'];
  const value = c.value;
  // Null/undefined values are left to Validators.required; nothing to scan.
  if (value == null) {
    return null;
  }
  for (const char of specialChars) {
    if (value.indexOf(char) !== -1) {
      // Keep the original error key — templates/callers look it up by name.
      return { containsValidCharacters: true };
    }
  }
  return null;
}
@Component({
  selector: 'app-contact',
  templateUrl: './contact.component.html',
  styleUrls: ['./contact.component.css']
})
export class ContactComponent implements OnInit {
  // Contact details displayed in the template; populated from ContactService.
  public email: string;
  phone: string;
  address: string;
  // True once the user has submitted the form (drives template state).
  messageSend = false;
  // Local copy of the message the user is typing.
  message = '';
  // Reactive form group wrapping the single message control below.
  controlGroup: FormGroup;
  public messageCtrl: FormControl;
  constructor(contactService: ContactService, fb: FormBuilder) {
    this.email = contactService.email ;
    this.phone = contactService.phone ;
    this. address = contactService.address;
    // Message is required and must not contain special characters (custom validator).
    this.messageCtrl = fb.control('', [Validators.required, containsValidCharacters]);
    this.controlGroup = fb.group({
      messageCtrl: this.messageCtrl
    });
  }
  ngOnInit() {
  }
  // Sync the local message copy from the textarea's input event.
  updateTextContent(data: any) {
    this.message = data.target.value;
    console.log(this.message);
  }
  // Mark the message as sent. NOTE(review): no backend call is made here —
  // presumably sending is handled elsewhere or not yet implemented; confirm.
  sendMessage() {
    console.log('message send: ' + this.message);
    this.messageSend = true;
  }
}
|
class BankAccount:
    """Minimal in-memory account tracking a balance and a transaction count."""

    def __init__(self):
        # Running balance and the number of deposits/withdrawals performed.
        self.balance = 0
        self.transaction_count = 0

    def _apply(self, delta):
        # Internal helper: adjust the balance and record one transaction.
        self.balance += delta
        self.transaction_count += 1

    def deposit(self, amount):
        """Add ``amount`` to the balance (counts as one transaction)."""
        self._apply(amount)

    def withdraw(self, amount):
        """Subtract ``amount`` from the balance (no overdraft check)."""
        self._apply(-amount)

    def get_balance(self):
        """Return the current balance."""
        return self.balance

    def get_transaction_count(self):
        """Return how many deposits/withdrawals have been made."""
        return self.transaction_count
|
# Query the MercadoPago Payments Search API, sorted by creation date (descending)
# and filtered by external reference. Substitute ACCESS_TOKEN and ID_REF before use.
curl -X GET \
  'https://api.mercadopago.com/v1/payments/search?access_token=ACCESS_TOKEN&sort=date_created&criteria=desc&external_reference="ID_REF"'
|
def custom_dot(A, B):
    """Multiply matrix A (m x n) by matrix B (n x p), returning the m x p product.

    Both matrices are lists of row lists.

    Raises:
        ValueError: if A's column count does not match B's row count.
    """
    if len(A[0]) != len(B):
        raise ValueError("Number of columns in A must be equal to the number of rows in B")
    # Each result cell is the dot product of a row of A with a column of B.
    return [
        [sum(row[k] * B[k][j] for k in range(len(B))) for j in range(len(B[0]))]
        for row in A
    ]
# Test the custom_dot function
A = [[1, 2], [3, 4]]
B = [[5, 6], [7, 8]]
R = [[19, 22], [43, 50]]
assert custom_dot(A, B) == R
# A 2-vector must be shaped as a 2x1 column matrix to right-multiply the 2x2 A.
# (The original passed [u] — a 1x2 row — which fails custom_dot's dimension
# check and raised ValueError, so this assertion could never run.)
u = [1, 1]
Ru = [3, 7]
assert custom_dot(A, [[x] for x in u]) == [[y] for y in Ru]
|
package stack
// Stack data structure
// Stack describes a LIFO (last-in, first-out) collection of arbitrary values.
type Stack interface {
	// Push places one or more values on top of the stack.
	Push(...interface{})
	// Pop removes and returns the top value.
	Pop() interface{}
	// Peek returns the top value without removing it.
	Peek() interface{}
	// Values returns the stacked values.
	Values() []interface{}
	// Length returns the number of stacked values.
	Length() int
	// Empty reports whether the stack holds no values.
	Empty() bool
	// Clear removes all values from the stack.
	Clear()
}
|
<reponame>MacKentoch/reactNativeReduxSidemenuTabbarStarter
'use strict';
import React, {
PropTypes,
Component
} from 'react';
import {
StyleSheet,
View
} from 'react-native';
import shallowCompare from 'react-addons-shallow-compare';
import Icon from 'react-native-vector-icons/Ionicons';
// A single iOS tab-bar item that wraps Icon.TabBarItemIOS and renders its
// children inside the tab content area.
class TabBarItem extends Component {
  // Shallow prop/state comparison: skip re-rendering when nothing changed.
  shouldComponentUpdate(nextProps, nextState) {
    return shallowCompare(this, nextProps, nextState);
  }
  render() {
    const {
      iconName,
      selectedIconName,
      title,
      selected,
      allowFontScaling,
      children
    } = this.props;
    return (
      <Icon.TabBarItemIOS
        iconName={iconName}
        selectedIconName={selectedIconName}
        title={title}
        selected={selected}
        allowFontScaling={allowFontScaling}
        onPress={this.handlesOnPress}>
        <View style={styles.tabContent}>
          { children }
        </View>
      </Icon.TabBarItemIOS>
    );
  }
  // Class-property arrow keeps `this` bound when passed as a callback;
  // reports this tab's id to the parent-supplied onPress handler.
  handlesOnPress = () => {
    const { tabId, onPress } = this.props;
    onPress(tabId);
  }
}
// Tab content fills the available space on a white background.
const styles = StyleSheet.create({
  tabContent: {
    backgroundColor: '#fff',
    flex: 1
  },
  // NOTE(review): tabText is defined but not referenced in this component —
  // confirm it is used elsewhere before removing.
  tabText: {
    color: '#4A4A4A'
  }
});
// Runtime prop validation: the parent must supply the tab id, icon names,
// selection state and an onPress handler.
TabBarItem.propTypes = {
  tabId: PropTypes.string.isRequired,
  iconName: PropTypes.string.isRequired,
  selectedIconName: PropTypes.string.isRequired,
  title: PropTypes.string,
  selected: PropTypes.bool.isRequired,
  allowFontScaling: PropTypes.bool,
  onPress: PropTypes.func.isRequired,
  children: PropTypes.node
};
// Sensible defaults for the optional props.
TabBarItem.defaultProps = {
  title: '',
  allowFontScaling: false
};
export default TabBarItem;
|
<gh_stars>0
// NOTE(review): databaseConnection is imported but not used in this chunk —
// presumably intended for upcoming persistence code; confirm.
const { databaseConnection } = require("./connections")
// In-memory collection of films ("filmes") — TODO confirm intended key/schema.
const filmes = {}
|
<reponame>Darian1996/mercyblitz-gp-public
package com.darian.springbootjmx.mBean;
/**
 * JMX management interface for the Hello MBean. Per the MBean naming convention,
 * the getValue/setValue pair exposes a read/write attribute named {@code Value},
 * and greeting() is exposed as a management operation.
 */
public interface HelloMBean {
    /** Management operation: returns a greeting string. */
    public String greeting();
    /** Sets the {@code Value} attribute. */
    public void setValue(String value);
    /** Gets the {@code Value} attribute. */
    public String getValue();
}
|
// Legacy leave-application date logic, kept commented out for reference.
// Known limitation: it does not subtract public holidays.
// $(document).ready(function(){
// //plugin for start Date
// $('#startDate').daterangepicker({
// singleDatePicker: true,
// calender_style: "picker_4",
// // minDate: new Date(),
// isInvalidDate: function(date){
// /*
// validates the following dates
// 1) 1st Jan- 2nd Jan
// 2) 1st May
// 3) 25th Dec - 31st Dec
// 4) 1st June
// 5) 20th Oct
// 6) 12th Dec
// */
// if((date.day() == 0 )|| (date.day() == 6)||(date.format('MM-DD') === '01-01') || (date.format('MM-DD') === '01-02') || (date.format('MM-DD') === '12-25')|| (date.format('MM-DD') === '12-12') || (date.format('MM-DD') === '12-26') || (date.format('MM-DD') === '12-31')|| (date.format('MM-DD') === '05-01')|| (date.format('MM-DD') === '06-01') || (date.format('MM-DD') === '10-20')){
// // || (date.format('MM-DD') === '12-27') || (date.format('MM-DD') === '12-28')|| (date.format('MM-DD') === '12-29')|| (date.format('MM-DD') === '12-30') ||
// return true;
// }else{
// return false;
// }
// },
// }, function(start, end, label) {
// //console.log(start.toISOString(), end.toISOString(), label);
// });
// //plugin for start date
// //plugin for end date
// $('#endDate').daterangepicker({
// singleDatePicker: true,
// calender_style: "picker_4",
// // minDate: new Date(),
// isInvalidDate: function(date){
// /*
// validates the following dates
// 1) 1st Jan- 2nd Jan
// 2) 1st May
// 3) 25th Dec - 31st Dec
// 4) 1st June
// 5) 20th Oct
// 6) 12th Dec
// */
// if((date.day() == 0 )|| (date.day() == 6)||(date.format('MM-DD') === '01-01') || (date.format('MM-DD') === '01-02') || (date.format('MM-DD') === '12-25')|| (date.format('MM-DD') === '12-12') || (date.format('MM-DD') === '12-26') || (date.format('MM-DD') === '12-31')|| (date.format('MM-DD') === '05-01')|| (date.format('MM-DD') === '06-01') || (date.format('MM-DD') === '10-20')){
// // || (date.format('MM-DD') === '12-27') || (date.format('MM-DD') === '12-28')|| (date.format('MM-DD') === '12-29')|| (date.format('MM-DD') === '12-30')
// return true;
// }else{
// return false;
// }
// },
// }, function(start, end, label) {
// //console.log(start.toISOString(), end.toISOString(), label);
// });
// //plugin for end date
// $('#startDate').prop('disabled',true);
// $('#endDate').prop('disabled',true);
// $('#daysAvaliable').prop('disabled', true);
// $('#daysToApply').prop('disabled', true);
// $('#daysRemaining').prop('disabled', true);
// $('#returnDate').prop('disabled', true);
// $("#applyLeave").prop('disabled', true);
// $("#startDate").change(function(){
// $('#endDate').val('');
// $('#daysToApply').val('');
// $('#daysRemaining').val('');
// $('#returnDate').val('');
// $("#applyLeave").prop('disabled', true);
// $daysNID = $("#absenceReason").val();
// $daysNID = $daysNID.split("k");
// $leavedays = Math.ceil($daysNID[0]);
// $leaveTypeID = $daysNID[1];
// console.log($leaveTypeID);
// if($leaveTypeID == "MATERNITY" || $leaveTypeID == "PATERNITY"){
// console.log("disable entry of end date");
// $startDate = $('#startDate').val();
// $endDate = $('#endDate').val();
// //format date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $startDate = $month+'/'+$dayofWeek+'/'+$year;
// // format date
// $expectedReturnDate = getReturnDateIncludingWeekends($startDate,$leavedays);
// // console.log("getReturnDateIncludingWeekends has run "+getReturnDateIncludingWeekends($startDate,$leavedays));
// $endAndReturnDate = $expectedReturnDate.split('k');
// $("#daysToApply").val($leavedays);
// $("#returnDate").val($endAndReturnDate[0]);
// $("#endDate").val($endAndReturnDate[1]);
// $('#daysRemaining').val(0);
// $("#applyLeave").prop('disabled', false);
// }else{
// $("#endDate").prop('disabled', false);
// $days = $("#daysToApply").val();
// if($days == null || $days == undefined || $days == ""){
// // console.log("undefined");
// }else{
// $startDate = $('#startDate').val();
// //format date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $newDate = $month+'/'+$dayofWeek+'/'+$year;
// //format date
// $expectedReturnDate = getDateFromStartDateLeaveDays($newDate,$days);
// $expectedReturnDate = $expectedReturnDate.split("k");
// $("#returnDate").val($expectedReturnDate[0]);
// //sex
// //$("#endDate").val($expectedReturnDate[1]);
// }
// }
// });
// //apply leave
// //auto populates the number of days field during leave application
// $("#absenceReason").change(function(){
// $('#startDate').prop('disabled',false);
// $('#endDate').prop('disabled',true);
// $('#endDate').val('');
// $('#startDate').val('');
// $('#daysToApply').val('');
// $('#daysRemaining').val('');
// $('#daysAvaliable').val('');
// $('#returnDate').val('');
// $("#applyLeave").prop('disabled', true);
// $daysNID = $("#absenceReason").val();
// $daysNID = $daysNID.split("k");
// $leavedays = Math.ceil($daysNID[0]);
// $leaveTypeID = $daysNID[1];
// $("#daysAvaliable").val($leavedays);
// if($leaveTypeID == "MATERNITY" || $leaveTypeID == "PATERNITY"){
// $("#daysToApply").prop('disabled', true);
// $('#endDate').prop('disabled', true);
// $startDate = $('#startDate').val();
// //format date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $startDate = $month+'/'+$dayofWeek+'/'+$year;
// // format date
// if($startDate === '/undefined/undefined' || $startDate === '/undefined/undefined' || $startDate === '/undefined/undefined'){
// }else{
// $expectedReturnDate = getDateFromStartDateLeaveDays($startDate,$leavedays);
// $endAndReturnDate = $expectedReturnDate.split('k');
// console.log(" End and return Date "+$endAndReturnDate);
// $("#returnDate").val($endAndReturnDate[0]);
// $("#endDate").val($endAndReturnDate[1]);
// $("#applyLeave").prop('disabled', false);
// }
// }else{
// $('#daysToApply').attr('max', $leavedays);
// $('#daysToApply').attr('min', parseInt(0));
// // $("#daysToApply").prop('disabled', false);
// }
// });
// //auto populates the number of days field during leave application
// window.getNumberOfHolidaysWithinDateRange = function(){
// //ADJUSTS FOR HOLIDAYS
// //get the holidays
// //check to see if holidays fall within the date range applied
// //do necessary adjustements
// $holidaysWithinRange = 0;
// $startDate = $("#startDate").val();
// $endDate = $("#endDate").val();
// $startDateArray = $startDate.split("/");
// $endDateArray = $endDate.split("/");
// $currentYear = new Date().getFullYear();
// $k = 0;
// while($k < $holidays.length){
// //loop through holidays to check if any falls within range
// $holidayDate = $holidays[$k]['holidayDate'];//compare this to the start date end date range provided
// $holidayYear = $holidays[$k]['year'];
// console.log("$holidayDate "+$holidayDate);
// $holidayDateArray = $holidayDate.split("-");
// // console.log("Start Date broken down: Year "+$startDateArray[2]+" Month "+$startDateArray[0]+" Day "+$startDateArray[1]);
// // console.log("End Date broken down: Year "+$endDateArray[2]+" Month "+$endDateArray[0]+" Day "+$endDateArray[1]);
// // console.log("Check year broken down: Year "+$currentYear+" Month "+$holidayDateArray[1]+" Day "+$holidayDateArray[0]);
// //FromDate >= @startDate AND ToDate <= @endDate
// var from = new Date($startDateArray[2], parseInt($startDateArray[0])-1, $startDateArray[1]); // -1 because months are from 0 to 11
// var to = new Date($endDateArray[2], parseInt($endDateArray[0])-1, $endDateArray[1]);
// var check = new Date($holidayYear, parseInt($holidayDateArray[1])-1, $holidayDateArray[0]);
// //check if this datae falls on weekedn, if not it is okay increament else don't increament
// console.log("From "+from+" Check "+check+" To "+to);
// if(check > from && check < to){
// console.log("Value of K "+$k+" "+check);
// //if it falls on range increase days buy 1
// console.log("falls in range ");
// // console.log("From: "+from);
// // console.log("To: "+to);
// // console.log("Check: "+check);
// // console.log("DOW "+$dayOfWeek);
// //(check > from && check < to) && ($dayOfWeek == 6 || $dayOfWeek == 0)
// $dayOfWeek = check.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// if($dayOfWeek == 6 || $dayOfWeek == 0){
// console.log("falls in range but is a weekend dont count");
// }else{
// console.log("falls in range but is a weekday count");
// $holidaysWithinRange++;
// }
// }else{
// //if it doesn't fall in range DO NOTHINg
// console.log("not in range");
// // console.log("From: "+from);
// // console.log("To: "+to);
// // console.log("Check: "+check);
// }
// // if($holidayDate >= $startDateDnM && $holidayDate <= $endDateDnM){//if the
// //
// // }else{
// //
// // }
// $k++;
// }
// //ADJUSTS FOR HOLIDAYS
// return $holidaysWithinRange;
// }
// //gets the holidays set in the system
// window.getHolidays = function(){
// $.ajax({
// url:$getHolidays,
// data:{},
// type:'POST',
// success:function($resp,status){
// console.log($resp);
// $holidays = JSON.parse($resp);
// // console.log($holidays[0]['holidayName']);
// return $holidays;
// }
// });
// }
// getHolidays();//this is called to pre-popuate the holidays variable
// //gets the holidays set in the system
// function getReturnDateIncludingWeekends($startDate,$leavedays){
// //compute return date
// var date = new Date($startDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt((newdate.getDate()) + parseInt($leavedays));
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// var returnDate = mm + '/' + dd + '/' + y;
// //compute return date
// //compute end date
// var date = new Date($startDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt((newdate.getDate()) + parseInt($leavedays)-1);
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// var leaveEndDate = mm + '/' + dd + '/' + y;
// //compute end date
// // console.log('getReturnDateIncludingWeekends '+returnDate+'leave end date'+leaveEndDate);
// return returnDate+'k'+leaveEndDate;
// }
// function getDateFromStartDateLeaveDays($startDate,$leavedays){
// //get the number weeks add the weeks
// var date = new Date($startDate);
// var newdate = new Date(date);
// $k = 0;
// $("#totalDaysApplied").val($leavedays);//hidden field
// // $totalDaysApplied = $leavedays + $totalWeekendDaysOnLeave;
// while($k < $leavedays){
// //add the number of days
// // $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// // newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// $t = new Date(y,(mm-1),dd);
// $dayOfWeek = $t.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// // console.log("DOW "+$dayOfWeek);
// if($dayOfWeek == 6 || $dayOfWeek == 0){
// // console.log("weekend");
// $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// newdate.setDate($newdayvalue);
// console.log(newdate);
// $k--;
// }else{
// // console.log("not weekend");
// $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// newdate.setDate($newdayvalue);
// console.log(newdate);
// }
// $k++;
// }
// var leaveEndDate = mm + '/' + dd + '/' + y;
// $newdayvalue = parseInt(newdate.getDate());
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// // console.log("return date"+dd+" "+mm+" "+y);
// $t = new Date(y,(mm-1),dd);
// // console.log("return date"+$t);
// $dayOfWeek = $t.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// // console.log('ODW '+$dayOfWeek);
// if($dayOfWeek == 6){//saturday
// var $newDate = mm + '/' + dd + '/' + y;
// var date = new Date($newDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((2));
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// // dd = parseInt(dd)+2;
// }else if($dayOfWeek == 0){//sunday
// var $newDate = mm + '/' + dd + '/' + y;
// var date = new Date($newDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// // dd = parseInt(dd)+1;
// }else{}
// var returnDate = mm + '/' + dd + '/' + y;
// return returnDate+'k'+leaveEndDate;
// // return JSON.stringify('{"leavedays":'+someFormattedDate+',"totalDays":'+someFormattedDate+'}');
// }
// //get expected return date
// window.computeLeaveDetails = function(a,b){
// $leavDaysArray = JSON.parse(workingDaysBetweenDates(a,b));//gets you the working days and total leave days applied.
// $leavedays = $leavDaysArray['leavedays'];
// var date = new Date(a);
// var newdate = new Date(date);
// //if start date plus days applied covers weekend skip weekend
// $k = 1;
// $workingDays = 0;
// $leavedays = $leavDaysArray['totalDays']+1;
// while($k < $leavedays){
// //startDate broken down
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// //startDate broken down
// //put the broken up date together to get the day you applied. monday-sunday
// $t = new Date(y,(mm-1),dd);
// $dayOfWeek = $t.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// //put the broken up date together to get the day you applied. monday-sunday
// // console.log('Day of week sun-mon '+$dayOfWeek);
// if($dayOfWeek == 0 || $dayOfWeek == 6){
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
// newdate.setDate($newdayvalue);
// }else{
// //increase day by one
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
// newdate.setDate($newdayvalue);
// $workingDays++;
// // console.log("Working Days "+$workingDays+" Day of week "+$workingDays);
// // console.log(newdate);
// //increase day by one
// }
// //startDate broken down
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// //startDate broken down
// // console.log("New Date after adding 1 "+mm+"/"+dd+"/"+y);
// $k++;
// }
// $daysEntitled = $("#daysAvaliable").val();
// if($workingDays > $daysEntitled){
// $("#startDate").val("");
// $("#absenceReason").val("");
// $("#daysAvaliable").val("");
// $("#daysToApply").val("");
// $("#daysRemaining").val("");
// $("#returnDate").val("");
// $("#endDate").val("");
// $('#startDate').prop('disabled',true);
// $('#endDate').prop('disabled',true);
// var element = document.getElementById("applyLeave")
// element.setAttribute('disabled','disabled')
// $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// hideLoginErrorBox();
// }else{
// //display working days
// $("#daysToApply").val($workingDays);
// //display working days
// //get expected return date
// //sex
// $expectedReturnDate = getDateFromStartDateLeaveDays($('#startDate').val(),$leavDaysArray['totalDays']);
// $expectedReturnDate = $expectedReturnDate.split("k");
// $("#returnDate").val($expectedReturnDate[0]);
// //get expected return date
// //compute remaining days
// $daysApplied = parseInt($("#daysToApply").val());
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($daysApplied+" "+$availableDays);
// $remaingDays = $availableDays - $daysApplied;
// console.log($daysApplied);
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// }
// }
// //On End Date Change
// // $("#endDate").change(function(){
// // $startDate = $('#startDate').val();
// // $endDate = $('#endDate').val();
// // $handleHolidayFallingOnWeekend = 0;
// // $.ajax({
// // url:$validateStartDateUrl,
// // data:{"startDate":$startDate,"endDate":$endDate},
// // type:'POST',
// // success:function($resp,status){
// // $resp = JSON.parse($resp);
// // $status = parseInt($resp['status']);
// // if($status == 0){
// // $startDate = $startDate.split("/");
// // $endDate = $endDate.split("/");
// // if($startDate.toString() === $endDate.toString()){
// // //ok
// // var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // $leaveDetails = computeLeaveDetails23(a,b);
// // $leaveDetails = JSON.parse($leaveDetails);
// // $message = $leaveDetails['message'];
// // $workingDays = $leaveDetails['workingDays'];
// // console.log($leaveDetails);
// // if($message === "OK"){
// // $("#daysToApply").val($workingDays);
// // //compute remaining days
// // $daysApplied = parseInt($("#daysToApply").val());
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log($daysApplied+" "+$availableDays);
// // $remaingDays = $availableDays - $daysApplied;
// // console.log($daysApplied);
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// // }else{
// // console.log("Error computing number of working days");
// // }
// // $("#applyLeave").prop("disabled", false);
// // console.log("Data appears fine ");
// // }else{
// // if($endDate[0] < $startDate[0]){//if end month is less than start month
// // console.log('End month is less than start year');
// // //wrong throw error, except if end year is greater than start year
// // if($endDate[2] > $startDate[2]){//if end year is greator than start year
// // //ok
// // //get remaining days till end of year
// // var d = new Date($startDate[2], 11, 31);//end date of year
// // var c = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // console.log(d+"current year days comutation "+c);
// // $workingDaysTillEndYear = computeLeaveDetails23(c,d);
// // $workingDaysTillEndYear = JSON.parse($workingDaysTillEndYear);
// // $workingDaysTillEndYearQty = $workingDaysTillEndYear['workingDays'];
// // $workingDaysTillEndYearMsg = $workingDaysTillEndYear['message'];
// // $handleHolidayFallingOnWeekendCurrentYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// // //get remaining days till end of year
// // //get days between start of year and end date selected
// // $endDate = $endDate.split("/");
// // var e = new Date($endDate[2], 00, 01);//first day of first month of end year
// // var f = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // console.log($endDate+"Year ");
// // console.log(e+"new year days comutation "+f);
// // $workingDaysFromYearStart = computeLeaveDetails23(e,f);
// // $workingDaysFromYearStart = JSON.parse($workingDaysFromYearStart);
// // $workingDaysFromYearStartQty = $workingDaysFromYearStart['workingDays'];
// // $workingDaysFromYearStartMsg = $workingDaysFromYearStart['message'];
// // $handleHolidayFallingOnWeekendNextYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// // //get days between start of year and end date selected
// // console.log($workingDaysFromYearStartQty+" $workingDaysTillEndYearQty "+$handleHolidayFallingOnWeekendNextYear);
// // //add the two days to get total days applied
// // console.log("$handleHolidayFallingOnWeekendCurrentYear"+$handleHolidayFallingOnWeekendCurrentYear+"Working days"+$workingDaysTillEndYearQty);
// // console.log($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// // console.log(($workingDaysFromYearStartQty + $workingDaysTillEndYearQty));
// // console.log("$handleHolidayFallingOnWeekendNextYear"+$handleHolidayFallingOnWeekendNextYear+"Working days"+$workingDaysFromYearStartQty );
// // $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)-($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// // //add the two days to get total days applied
// // $daysAvailable = $("#daysAvaliable").val();
// // //check of days applied is more than days entitled
// // if($daysAvailable < $totalDaysApplied){
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#daysToApply").val('');
// // $("#daysRemaining").val('');
// // $("#returnDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// // hideLoginErrorBox();
// // }else{
// // console.log("End month is less than start month");
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of "+$NoOfHolidays+"$totalDaysApplied"+$totalDaysApplied);
// // console.log("Days Less holiday = "+($totalDaysApplied-$NoOfHolidays));
// // $("#daysToApply").val($totalDaysApplied);
// // $remaingDays = parseInt($("#daysAvaliable").val()) - parseInt($("#daysToApply").val());
// // $("#daysRemaining").val($remaingDays);
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$totalDaysApplied);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// // }
// // }else{
// // console.log('1');
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html("The end date cannot be less than start date");
// // hideLoginErrorBox();
// // }
// // }else if($endDate[0] == $startDate[0]){//if month is equal check if dates are valid
// // //validate the days of the week
// // if($endDate[1] < $startDate[1]){//if end day is less than start day
// // //wrong
// // //display error
// // // console.log("The end day can't be less than start day");
// // console.log('2');
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", false);
// // $("#loginerrorBox p").html("The end date cannot be less than start date");
// // hideLoginErrorBox();
// // }else{//end day is greater than start day
// // //ok
// // //call function to compute end date, return date and number of days
// // var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // $leaveDetails = computeLeaveDetails23(a,b);
// // $leaveDetails = JSON.parse($leaveDetails);
// // $message = $leaveDetails['message'];
// // $workingDays = $leaveDetails['workingDays'];
// // $handleHolidayFallingOnWeekend = $leaveDetails['handleHolidayFallingOnWeekend'];
// // console.log($leaveDetails);
// // if($message === "OK"){
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of holidays"+$NoOfHolidays);
// // console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// // $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// // console.log($handleHolidayFallingOnWeekend+"isss"+$workingDaysApplied);
// // $workingDaysApplied = parseInt($workingDaysApplied)+parseInt($handleHolidayFallingOnWeekend)
// // $("#daysToApply").val($workingDaysApplied);//set the days applied
// // //subtract from the number of days applied
// // //get the number of holidays between the dates selected
// // // $workingDaysApplied = parseInt($workingDaysApplied);
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log($availableDays+" remaingi days"+$workingDaysApplied);
// // $remaingDays = $availableDays - $workingDaysApplied;
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// // console.log($workingDays+"sdsdsd");
// // console.log("Return Date info "+$returnDate);
// // $returnDateArray = $returnDate.split("k");
// // $returnDate = $returnDateArray[0].split("/");
// // $month = $returnDate[0];
// // $day = $returnDate[1];
// // $combi = $month+"/"+$day;
// // if($combi == "12/26" || $combi == "12/27" || $combi == "12/28" || $combi == "12/29" || $combi == "12/30" || $combi == "12/31"){
// // $returnDate = "3/01/"+(parseInt($returnDate[2])+parseInt(1));
// // }else{
// // $returnDate = $returnDateArray[0];
// // }
// // console.log("Return Datess"+$returnDate);
// // // $("#returnDate").val($returnDateArray[0]);
// // $("#returnDate").val($returnDate);
// // }else{
// // console.log("Error computing number of working days");
// // }
// // $("#applyLeave").prop("disabled", false);
// // console.log("Data appears fine ");
// // }
// // }else if($endDate[0] > $startDate[0]){//if the end month is greater than start month
// // //call function to compute end date and and number of days
// // //okconsole.log("Error");
// // var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // $leaveDetails = computeLeaveDetails23(a,b);
// // $leaveDetails = JSON.parse($leaveDetails);
// // $message = $leaveDetails['message'];
// // $workingDays = $leaveDetails['workingDays'];
// // console.log("Just before leavedetails "+$leaveDetails['message']);
// // if($message === "OK"){
// // console.log("Just before leavedetails");
// // //compute remaining days
// // //get the number of holidays between the dates selected
// // //subtract from the number of days applied
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of "+$NoOfHolidays);
// // console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// // $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// // $("#daysToApply").val($workingDaysApplied);//set the days applied
// // //subtract from the number of days applied
// // //get the number of holidays between the dates selected
// // // $workingDaysApplied = parseInt($workingDaysApplied);
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log("Days applied "+$workingDaysApplied+" Days available xxx "+$availableDays);
// // $remaingDays = $availableDays - $workingDaysApplied;
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// // console.log("Return Date info "+$returnDate);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// // }else{
// // console.log("Error computing number of working days");
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html($message);
// // hideLoginErrorBox();
// // }
// // $("#applyLeave").prop("disabled", false);
// // console.log("End month is greater thus ok");
// // }else{
// // console.log('3');
// // console.log($endDate+" "+$startDate);
// // console.log("condition not tested "+$endDate[0]+ " strat month "+$startDate[0]);
// // }
// // }
// // }else{
// // console.log($resp['message']);
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html($resp['message']);
// // hideLoginErrorBox();
// // }
// // }
// // });
// // });
// $("#endDate").change(function(){
// $startDate = $('#startDate').val();
// $endDate = $('#endDate').val();
// $handleHolidayFallingOnWeekend = 0;
// $.ajax({
// url:$validateStartDateUrl,
// data:{"startDate":$startDate,"endDate":$endDate},
// type:'POST',
// success:function($resp,status){
// $resp = JSON.parse($resp);
// $status = parseInt($resp['status']);
// if($status == 0){
// $startDate = $startDate.split("/");
// $endDate = $endDate.split("/");
// if($startDate.toString() === $endDate.toString()){
// //ok
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// console.log($leaveDetails);
// if($message === "OK"){
// $("#daysToApply").val($workingDays);
// //compute remaining days
// $daysApplied = parseInt($("#daysToApply").val());
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($daysApplied+" "+$availableDays);
// $remaingDays = $availableDays - $daysApplied;
// console.log($daysApplied);
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }else{
// console.log("Error computing number of working days");
// }
// $("#applyLeave").prop("disabled", false);
// console.log("Data appears fine ");
// }else{
// if($endDate[0] < $startDate[0]){//if end month is less than start month
// console.log('End month is less than start year');
// //wrong throw error, except if end year is greater than start year
// if($endDate[2] > $startDate[2]){//if end year is greator than start year
// //ok
// //get remaining days till end of year
// var d = new Date($startDate[2], 11, 31);//end date of year
// var c = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// console.log(d+"current year days comutation "+c);
// $workingDaysTillEndYear = computeLeaveDetails23(c,d);
// $workingDaysTillEndYear = JSON.parse($workingDaysTillEndYear);
// $workingDaysTillEndYearQty = $workingDaysTillEndYear['workingDays'];
// $workingDaysTillEndYearMsg = $workingDaysTillEndYear['message'];
// $handleHolidayFallingOnWeekendCurrentYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// //get remaining days till end of year
// //get days between start of year and end date selected
// $endDate = $endDate.split("/");
// var e = new Date($endDate[2], 00, 01);//first day of first month of end year
// var f = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// console.log($endDate+"Year ");
// console.log(e+"new year days comutation "+f);
// $workingDaysFromYearStart = computeLeaveDetails23(e,f);
// $workingDaysFromYearStart = JSON.parse($workingDaysFromYearStart);
// $workingDaysFromYearStartQty = $workingDaysFromYearStart['workingDays'];
// $workingDaysFromYearStartMsg = $workingDaysFromYearStart['message'];
// $handleHolidayFallingOnWeekendNextYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// //get days between start of year and end date selected
// //add the two days to get total days applied
// console.log("$handleHolidayFallingOnWeekendCurrentYear"+$handleHolidayFallingOnWeekendCurrentYear+"Working days"+$workingDaysTillEndYearQty);
// console.log($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// console.log(($workingDaysFromYearStartQty + $workingDaysTillEndYearQty));
// console.log("$handleHolidayFallingOnWeekendNextYear"+$handleHolidayFallingOnWeekendNextYear+"Working days"+$workingDaysFromYearStartQty );
// // $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)-($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)
// //add the two days to get total days applied
// $daysAvailable = $("#daysAvaliable").val();
// //check of days applied is more than days entitled
// if($daysAvailable < $totalDaysApplied){
// $("#startDate").val('');
// $("#endDate").val('');
// $("#daysToApply").val('');
// $("#daysRemaining").val('');
// $("#returnDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// hideLoginErrorBox();
// }else{
// console.log("End month is less than start month");
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of "+$NoOfHolidays);
// console.log("Days Less holiday = "+($totalDaysApplied-$NoOfHolidays));
// $("#daysToApply").val($totalDaysApplied-$NoOfHolidays);
// $remaingDays = parseInt($("#daysAvaliable").val()) - parseInt($("#daysToApply").val());
// $("#daysRemaining").val($remaingDays);
// //return and end date
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$totalDaysApplied);//gets return Date and leave end date
// $returnDateArray = $returnDate.split("k");
// console.log(" returnDateArray "+$returnDateArray);
// $("#returnDate").val($returnDateArray[0]);
// $("#applyLeave").prop("disabled", false);
// //return and end date
// }
// }else{
// console.log('1');
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html("The end date cannot be less than start date");
// hideLoginErrorBox();
// }
// }else if($endDate[0] == $startDate[0]){//if month is equal check if dates are valid
// //validate the days of the week
// if($endDate[1] < $startDate[1]){//if end day is less than start day
// //wrong
// //display error
// // console.log("The end day can't be less than start day");
// console.log('2');
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", false);
// $("#loginerrorBox p").html("The end date cannot be less than start date");
// hideLoginErrorBox();
// }else{//end day is greater than start day
// //ok
// //call function to compute end date, return date and number of days
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// $handleHolidayFallingOnWeekend = $leaveDetails['handleHolidayFallingOnWeekend'];
// console.log($leaveDetails);
// if($message === "OK"){
// // $("#daysToApply").val($workingDays);
// // //compute remaining days
// // $daysApplied = parseInt($("#daysToApply").val());
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log($daysApplied+" "+$availableDays);
// // $remaingDays = $availableDays - $daysApplied;
// // console.log($daysApplied);
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of "+$NoOfHolidays);
// // console.log("Days Less holiday = "+($daysApplied-parseInt($NoOfHolidays)));
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// //compute remaining days
// //get the number of holidays between the dates selected
// //subtract from the number of days applied
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of holidays"+$NoOfHolidays);
// console.log("ze working days"+$workingDays);
// console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// console.log($handleHolidayFallingOnWeekend+"isss"+$workingDaysApplied);
// $("#daysToApply").val($workingDaysApplied);
// // $workingDaysApplied = parseInt($workingDaysApplied)+parseInt($handleHolidayFallingOnWeekend)
// // $("#daysToApply").val($workingDaysApplied);//set the days applied
// //subtract from the number of days applied
// //get the number of holidays between the dates selected
// // $workingDaysApplied = parseInt($workingDaysApplied);
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($availableDays+" remaingi days"+$workingDaysApplied);
// $remaingDays = $availableDays - $workingDaysApplied;
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// console.log($workingDays+"sdsdsd");
// console.log("Return Date info "+$returnDate);
// $returnDateArray = $returnDate.split("k");
// $returnDate = $returnDateArray[0].split("/");
// $month = $returnDate[0];
// $day = $returnDate[1];
// $combi = $month+"/"+$day;
// if($combi == "12/26" || $combi == "12/27" || $combi == "12/28" || $combi == "12/29" || $combi == "12/30" || $combi == "12/31"){
// $returnDate = "3/01/"+(parseInt($returnDate[2])+parseInt(1));
// }else{
// $returnDate = $returnDateArray[0];
// }
// console.log("Return Datess"+$returnDate);
// // $("#returnDate").val($returnDateArray[0]);
// $("#returnDate").val($returnDate);
// }else{
// console.log("Error computing number of working days");
// }
// $("#applyLeave").prop("disabled", false);
// console.log("Data appears fine ");
// }
// }else if($endDate[0] > $startDate[0]){//if the end month is greater than start month
// //call function to compute end date and and number of days
// //okconsole.log("Error");
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// console.log($leaveDetails);
// if($message === "OK"){
// //compute remaining days
// //get the number of holidays between the dates selected
// //subtract from the number of days applied
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of "+$NoOfHolidays);
// console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// $("#daysToApply").val($workingDaysApplied);//set the days applied
// //subtract from the number of days applied
// //get the number of holidays between the dates selected
// // $workingDaysApplied = parseInt($workingDaysApplied);
// $availableDays = parseInt($("#daysAvaliable").val());
// $remaingDays = $availableDays - $workingDaysApplied;
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// console.log("Return Date info "+$returnDate);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }else{
// console.log("Error computing number of working days");
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($message);
// hideLoginErrorBox();
// }
// $("#applyLeave").prop("disabled", false);
// console.log("End month is greater thus ok");
// }else{
// console.log('3');
// console.log($endDate+" "+$startDate);
// console.log("condition not tested "+$endDate[0]+ " strat month "+$startDate[0]);
// }
// }
// }else{
// console.log($resp['message']);
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// }
// });
// });
// window.workingDaysBetweenDates = function(startDate, endDate) {
// var millisecondsPerDay = 86400 * 1000;
// startDate.setHours(0,0,0,1);
// endDate.setHours(23,59,59,999);
// var diff = endDate - startDate;
// var days = Math.ceil(diff / millisecondsPerDay);
// // Subtract two weekend days for every week in between
// var weeks = Math.floor(days / 7);
// // Handle special cases
// var startDay = startDate.getDate();
// var endDay = endDate.getDate();
// if(startDay == endDay){
// resp = '{"leavedays":1,"totalDays":1}';
// }else{
// workingdays = days - (weeks * 2);
// resp = '{"leavedays":'+workingdays+',"totalDays":'+days+'}';
// }
// return resp;
// }
// //On End Date Change
// $("#applyLeave").click(function(e){
// e.preventDefault();
// $daysRemaining = $("#daysRemaining").val();
// $daysToApply = $("#daysToApply").val();
// $endDate = $('#endDate').val();
// $startDate = $('#startDate').val();
// $.ajax({
// url:$validateStartDateUrl,
// data:{"startDate":$startDate,"endDate":$endDate},
// type:'POST',
// success:function($resp,status){
// console.log("validate date hase run "+$resp);
// $resp = JSON.parse($resp);
// $status = parseInt($resp['status']);
// if($status == 0){
// if($daysRemaining == "" || $daysRemaining == undefined || $daysRemaining == null || $daysToApply == null || $daysToApply == undefined || $daysToApply == ""){
// $("#loginerrorBox p").html("Complete the application form.");
// console.log("Complete the application form.");
// hideLoginErrorBox();
// }else{
// $(".overlay").show();
// $startDate = $('#startDate').val();
// $daysNID = $('#absenceReason').val();
// $daysNID = $daysNID.split("k");
// $leaveTypeID = $daysNID[1];
// $daysAvaliable = $('#daysAvaliable').val();
// $daysToApply = $('#daysToApply').val();
// $daysRemaining = $('#daysRemaining').val();
// $endDate = $('#endDate').val();
// $totalDays = $("#totalDaysApplied").val();
// $returnDate = $("#returnDate").val();
// // console.log("Return Date "+$returnDate+" End Date "+$endDate);
// // $comment = $('#comment').val();
// //format start date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $startDate = $year+'/'+$month+'/'+$dayofWeek;
// //format start date
// //format end date
// $dateValue = $endDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $endDate = $year+'/'+$month+'/'+$dayofWeek;
// //format end date
// //format return date
// $dateValue = $returnDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $returnDate = $year+'/'+$month+'/'+$dayofWeek;
// //format return date
// console.log($endDate+"endDate StartDate"+$startDate);
// $confirm = confirm("Are your sure");
// if($confirm == true){
// $.post($applyLeaveUrl,{"startDate":$startDate, "totalDaysApplied":$totalDays, "endDate":$endDate,"returnDate":$returnDate,"absenceReason":$leaveTypeID, "daysApplied":$daysToApply, "daysAvaliable":$daysAvaliable},function(data, status){
// console.log(data);
// $resp = JSON.parse(data);
// $status =$resp['status'];
// if($status == 0){
// $(".overlay").hide();
// $message = $resp['message'];
// $("#loginSuccessBox p").html($resp['message']);
// hideLoginSuccessBox();
// setTimeout(function(){
// location.reload();
// },10000);
// }else{
// $(".overlay").hide();
// $message = $resp['message'];
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// });
// }else{
// $(".overlay").hide();
// }
// }
// }else{
// console.log($resp['message']);
// $("#startDate").val('');
// $("#endDate").val('');
// $("#daysRemaining").val('');
// $("#returnDate").val('');
// $("#daysToApply").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// }
// });
// });
// //apply leave
// });
//NOTE: the commented-out handler variant above does NOT subtract public holidays
//NOTE: the commented-out variant below subtracts public holidays
// $(document).ready(function(){
// //plugin for start Date
// $('#startDate').daterangepicker({
// singleDatePicker: true,
// calender_style: "picker_4",
// minDate: new Date(),
// isInvalidDate: function(date){
// /*
// validates the following dates
// 1) 1st Jan- 2nd Jan
// 2) 1st May
// 3) 25th Dec - 31st Dec
// 4) 1st June
// 5) 20th Oct
// 6) 12th Dec
// */
// if((date.day() == 0 )|| (date.day() == 6)||(date.format('MM-DD') === '01-01') || (date.format('MM-DD') === '01-02') || (date.format('MM-DD') === '12-25')|| (date.format('MM-DD') === '12-12') || (date.format('MM-DD') === '12-26') || (date.format('MM-DD') === '12-31')|| (date.format('MM-DD') === '05-01')|| (date.format('MM-DD') === '06-01') || (date.format('MM-DD') === '10-20')){
// // || (date.format('MM-DD') === '12-27') || (date.format('MM-DD') === '12-28')|| (date.format('MM-DD') === '12-29')|| (date.format('MM-DD') === '12-30') ||
// return true;
// }else{
// return false;
// }
// },
// }, function(start, end, label) {
// //console.log(start.toISOString(), end.toISOString(), label);
// });
// //plugin for start date
// //plugin for end date
// $('#endDate').daterangepicker({
// singleDatePicker: true,
// calender_style: "picker_4",
// minDate: new Date(),
// isInvalidDate: function(date){
// /*
// validates the following dates
// 1) 1st Jan- 2nd Jan
// 2) 1st May
// 3) 25th Dec - 31st Dec
// 4) 1st June
// 5) 20th Oct
// 6) 12th Dec
// */
// if((date.day() == 0 )|| (date.day() == 6)||(date.format('MM-DD') === '01-01') || (date.format('MM-DD') === '01-02') || (date.format('MM-DD') === '12-25')|| (date.format('MM-DD') === '12-12') || (date.format('MM-DD') === '12-26') || (date.format('MM-DD') === '12-31')|| (date.format('MM-DD') === '05-01')|| (date.format('MM-DD') === '06-01') || (date.format('MM-DD') === '10-20')){
// // || (date.format('MM-DD') === '12-27') || (date.format('MM-DD') === '12-28')|| (date.format('MM-DD') === '12-29')|| (date.format('MM-DD') === '12-30')
// return true;
// }else{
// return false;
// }
// },
// }, function(start, end, label) {
// //console.log(start.toISOString(), end.toISOString(), label);
// });
// //plugin for end date
// $('#startDate').prop('disabled',true);
// $('#endDate').prop('disabled',true);
// $('#daysAvaliable').prop('disabled', true);
// $('#daysToApply').prop('disabled', true);
// $('#daysRemaining').prop('disabled', true);
// $('#returnDate').prop('disabled', true);
// $("#applyLeave").prop('disabled', true);
// $("#startDate").change(function(){
// $('#endDate').val('');
// $('#daysToApply').val('');
// $('#daysRemaining').val('');
// $('#returnDate').val('');
// $("#applyLeave").prop('disabled', true);
// $daysNID = $("#absenceReason").val();
// $daysNID = $daysNID.split("k");
// $leavedays = Math.ceil($daysNID[0]);
// $leaveTypeID = $daysNID[1];
// console.log($leaveTypeID);
// if($leaveTypeID == "MATERNITY" || $leaveTypeID == "PATERNITY"){
// console.log("disable entry of end date");
// $startDate = $('#startDate').val();
// $endDate = $('#endDate').val();
// //format date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $startDate = $month+'/'+$dayofWeek+'/'+$year;
// // format date
// $expectedReturnDate = getReturnDateIncludingWeekends($startDate,$leavedays);
// // console.log("getReturnDateIncludingWeekends has run "+getReturnDateIncludingWeekends($startDate,$leavedays));
// $endAndReturnDate = $expectedReturnDate.split('k');
// $("#daysToApply").val($leavedays);
// $("#returnDate").val($endAndReturnDate[0]);
// $("#endDate").val($endAndReturnDate[1]);
// $('#daysRemaining').val(0);
// $("#applyLeave").prop('disabled', false);
// }else{
// $("#endDate").prop('disabled', false);
// $days = $("#daysToApply").val();
// if($days == null || $days == undefined || $days == ""){
// // console.log("undefined");
// }else{
// $startDate = $('#startDate').val();
// //format date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $newDate = $month+'/'+$dayofWeek+'/'+$year;
// //format date
// $expectedReturnDate = getDateFromStartDateLeaveDays($newDate,$days);
// $expectedReturnDate = $expectedReturnDate.split("k");
// $("#returnDate").val($expectedReturnDate[0]);
// //sex
// //$("#endDate").val($expectedReturnDate[1]);
// }
// }
// });
// //apply leave
// //auto populates the number of days field during leave application
// $("#absenceReason").change(function(){
// $('#startDate').prop('disabled',false);
// $('#endDate').prop('disabled',true);
// $('#endDate').val('');
// $('#startDate').val('');
// $('#daysToApply').val('');
// $('#daysRemaining').val('');
// $('#daysAvaliable').val('');
// $('#returnDate').val('');
// $("#applyLeave").prop('disabled', true);
// $daysNID = $("#absenceReason").val();
// $daysNID = $daysNID.split("k");
// $leavedays = Math.ceil($daysNID[0]);
// $leaveTypeID = $daysNID[1];
// $("#daysAvaliable").val($leavedays);
// if($leaveTypeID == "MATERNITY" || $leaveTypeID == "PATERNITY"){
// $("#daysToApply").prop('disabled', true);
// $('#endDate').prop('disabled', true);
// $startDate = $('#startDate').val();
// //format date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $startDate = $month+'/'+$dayofWeek+'/'+$year;
// // format date
// if($startDate === '/undefined/undefined' || $startDate === '/undefined/undefined' || $startDate === '/undefined/undefined'){
// }else{
// $expectedReturnDate = getDateFromStartDateLeaveDays($startDate,$leavedays);
// $endAndReturnDate = $expectedReturnDate.split('k');
// console.log(" End and return Date "+$endAndReturnDate);
// $("#returnDate").val($endAndReturnDate[0]);
// $("#endDate").val($endAndReturnDate[1]);
// $("#applyLeave").prop('disabled', false);
// }
// }else{
// $('#daysToApply').attr('max', $leavedays);
// $('#daysToApply').attr('min', parseInt(0));
// // $("#daysToApply").prop('disabled', false);
// }
// });
// //auto populates the number of days field during leave application
// window.getNumberOfHolidaysWithinDateRange = function(){
// //ADJUSTS FOR HOLIDAYS
// //get the holidays
// //check to see if holidays fall within the date range applied
// //do necessary adjustements
// $holidaysWithinRange = 0;
// $startDate = $("#startDate").val();
// $endDate = $("#endDate").val();
// $startDateArray = $startDate.split("/");
// $endDateArray = $endDate.split("/");
// $currentYear = new Date().getFullYear();
// $k = 0;
// while($k < $holidays.length){
// //loop through holidays to check if any falls within range
// $holidayDate = $holidays[$k]['holidayDate'];//compare this to the start date end date range provided
// $holidayDateArray = $holidayDate.split("-");
// console.log("Start Date broken down: Year "+$startDateArray[2]+" Month "+$startDateArray[0]+" Day "+$startDateArray[1]);
// console.log("End Date broken down: Year "+$endDateArray[2]+" Month "+$endDateArray[0]+" Day "+$endDateArray[1]);
// console.log("Check year broken down: Year "+$currentYear+" Month "+$holidayDateArray[1]+" Day "+$holidayDateArray[0]);
// //FromDate >= @startDate AND ToDate <= @endDate
// var from = new Date($startDateArray[2], parseInt($startDateArray[0])-1, $startDateArray[1]); // -1 because months are from 0 to 11
// var to = new Date($endDateArray[2], parseInt($endDateArray[0])-1, $endDateArray[1]);
// var check = new Date($currentYear, parseInt($holidayDateArray[1])-1, $holidayDateArray[0]);
// // console.log("From "+from+" To "+to+" Check "+check);
// if(check > from && check < to){
// //if it falls in range increase days by 1
// console.log("falls in range");
// console.log("From: "+from);
// console.log("To: "+to);
// console.log("Check: "+check);
// $holidaysWithinRange++;
// }else{
// //if it doesn't fall in range DO NOTHING
// console.log("not in range");
// console.log("From: "+from);
// console.log("To: "+to);
// console.log("Check: "+check);
// }
// // if($holidayDate >= $startDateDnM && $holidayDate <= $endDateDnM){//if the
// //
// // }else{
// //
// // }
// $k++;
// }
// //ADJUSTS FOR HOLIDAYS
// return $holidaysWithinRange;
// }
// //gets the holidays set in the system
// window.getHolidays = function(){
// $.ajax({
// url:$getHolidays,
// data:{},
// type:'POST',
// success:function($resp,status){
// console.log($resp);
// $holidays = JSON.parse($resp);
// // console.log($holidays[0]['holidayName']);
// return $holidays;
// }
// });
// }
// getHolidays();//this is called to pre-populate the holidays variable
// //gets the holidays set in the system
// function getReturnDateIncludingWeekends($startDate,$leavedays){
// //compute return date
// var date = new Date($startDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt((newdate.getDate()) + parseInt($leavedays));
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// var returnDate = mm + '/' + dd + '/' + y;
// //compute return date
// //compute end date
// var date = new Date($startDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt((newdate.getDate()) + parseInt($leavedays)-1);
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// var leaveEndDate = mm + '/' + dd + '/' + y;
// //compute end date
// // console.log('getReturnDateIncludingWeekends '+returnDate+'leave end date'+leaveEndDate);
// return returnDate+'k'+leaveEndDate;
// }
// function getDateFromStartDateLeaveDays($startDate,$leavedays){
// //get the number weeks add the weeks
// var date = new Date($startDate);
// var newdate = new Date(date);
// $k = 0;
// $("#totalDaysApplied").val($leavedays);//hidden field
// // $totalDaysApplied = $leavedays + $totalWeekendDaysOnLeave;
// while($k < $leavedays){
// //add the number of days
// // $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// // newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// $t = new Date(y,(mm-1),dd);
// $dayOfWeek = $t.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// // console.log("DOW "+$dayOfWeek);
// if($dayOfWeek == 6 || $dayOfWeek == 0){
// // console.log("weekend");
// $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// newdate.setDate($newdayvalue);
// console.log(newdate);
// $k--;
// }else{
// // console.log("not weekend");
// $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// newdate.setDate($newdayvalue);
// console.log(newdate);
// }
// $k++;
// }
// var leaveEndDate = mm + '/' + dd + '/' + y;
// $newdayvalue = parseInt(newdate.getDate());
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// // console.log("return date"+dd+" "+mm+" "+y);
// $t = new Date(y,(mm-1),dd);
// // console.log("return date"+$t);
// $dayOfWeek = $t.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// // console.log('ODW '+$dayOfWeek);
// if($dayOfWeek == 6){//saturday
// var $newDate = mm + '/' + dd + '/' + y;
// var date = new Date($newDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((2));
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// // dd = parseInt(dd)+2;
// }else if($dayOfWeek == 0){//sunday
// var $newDate = mm + '/' + dd + '/' + y;
// var date = new Date($newDate);
// var newdate = new Date(date);
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
// newdate.setDate($newdayvalue);
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// // dd = parseInt(dd)+1;
// }else{}
// var returnDate = mm + '/' + dd + '/' + y;
// return returnDate+'k'+leaveEndDate;
// // return JSON.stringify('{"leavedays":'+someFormattedDate+',"totalDays":'+someFormattedDate+'}');
// }
// //get expected return date
// window.computeLeaveDetails = function(a,b){
// $leavDaysArray = JSON.parse(workingDaysBetweenDates(a,b));//gets you the working days and total leave days applied.
// $leavedays = $leavDaysArray['leavedays'];
// var date = new Date(a);
// var newdate = new Date(date);
// //if start date plus days applied covers weekend skip weekend
// $k = 1;
// $workingDays = 0;
// $leavedays = $leavDaysArray['totalDays']+1;
// while($k < $leavedays){
// //startDate broken down
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// //startDate broken down
// //put the broken up date together to get the day you applied. monday-sunday
// $t = new Date(y,(mm-1),dd);
// $dayOfWeek = $t.getDay();
// $dayOfWeek = parseInt($dayOfWeek);
// //put the broken up date together to get the day you applied. monday-sunday
// // console.log('Day of week sun-mon '+$dayOfWeek);
// if($dayOfWeek == 0 || $dayOfWeek == 6){
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
// newdate.setDate($newdayvalue);
// }else{
// //increase day by one
// $newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
// newdate.setDate($newdayvalue);
// $workingDays++;
// // console.log("Working Days "+$workingDays+" Day of week "+$workingDays);
// // console.log(newdate);
// //increase day by one
// }
// //startDate broken down
// var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
// var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
// var y = newdate.getFullYear();
// //startDate broken down
// // console.log("New Date after adding 1 "+mm+"/"+dd+"/"+y);
// $k++;
// }
// $daysEntitled = $("#daysAvaliable").val();
// if($workingDays > $daysEntitled){
// $("#startDate").val("");
// $("#absenceReason").val("");
// $("#daysAvaliable").val("");
// $("#daysToApply").val("");
// $("#daysRemaining").val("");
// $("#returnDate").val("");
// $("#endDate").val("");
// $('#startDate').prop('disabled',true);
// $('#endDate').prop('disabled',true);
// var element = document.getElementById("applyLeave")
// element.setAttribute('disabled','disabled')
// $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// hideLoginErrorBox();
// }else{
// //display working days
// $("#daysToApply").val($workingDays);
// //display working days
// //get expected return date
// //sex
// $expectedReturnDate = getDateFromStartDateLeaveDays($('#startDate').val(),$leavDaysArray['totalDays']);
// $expectedReturnDate = $expectedReturnDate.split("k");
// $("#returnDate").val($expectedReturnDate[0]);
// //get expected return date
// //compute remaining days
// $daysApplied = parseInt($("#daysToApply").val());
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($daysApplied+" "+$availableDays);
// $remaingDays = $availableDays - $daysApplied;
// console.log($daysApplied);
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// }
// }
// //On End Date Change
// // $("#endDate").change(function(){
// // $startDate = $('#startDate').val();
// // $endDate = $('#endDate').val();
// // $handleHolidayFallingOnWeekend = 0;
// // $.ajax({
// // url:$validateStartDateUrl,
// // data:{"startDate":$startDate,"endDate":$endDate},
// // type:'POST',
// // success:function($resp,status){
// // $resp = JSON.parse($resp);
// // $status = parseInt($resp['status']);
// // if($status == 0){
// // $startDate = $startDate.split("/");
// // $endDate = $endDate.split("/");
// // if($startDate.toString() === $endDate.toString()){
// // //ok
// // var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // $leaveDetails = computeLeaveDetails23(a,b);
// // $leaveDetails = JSON.parse($leaveDetails);
// // $message = $leaveDetails['message'];
// // $workingDays = $leaveDetails['workingDays'];
// // console.log($leaveDetails);
// // if($message === "OK"){
// // $("#daysToApply").val($workingDays);
// // //compute remaining days
// // $daysApplied = parseInt($("#daysToApply").val());
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log($daysApplied+" "+$availableDays);
// // $remaingDays = $availableDays - $daysApplied;
// // console.log($daysApplied);
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// // }else{
// // console.log("Error computing number of working days");
// // }
// // $("#applyLeave").prop("disabled", false);
// // console.log("Data appears fine ");
// // }else{
// // if($endDate[0] < $startDate[0]){//if end month is less than start month
// // console.log('End month is less than start year');
// // //wrong throw error, except if end year is greater than start year
// // if($endDate[2] > $startDate[2]){//if end year is greater than start year
// // //ok
// // //get remaining days till end of year
// // var d = new Date($startDate[2], 11, 31);//end date of year
// // var c = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // console.log(d+"current year days comutation "+c);
// // $workingDaysTillEndYear = computeLeaveDetails23(c,d);
// // $workingDaysTillEndYear = JSON.parse($workingDaysTillEndYear);
// // $workingDaysTillEndYearQty = $workingDaysTillEndYear['workingDays'];
// // $workingDaysTillEndYearMsg = $workingDaysTillEndYear['message'];
// // $handleHolidayFallingOnWeekendCurrentYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// // //get remaining days till end of year
// // //get days between start of year and end date selected
// // $endDate = $endDate.split("/");
// // var e = new Date($endDate[2], 00, 01);//first day of first month of end year
// // var f = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // console.log($endDate+"Year ");
// // console.log(e+"new year days comutation "+f);
// // $workingDaysFromYearStart = computeLeaveDetails23(e,f);
// // $workingDaysFromYearStart = JSON.parse($workingDaysFromYearStart);
// // $workingDaysFromYearStartQty = $workingDaysFromYearStart['workingDays'];
// // $workingDaysFromYearStartMsg = $workingDaysFromYearStart['message'];
// // $handleHolidayFallingOnWeekendNextYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// // //get days between start of year and end date selected
// // console.log($workingDaysFromYearStartQty+" $workingDaysTillEndYearQty "+$handleHolidayFallingOnWeekendNextYear);
// // //add the two days to get total days applied
// // console.log("$handleHolidayFallingOnWeekendCurrentYear"+$handleHolidayFallingOnWeekendCurrentYear+"Working days"+$workingDaysTillEndYearQty);
// // console.log($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// // console.log(($workingDaysFromYearStartQty + $workingDaysTillEndYearQty));
// // console.log("$handleHolidayFallingOnWeekendNextYear"+$handleHolidayFallingOnWeekendNextYear+"Working days"+$workingDaysFromYearStartQty );
// // $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)-($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// // //add the two days to get total days applied
// // $daysAvailable = $("#daysAvaliable").val();
// // //check if days applied is more than days entitled
// // if($daysAvailable < $totalDaysApplied){
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#daysToApply").val('');
// // $("#daysRemaining").val('');
// // $("#returnDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// // hideLoginErrorBox();
// // }else{
// // console.log("End month is less than start month");
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of "+$NoOfHolidays+"$totalDaysApplied"+$totalDaysApplied);
// // console.log("Days Less holiday = "+($totalDaysApplied-$NoOfHolidays));
// // $("#daysToApply").val($totalDaysApplied);
// // $remaingDays = parseInt($("#daysAvaliable").val()) - parseInt($("#daysToApply").val());
// // $("#daysRemaining").val($remaingDays);
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$totalDaysApplied);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// // }
// // }else{
// // console.log('1');
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html("The end date cannot be less than start date");
// // hideLoginErrorBox();
// // }
// // }else if($endDate[0] == $startDate[0]){//if month is equal check if dates are valid
// // //validate the days of the week
// // if($endDate[1] < $startDate[1]){//if end day is less than start day
// // //wrong
// // //display error
// // // console.log("The end day can't be less than start day");
// // console.log('2');
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", false);
// // $("#loginerrorBox p").html("The end date cannot be less than start date");
// // hideLoginErrorBox();
// // }else{//end day is greater than start day
// // //ok
// // //call function to compute end date, return date and number of days
// // var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // $leaveDetails = computeLeaveDetails23(a,b);
// // $leaveDetails = JSON.parse($leaveDetails);
// // $message = $leaveDetails['message'];
// // $workingDays = $leaveDetails['workingDays'];
// // $handleHolidayFallingOnWeekend = $leaveDetails['handleHolidayFallingOnWeekend'];
// // console.log($leaveDetails);
// // if($message === "OK"){
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of holidays"+$NoOfHolidays);
// // console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// // $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// // console.log($handleHolidayFallingOnWeekend+"isss"+$workingDaysApplied);
// // $workingDaysApplied = parseInt($workingDaysApplied)+parseInt($handleHolidayFallingOnWeekend)
// // $("#daysToApply").val($workingDaysApplied);//set the days applied
// // //subtract from the number of days applied
// // //get the number of holidays between the dates selected
// // // $workingDaysApplied = parseInt($workingDaysApplied);
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log($availableDays+" remaingi days"+$workingDaysApplied);
// // $remaingDays = $availableDays - $workingDaysApplied;
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// // console.log($workingDays+"sdsdsd");
// // console.log("Return Date info "+$returnDate);
// // $returnDateArray = $returnDate.split("k");
// // $returnDate = $returnDateArray[0].split("/");
// // $month = $returnDate[0];
// // $day = $returnDate[1];
// // $combi = $month+"/"+$day;
// // if($combi == "12/26" || $combi == "12/27" || $combi == "12/28" || $combi == "12/29" || $combi == "12/30" || $combi == "12/31"){
// // $returnDate = "3/01/"+(parseInt($returnDate[2])+parseInt(1));
// // }else{
// // $returnDate = $returnDateArray[0];
// // }
// // console.log("Return Datess"+$returnDate);
// // // $("#returnDate").val($returnDateArray[0]);
// // $("#returnDate").val($returnDate);
// // }else{
// // console.log("Error computing number of working days");
// // }
// // $("#applyLeave").prop("disabled", false);
// // console.log("Data appears fine ");
// // }
// // }else if($endDate[0] > $startDate[0]){//if the end month is greater than start month
// // //call function to compute end date and and number of days
// // //okconsole.log("Error");
// // var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// // var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// // $leaveDetails = computeLeaveDetails23(a,b);
// // $leaveDetails = JSON.parse($leaveDetails);
// // $message = $leaveDetails['message'];
// // $workingDays = $leaveDetails['workingDays'];
// // console.log("Just before leavedetails "+$leaveDetails['message']);
// // if($message === "OK"){
// // console.log("Just before leavedetails");
// // //compute remaining days
// // //get the number of holidays between the dates selected
// // //subtract from the number of days applied
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of "+$NoOfHolidays);
// // console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// // $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// // $("#daysToApply").val($workingDaysApplied);//set the days applied
// // //subtract from the number of days applied
// // //get the number of holidays between the dates selected
// // // $workingDaysApplied = parseInt($workingDaysApplied);
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log("Days applied "+$workingDaysApplied+" Days available xxx "+$availableDays);
// // $remaingDays = $availableDays - $workingDaysApplied;
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// // console.log("Return Date info "+$returnDate);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// // }else{
// // console.log("Error computing number of working days");
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html($message);
// // hideLoginErrorBox();
// // }
// // $("#applyLeave").prop("disabled", false);
// // console.log("End month is greater thus ok");
// // }else{
// // console.log('3');
// // console.log($endDate+" "+$startDate);
// // console.log("condition not tested "+$endDate[0]+ " strat month "+$startDate[0]);
// // }
// // }
// // }else{
// // console.log($resp['message']);
// // $("#startDate").val('');
// // $("#endDate").val('');
// // $("#endDate").prop("disabled", true);
// // $("#loginerrorBox p").html($resp['message']);
// // hideLoginErrorBox();
// // }
// // }
// // });
// // });
// $("#endDate").change(function(){
// $startDate = $('#startDate').val();
// $endDate = $('#endDate').val();
// $handleHolidayFallingOnWeekend = 0;
// $.ajax({
// url:$validateStartDateUrl,
// data:{"startDate":$startDate,"endDate":$endDate},
// type:'POST',
// success:function($resp,status){
// $resp = JSON.parse($resp);
// $status = parseInt($resp['status']);
// if($status == 0){
// $startDate = $startDate.split("/");
// $endDate = $endDate.split("/");
// if($startDate.toString() === $endDate.toString()){
// //ok
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// console.log($leaveDetails);
// if($message === "OK"){
// $("#daysToApply").val($workingDays);
// //compute remaining days
// $daysApplied = parseInt($("#daysToApply").val());
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($daysApplied+" "+$availableDays);
// $remaingDays = $availableDays - $daysApplied;
// console.log($daysApplied);
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }else{
// console.log("Error computing number of working days");
// }
// $("#applyLeave").prop("disabled", false);
// console.log("Data appears fine ");
// }else{
// if($endDate[0] < $startDate[0]){//if end month is less than start month
// console.log('End month is less than start year');
// //wrong throw error, except if end year is greater than start year
// if($endDate[2] > $startDate[2]){//if end year is greater than start year
// //ok
// //get remaining days till end of year
// var d = new Date($startDate[2], 11, 31);//end date of year
// var c = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// console.log(d+"current year days comutation "+c);
// $workingDaysTillEndYear = computeLeaveDetails23(c,d);
// $workingDaysTillEndYear = JSON.parse($workingDaysTillEndYear);
// $workingDaysTillEndYearQty = $workingDaysTillEndYear['workingDays'];
// $workingDaysTillEndYearMsg = $workingDaysTillEndYear['message'];
// $handleHolidayFallingOnWeekendCurrentYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// //get remaining days till end of year
// //get days between start of year and end date selected
// $endDate = $endDate.split("/");
// var e = new Date($endDate[2], 00, 01);//first day of first month of end year
// var f = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// console.log($endDate+"Year ");
// console.log(e+"new year days comutation "+f);
// $workingDaysFromYearStart = computeLeaveDetails23(e,f);
// $workingDaysFromYearStart = JSON.parse($workingDaysFromYearStart);
// $workingDaysFromYearStartQty = $workingDaysFromYearStart['workingDays'];
// $workingDaysFromYearStartMsg = $workingDaysFromYearStart['message'];
// $handleHolidayFallingOnWeekendNextYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// //get days between start of year and end date selected
// //add the two days to get total days applied
// console.log("$handleHolidayFallingOnWeekendCurrentYear"+$handleHolidayFallingOnWeekendCurrentYear+"Working days"+$workingDaysTillEndYearQty);
// console.log($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// console.log(($workingDaysFromYearStartQty + $workingDaysTillEndYearQty));
// console.log("$handleHolidayFallingOnWeekendNextYear"+$handleHolidayFallingOnWeekendNextYear+"Working days"+$workingDaysFromYearStartQty );
// // $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)-($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)
// //add the two days to get total days applied
// $daysAvailable = $("#daysAvaliable").val();
// //check if days applied is more than days entitled
// if($daysAvailable < $totalDaysApplied){
// $("#startDate").val('');
// $("#endDate").val('');
// $("#daysToApply").val('');
// $("#daysRemaining").val('');
// $("#returnDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// hideLoginErrorBox();
// }else{
// console.log("End month is less than start month");
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of "+$NoOfHolidays);
// console.log("Days Less holiday = "+($totalDaysApplied-$NoOfHolidays));
// $("#daysToApply").val($totalDaysApplied);
// $remaingDays = parseInt($("#daysAvaliable").val()) - parseInt($("#daysToApply").val());
// $("#daysRemaining").val($remaingDays);
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$totalDaysApplied);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// $("#applyLeave").prop("disabled", false);
// }
// }else{
// console.log('1');
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html("The end date cannot be less than start date");
// hideLoginErrorBox();
// }
// }else if($endDate[0] == $startDate[0]){//if month is equal check if dates are valid
// //validate the days of the week
// if($endDate[1] < $startDate[1]){//if end day is less than start day
// //wrong
// //display error
// // console.log("The end day can't be less than start day");
// console.log('2');
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", false);
// $("#loginerrorBox p").html("The end date cannot be less than start date");
// hideLoginErrorBox();
// }else{//end day is greater than start day
// //ok
// //call function to compute end date, return date and number of days
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// $handleHolidayFallingOnWeekend = $leaveDetails['handleHolidayFallingOnWeekend'];
// console.log($leaveDetails);
// if($message === "OK"){
// // $("#daysToApply").val($workingDays);
// // //compute remaining days
// // $daysApplied = parseInt($("#daysToApply").val());
// // $availableDays = parseInt($("#daysAvaliable").val());
// // console.log($daysApplied+" "+$availableDays);
// // $remaingDays = $availableDays - $daysApplied;
// // console.log($daysApplied);
// // $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// // console.log("Number of "+$NoOfHolidays);
// // console.log("Days Less holiday = "+($daysApplied-parseInt($NoOfHolidays)));
// // $("#daysRemaining").val($remaingDays);
// // //compute remaining days
// // $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// // $returnDateArray = $returnDate.split("k");
// // $("#returnDate").val($returnDateArray[0]);
// //compute remaining days
// //get the number of holidays between the dates selected
// //subtract from the number of days applied
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of holidays"+$NoOfHolidays);
// console.log("ze working days"+$workingDays);
// console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// console.log($handleHolidayFallingOnWeekend+"isss"+$workingDaysApplied);
// $("#daysToApply").val($workingDaysApplied);
// // $workingDaysApplied = parseInt($workingDaysApplied)+parseInt($handleHolidayFallingOnWeekend)
// // $("#daysToApply").val($workingDaysApplied);//set the days applied
// //subtract from the number of days applied
// //get the number of holidays between the dates selected
// // $workingDaysApplied = parseInt($workingDaysApplied);
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($availableDays+" remaingi days"+$workingDaysApplied);
// $remaingDays = $availableDays - $workingDaysApplied;
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// console.log($workingDays+"sdsdsd");
// console.log("Return Date info "+$returnDate);
// $returnDateArray = $returnDate.split("k");
// $returnDate = $returnDateArray[0].split("/");
// $month = $returnDate[0];
// $day = $returnDate[1];
// $combi = $month+"/"+$day;
// if($combi == "12/26" || $combi == "12/27" || $combi == "12/28" || $combi == "12/29" || $combi == "12/30" || $combi == "12/31"){
// $returnDate = "3/01/"+(parseInt($returnDate[2])+parseInt(1));
// }else{
// $returnDate = $returnDateArray[0];
// }
// console.log("Return Datess"+$returnDate);
// // $("#returnDate").val($returnDateArray[0]);
// $("#returnDate").val($returnDate);
// }else{
// console.log("Error computing number of working days");
// }
// $("#applyLeave").prop("disabled", false);
// console.log("Data appears fine ");
// }
// }else if($endDate[0] > $startDate[0]){//if the end month is greater than start month
// //call function to compute end date and number of days
// //okconsole.log("Error");
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// console.log($leaveDetails);
// if($message === "OK"){
// //compute remaining days
// //get the number of holidays between the dates selected
// //subtract from the number of days applied
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of "+$NoOfHolidays);
// console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// $("#daysToApply").val($workingDaysApplied);//set the days applied
// //subtract from the number of days applied
// //get the number of holidays between the dates selected
// // $workingDaysApplied = parseInt($workingDaysApplied);
// $availableDays = parseInt($("#daysAvaliable").val());
// $remaingDays = $availableDays - $workingDaysApplied;
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// console.log("Return Date info "+$returnDate);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }else{
// console.log("Error computing number of working days");
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($message);
// hideLoginErrorBox();
// }
// $("#applyLeave").prop("disabled", false);
// console.log("End month is greater thus ok");
// }else{
// console.log('3');
// console.log($endDate+" "+$startDate);
// console.log("condition not tested "+$endDate[0]+ " strat month "+$startDate[0]);
// }
// }
// }else{
// console.log($resp['message']);
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// }
// });
// });
// window.workingDaysBetweenDates = function(startDate, endDate) {
// var millisecondsPerDay = 86400 * 1000;
// startDate.setHours(0,0,0,1);
// endDate.setHours(23,59,59,999);
// var diff = endDate - startDate;
// var days = Math.ceil(diff / millisecondsPerDay);
// // Subtract two weekend days for every week in between
// var weeks = Math.floor(days / 7);
// // Handle special cases
// var startDay = startDate.getDate();
// var endDay = endDate.getDate();
// if(startDay == endDay){
// resp = '{"leavedays":1,"totalDays":1}';
// }else{
// workingdays = days - (weeks * 2);
// resp = '{"leavedays":'+workingdays+',"totalDays":'+days+'}';
// }
// return resp;
// }
// //On End Date Change
// $("#applyLeave").click(function(e){
// e.preventDefault();
// $daysRemaining = $("#daysRemaining").val();
// $daysToApply = $("#daysToApply").val();
// $endDate = $('#endDate').val();
// $startDate = $('#startDate').val();
// $.ajax({
// url:$validateStartDateUrl,
// data:{"startDate":$startDate,"endDate":$endDate},
// type:'POST',
// success:function($resp,status){
// console.log("validate date hase run "+$resp);
// $resp = JSON.parse($resp);
// $status = parseInt($resp['status']);
// if($status == 0){
// if($daysRemaining == "" || $daysRemaining == undefined || $daysRemaining == null || $daysToApply == null || $daysToApply == undefined || $daysToApply == ""){
// $("#loginerrorBox p").html("Complete the application form.");
// console.log("Complete the application form.");
// hideLoginErrorBox();
// }else{
// $(".overlay").show();
// $startDate = $('#startDate').val();
// $daysNID = $('#absenceReason').val();
// $daysNID = $daysNID.split("k");
// $leaveTypeID = $daysNID[1];
// $daysAvaliable = $('#daysAvaliable').val();
// $daysToApply = $('#daysToApply').val();
// $daysRemaining = $('#daysRemaining').val();
// $endDate = $('#endDate').val();
// $totalDays = $("#totalDaysApplied").val();
// $returnDate = $("#returnDate").val();
// // console.log("Return Date "+$returnDate+" End Date "+$endDate);
// // $comment = $('#comment').val();
// //format start date
// $dateValue = $startDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $startDate = $year+'/'+$month+'/'+$dayofWeek;
// //format start date
// //format end date
// $dateValue = $endDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $endDate = $year+'/'+$month+'/'+$dayofWeek;
// //format end date
// //format return date
// $dateValue = $returnDate.split('/');
// $year = $dateValue[2];
// $month = $dateValue[0];
// $dayofWeek = $dateValue[1];
// $returnDate = $year+'/'+$month+'/'+$dayofWeek;
// //format return date
// console.log($endDate+"endDate StartDate"+$startDate);
// $confirm = confirm("Are your sure");
// if($confirm == true){
// $.post($applyLeaveUrl,{"startDate":$startDate, "totalDaysApplied":$totalDays, "endDate":$endDate,"returnDate":$returnDate,"absenceReason":$leaveTypeID, "daysApplied":$daysToApply, "daysAvaliable":$daysAvaliable},function(data, status){
// console.log(data);
// $resp = JSON.parse(data);
// $status =$resp['status'];
// if($status == 0){
// $(".overlay").hide();
// $message = $resp['message'];
// $("#loginSuccessBox p").html($resp['message']);
// hideLoginSuccessBox();
// setTimeout(function(){
// location.reload();
// },10000);
// }else{
// $(".overlay").hide();
// $message = $resp['message'];
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// });
// }else{
// $(".overlay").hide();
// }
// }
// }else{
// console.log($resp['message']);
// $("#startDate").val('');
// $("#endDate").val('');
// $("#daysRemaining").val('');
// $("#returnDate").val('');
// $("#daysToApply").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// }
// });
// });
// //apply leave
// });
//subracts public holidays
//live at kippra
$(document).ready(function(){
//plugin for start Date
$('#startDate').daterangepicker({
singleDatePicker: true,
calender_style: "picker_4",
minDate: new Date(),
isInvalidDate: function(date){
/*
validates the following dates
1) 1st Jan- 2nd Jan
2) 1st May
3) 25th Dec - 31st Dec
4) 1st June
5) 20th Oct
6) 12th Dec
*/
if((date.day() == 0 )|| (date.day() == 6)||(date.format('MM-DD') === '01-01') || (date.format('MM-DD') === '01-02') || (date.format('MM-DD') === '12-25')|| (date.format('MM-DD') === '12-12') || (date.format('MM-DD') === '12-26') || (date.format('MM-DD') === '12-31')|| (date.format('MM-DD') === '05-01')|| (date.format('MM-DD') === '06-01') || (date.format('MM-DD') === '10-20')){
// || (date.format('MM-DD') === '12-27') || (date.format('MM-DD') === '12-28')|| (date.format('MM-DD') === '12-29')|| (date.format('MM-DD') === '12-30') ||
return true;
}else{
return false;
}
},
}, function(start, end, label) {
//console.log(start.toISOString(), end.toISOString(), label);
});
//plugin for start date
//plugin for end date
$('#endDate').daterangepicker({
singleDatePicker: true,
calender_style: "picker_4",
minDate: new Date(),
isInvalidDate: function(date){
/*
validates the following dates
1) 1st Jan- 2nd Jan
2) 1st May
3) 25th Dec - 31st Dec
4) 1st June
5) 20th Oct
6) 12th Dec
*/
if((date.day() == 0 )|| (date.day() == 6)||(date.format('MM-DD') === '01-01') || (date.format('MM-DD') === '01-02') || (date.format('MM-DD') === '12-25')|| (date.format('MM-DD') === '12-12') || (date.format('MM-DD') === '12-26') || (date.format('MM-DD') === '12-31')|| (date.format('MM-DD') === '05-01')|| (date.format('MM-DD') === '06-01') || (date.format('MM-DD') === '10-20')){
// || (date.format('MM-DD') === '12-27') || (date.format('MM-DD') === '12-28')|| (date.format('MM-DD') === '12-29')|| (date.format('MM-DD') === '12-30')
return true;
}else{
return false;
}
},
}, function(start, end, label) {
//console.log(start.toISOString(), end.toISOString(), label);
});
//plugin for end date
$('#startDate').prop('disabled',true);
$('#endDate').prop('disabled',true);
$('#daysAvaliable').prop('disabled', true);
$('#daysToApply').prop('disabled', true);
$('#daysRemaining').prop('disabled', true);
$('#returnDate').prop('disabled', true);
$("#applyLeave").prop('disabled', true);
$("#startDate").change(function(){
$('#endDate').val('');
$('#daysToApply').val('');
$('#daysRemaining').val('');
$('#returnDate').val('');
$("#applyLeave").prop('disabled', true);
$daysNID = $("#absenceReason").val();
$daysNID = $daysNID.split("k");
$leavedays = Math.ceil($daysNID[0]);
$leaveTypeID = $daysNID[1];
console.log($leaveTypeID);
if($leaveTypeID == "MATERNITY" || $leaveTypeID == "PATERNITY"){
console.log("disable entry of end date");
$startDate = $('#startDate').val();
$endDate = $('#endDate').val();
//format date
$dateValue = $startDate.split('/');
$year = $dateValue[2];
$month = $dateValue[0];
$dayofWeek = $dateValue[1];
$startDate = $month+'/'+$dayofWeek+'/'+$year;
// format date
$expectedReturnDate = getReturnDateIncludingWeekends($startDate,$leavedays);
// console.log("getReturnDateIncludingWeekends has run "+getReturnDateIncludingWeekends($startDate,$leavedays));
$endAndReturnDate = $expectedReturnDate.split('k');
$("#daysToApply").val($leavedays);
$("#returnDate").val($endAndReturnDate[0]);
$("#endDate").val($endAndReturnDate[1]);
$('#daysRemaining').val(0);
$("#applyLeave").prop('disabled', false);
}else{
$("#endDate").prop('disabled', false);
$days = $("#daysToApply").val();
if($days == null || $days == undefined || $days == ""){
// console.log("undefined");
}else{
$startDate = $('#startDate').val();
//format date
$dateValue = $startDate.split('/');
$year = $dateValue[2];
$month = $dateValue[0];
$dayofWeek = $dateValue[1];
$newDate = $month+'/'+$dayofWeek+'/'+$year;
//format date
$expectedReturnDate = getDateFromStartDateLeaveDays($newDate,$days);
$expectedReturnDate = $expectedReturnDate.split("k");
$("#returnDate").val($expectedReturnDate[0]);
//sex
//$("#endDate").val($expectedReturnDate[1]);
}
}
});
//apply leave
//auto populates the number of days field during leave application
$("#absenceReason").change(function(){
$('#startDate').prop('disabled',false);
$('#endDate').prop('disabled',true);
$('#endDate').val('');
$('#startDate').val('');
$('#daysToApply').val('');
$('#daysRemaining').val('');
$('#daysAvaliable').val('');
$('#returnDate').val('');
$("#applyLeave").prop('disabled', true);
$daysNID = $("#absenceReason").val();
$daysNID = $daysNID.split("k");
$leavedays = Math.ceil($daysNID[0]);
$leaveTypeID = $daysNID[1];
$("#daysAvaliable").val($leavedays);
if($leaveTypeID == "MATERNITY" || $leaveTypeID == "PATERNITY"){
$("#daysToApply").prop('disabled', true);
$('#endDate').prop('disabled', true);
$startDate = $('#startDate').val();
//format date
$dateValue = $startDate.split('/');
$year = $dateValue[2];
$month = $dateValue[0];
$dayofWeek = $dateValue[1];
$startDate = $month+'/'+$dayofWeek+'/'+$year;
// format date
if($startDate === '/undefined/undefined' || $startDate === '/undefined/undefined' || $startDate === '/undefined/undefined'){
}else{
$expectedReturnDate = getDateFromStartDateLeaveDays($startDate,$leavedays);
$endAndReturnDate = $expectedReturnDate.split('k');
console.log(" End and return Date "+$endAndReturnDate);
$("#returnDate").val($endAndReturnDate[0]);
$("#endDate").val($endAndReturnDate[1]);
$("#applyLeave").prop('disabled', false);
}
}else{
$('#daysToApply').attr('max', $leavedays);
$('#daysToApply').attr('min', parseInt(0));
// $("#daysToApply").prop('disabled', false);
}
});
//auto populates the number of days field during leave application
window.getNumberOfHolidaysWithinDateRange = function(){
//ADJUSTS FOR HOLIDAYS
//get the holidays
//check to see if holidays fall within the date range applied
//do necessary adjustements
$holidaysWithinRange = 0;
$startDate = $("#startDate").val();
$endDate = $("#endDate").val();
$startDateArray = $startDate.split("/");
$endDateArray = $endDate.split("/");
$currentYear = new Date().getFullYear();
$k = 0;
while($k < $holidays.length){
//loop through holidays to check if any falls within range
$holidayDate = $holidays[$k]['holidayDate'];//compare this to the start date end date range provided
$holidayDateArray = $holidayDate.split("-");
// console.log("Start Date broken down: Year "+$startDateArray[2]+" Month "+$startDateArray[0]+" Day "+$startDateArray[1]);
// console.log("End Date broken down: Year "+$endDateArray[2]+" Month "+$endDateArray[0]+" Day "+$endDateArray[1]);
// console.log("Check year broken down: Year "+$currentYear+" Month "+$holidayDateArray[1]+" Day "+$holidayDateArray[0]);
//FromDate >= @startDate AND ToDate <= @endDate
var from = new Date($startDateArray[2], parseInt($startDateArray[0])-1, $startDateArray[1]); // -1 because months are from 0 to 11
var to = new Date($endDateArray[2], parseInt($endDateArray[0])-1, $endDateArray[1]);
var check = new Date($currentYear, parseInt($holidayDateArray[1])-1, $holidayDateArray[0]);
// console.log("From "+from+" To "+to+" Check "+check);
//check if this datae falls on weekedn, if not it is okay increament else don't increament
$dayOfWeek = check.getDay();
$dayOfWeek = parseInt($dayOfWeek);
if((check > from && check < to) && ($dayOfWeek == 6 || $dayOfWeek == 0)){
//if it falls on range increase days buy 1
console.log("falls in range");
// console.log("From: "+from);
// console.log("To: "+to);
// console.log("Check: "+check);
// console.log("DOW "+$dayOfWeek);
$holidaysWithinRange++;
}else{
//if it doesn't fall in range DO NOTHINg
console.log("not in range");
// console.log("From: "+from);
// console.log("To: "+to);
// console.log("Check: "+check);
}
// if($holidayDate >= $startDateDnM && $holidayDate <= $endDateDnM){//if the
//
// }else{
//
// }
$k++;
}
//ADJUSTS FOR HOLIDAYS
return $holidaysWithinRange;
}
//gets the holidays set in the system
window.getHolidays = function(){
$.ajax({
url:$getHolidays,
data:{},
type:'POST',
success:function($resp,status){
console.log($resp);
$holidays = JSON.parse($resp);
// console.log($holidays[0]['holidayName']);
return $holidays;
}
});
}
getHolidays();//this is called to pre-popuate the holidays variable
//gets the holidays set in the system
function getReturnDateIncludingWeekends($startDate,$leavedays){
//compute return date
var date = new Date($startDate);
var newdate = new Date(date);
$newdayvalue = parseInt((newdate.getDate()) + parseInt($leavedays));
newdate.setDate($newdayvalue);
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
var returnDate = mm + '/' + dd + '/' + y;
//compute return date
//compute end date
var date = new Date($startDate);
var newdate = new Date(date);
$newdayvalue = parseInt((newdate.getDate()) + parseInt($leavedays)-1);
newdate.setDate($newdayvalue);
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
var leaveEndDate = mm + '/' + dd + '/' + y;
//compute end date
// console.log('getReturnDateIncludingWeekends '+returnDate+'leave end date'+leaveEndDate);
return returnDate+'k'+leaveEndDate;
}
function getDateFromStartDateLeaveDays($startDate,$leavedays){
//get the number weeks add the weeks
var date = new Date($startDate);
var newdate = new Date(date);
$k = 0;
$("#totalDaysApplied").val($leavedays);//hidden field
// $totalDaysApplied = $leavedays + $totalWeekendDaysOnLeave;
while($k < $leavedays){
//add the number of days
// $newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
// newdate.setDate($newdayvalue);
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
$t = new Date(y,(mm-1),dd);
$dayOfWeek = $t.getDay();
$dayOfWeek = parseInt($dayOfWeek);
// console.log("DOW "+$dayOfWeek);
if($dayOfWeek == 6 || $dayOfWeek == 0){
// console.log("weekend");
$newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
newdate.setDate($newdayvalue);
console.log(newdate);
$k--;
}else{
// console.log("not weekend");
$newdayvalue = parseInt((newdate.getDate()) + parseInt(1));
newdate.setDate($newdayvalue);
console.log(newdate);
}
$k++;
}
var leaveEndDate = mm + '/' + dd + '/' + y;
$newdayvalue = parseInt(newdate.getDate());
newdate.setDate($newdayvalue);
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
// console.log("return date"+dd+" "+mm+" "+y);
$t = new Date(y,(mm-1),dd);
// console.log("return date"+$t);
$dayOfWeek = $t.getDay();
$dayOfWeek = parseInt($dayOfWeek);
// console.log('ODW '+$dayOfWeek);
if($dayOfWeek == 6){//saturday
var $newDate = mm + '/' + dd + '/' + y;
var date = new Date($newDate);
var newdate = new Date(date);
$newdayvalue = parseInt(newdate.getDate()) + parseInt((2));
newdate.setDate($newdayvalue);
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
// dd = parseInt(dd)+2;
}else if($dayOfWeek == 0){//sunday
var $newDate = mm + '/' + dd + '/' + y;
var date = new Date($newDate);
var newdate = new Date(date);
$newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
newdate.setDate($newdayvalue);
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
// dd = parseInt(dd)+1;
}else{}
var returnDate = mm + '/' + dd + '/' + y;
return returnDate+'k'+leaveEndDate;
// return JSON.stringify('{"leavedays":'+someFormattedDate+',"totalDays":'+someFormattedDate+'}');
}
//get expected return date
window.computeLeaveDetails = function(a,b){
$leavDaysArray = JSON.parse(workingDaysBetweenDates(a,b));//gets you the working days and total leave days applied.
$leavedays = $leavDaysArray['leavedays'];
var date = new Date(a);
var newdate = new Date(date);
//if start date plus days applied covers weekend skip weekend
$k = 1;
$workingDays = 0;
$leavedays = $leavDaysArray['totalDays']+1;
while($k < $leavedays){
//startDate broken down
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
//startDate broken down
//put the broken up date together to get the day you applied. monday-sunday
$t = new Date(y,(mm-1),dd);
$dayOfWeek = $t.getDay();
$dayOfWeek = parseInt($dayOfWeek);
//put the broken up date together to get the day you applied. monday-sunday
// console.log('Day of week sun-mon '+$dayOfWeek);
if($dayOfWeek == 0 || $dayOfWeek == 6){
$newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
newdate.setDate($newdayvalue);
}else{
//increase day by one
$newdayvalue = parseInt(newdate.getDate()) + parseInt((1));
newdate.setDate($newdayvalue);
$workingDays++;
// console.log("Working Days "+$workingDays+" Day of week "+$workingDays);
// console.log(newdate);
//increase day by one
}
//startDate broken down
var dd = ("0" + newdate.getDate()).slice(-2);;//newdate.getDate();
var mm = ("0" + (newdate.getMonth() + 1)).slice(-2);
var y = newdate.getFullYear();
//startDate broken down
// console.log("New Date after adding 1 "+mm+"/"+dd+"/"+y);
$k++;
}
$daysEntitled = $("#daysAvaliable").val();
if($workingDays > $daysEntitled){
$("#startDate").val("");
$("#absenceReason").val("");
$("#daysAvaliable").val("");
$("#daysToApply").val("");
$("#daysRemaining").val("");
$("#returnDate").val("");
$("#endDate").val("");
$('#startDate').prop('disabled',true);
$('#endDate').prop('disabled',true);
var element = document.getElementById("applyLeave")
element.setAttribute('disabled','disabled')
$("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
hideLoginErrorBox();
}else{
//display working days
$("#daysToApply").val($workingDays);
//display working days
//get expected return date
//sex
$expectedReturnDate = getDateFromStartDateLeaveDays($('#startDate').val(),$leavDaysArray['totalDays']);
$expectedReturnDate = $expectedReturnDate.split("k");
$("#returnDate").val($expectedReturnDate[0]);
//get expected return date
//compute remaining days
$daysApplied = parseInt($("#daysToApply").val());
$availableDays = parseInt($("#daysAvaliable").val());
console.log($daysApplied+" "+$availableDays);
$remaingDays = $availableDays - $daysApplied;
console.log($daysApplied);
$("#daysRemaining").val($remaingDays);
//compute remaining days
}
}
//On End Date Change
// $("#endDate").change(function(){
// $startDate = $('#startDate').val();
// $endDate = $('#endDate').val();
// $handleHolidayFallingOnWeekend = 0;
// $.ajax({
// url:$validateStartDateUrl,
// data:{"startDate":$startDate,"endDate":$endDate},
// type:'POST',
// success:function($resp,status){
// $resp = JSON.parse($resp);
// $status = parseInt($resp['status']);
// if($status == 0){
// $startDate = $startDate.split("/");
// $endDate = $endDate.split("/");
// if($startDate.toString() === $endDate.toString()){
// //ok
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// console.log($leaveDetails);
// if($message === "OK"){
// $("#daysToApply").val($workingDays);
// //compute remaining days
// $daysApplied = parseInt($("#daysToApply").val());
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($daysApplied+" "+$availableDays);
// $remaingDays = $availableDays - $daysApplied;
// console.log($daysApplied);
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }else{
// console.log("Error computing number of working days");
// }
// $("#applyLeave").prop("disabled", false);
// console.log("Data appears fine ");
// }else{
// if($endDate[0] < $startDate[0]){//if end month is less than start month
// console.log('End month is less than start year');
// //wrong throw error, except if end year is greater than start year
// if($endDate[2] > $startDate[2]){//if end year is greator than start year
// //ok
// //get remaining days till end of year
// var d = new Date($startDate[2], 11, 31);//end date of year
// var c = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// console.log(d+"current year days comutation "+c);
// $workingDaysTillEndYear = computeLeaveDetails23(c,d);
// $workingDaysTillEndYear = JSON.parse($workingDaysTillEndYear);
// $workingDaysTillEndYearQty = $workingDaysTillEndYear['workingDays'];
// $workingDaysTillEndYearMsg = $workingDaysTillEndYear['message'];
// $handleHolidayFallingOnWeekendCurrentYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// //get remaining days till end of year
// //get days between start of year and end date selected
// $endDate = $endDate.split("/");
// var e = new Date($endDate[2], 00, 01);//first day of first month of end year
// var f = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// console.log($endDate+"Year ");
// console.log(e+"new year days comutation "+f);
// $workingDaysFromYearStart = computeLeaveDetails23(e,f);
// $workingDaysFromYearStart = JSON.parse($workingDaysFromYearStart);
// $workingDaysFromYearStartQty = $workingDaysFromYearStart['workingDays'];
// $workingDaysFromYearStartMsg = $workingDaysFromYearStart['message'];
// $handleHolidayFallingOnWeekendNextYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
// //get days between start of year and end date selected
// console.log($workingDaysFromYearStartQty+" $workingDaysTillEndYearQty "+$handleHolidayFallingOnWeekendNextYear);
// //add the two days to get total days applied
// console.log("$handleHolidayFallingOnWeekendCurrentYear"+$handleHolidayFallingOnWeekendCurrentYear+"Working days"+$workingDaysTillEndYearQty);
// console.log($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// console.log(($workingDaysFromYearStartQty + $workingDaysTillEndYearQty));
// console.log("$handleHolidayFallingOnWeekendNextYear"+$handleHolidayFallingOnWeekendNextYear+"Working days"+$workingDaysFromYearStartQty );
// $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)-($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
// //add the two days to get total days applied
// $daysAvailable = $("#daysAvaliable").val();
// //check of days applied is more than days entitled
// if($daysAvailable < $totalDaysApplied){
// $("#startDate").val('');
// $("#endDate").val('');
// $("#daysToApply").val('');
// $("#daysRemaining").val('');
// $("#returnDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
// hideLoginErrorBox();
// }else{
// console.log("End month is less than start month");
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of "+$NoOfHolidays+"$totalDaysApplied"+$totalDaysApplied);
// console.log("Days Less holiday = "+($totalDaysApplied-$NoOfHolidays));
// $("#daysToApply").val($totalDaysApplied);
// $remaingDays = parseInt($("#daysAvaliable").val()) - parseInt($("#daysToApply").val());
// $("#daysRemaining").val($remaingDays);
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$totalDaysApplied);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }
// }else{
// console.log('1');
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html("The end date cannot be less than start date");
// hideLoginErrorBox();
// }
// }else if($endDate[0] == $startDate[0]){//if month is equal check if dates are valid
// //validate the days of the week
// if($endDate[1] < $startDate[1]){//if end day is less than start day
// //wrong
// //display error
// // console.log("The end day can't be less than start day");
// console.log('2');
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", false);
// $("#loginerrorBox p").html("The end date cannot be less than start date");
// hideLoginErrorBox();
// }else{//end day is greater than start day
// //ok
// //call function to compute end date, return date and number of days
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// $handleHolidayFallingOnWeekend = $leaveDetails['handleHolidayFallingOnWeekend'];
// console.log($leaveDetails);
// if($message === "OK"){
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of holidays"+$NoOfHolidays);
// console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// console.log($handleHolidayFallingOnWeekend+"isss"+$workingDaysApplied);
// $workingDaysApplied = parseInt($workingDaysApplied)+parseInt($handleHolidayFallingOnWeekend)
// $("#daysToApply").val($workingDaysApplied);//set the days applied
// //subtract from the number of days applied
// //get the number of holidays between the dates selected
// // $workingDaysApplied = parseInt($workingDaysApplied);
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log($availableDays+" remaingi days"+$workingDaysApplied);
// $remaingDays = $availableDays - $workingDaysApplied;
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// console.log($workingDays+"sdsdsd");
// console.log("Return Date info "+$returnDate);
// $returnDateArray = $returnDate.split("k");
// $returnDate = $returnDateArray[0].split("/");
// $month = $returnDate[0];
// $day = $returnDate[1];
// $combi = $month+"/"+$day;
// if($combi == "12/26" || $combi == "12/27" || $combi == "12/28" || $combi == "12/29" || $combi == "12/30" || $combi == "12/31"){
// $returnDate = "3/01/"+(parseInt($returnDate[2])+parseInt(1));
// }else{
// $returnDate = $returnDateArray[0];
// }
// console.log("Return Datess"+$returnDate);
// // $("#returnDate").val($returnDateArray[0]);
// $("#returnDate").val($returnDate);
// }else{
// console.log("Error computing number of working days");
// }
// $("#applyLeave").prop("disabled", false);
// console.log("Data appears fine ");
// }
// }else if($endDate[0] > $startDate[0]){//if the end month is greater than start month
// //call function to compute end date and and number of days
// //okconsole.log("Error");
// var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
// var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
// $leaveDetails = computeLeaveDetails23(a,b);
// $leaveDetails = JSON.parse($leaveDetails);
// $message = $leaveDetails['message'];
// $workingDays = $leaveDetails['workingDays'];
// console.log("Just before leavedetails "+$leaveDetails['message']);
// if($message === "OK"){
// console.log("Just before leavedetails");
// //compute remaining days
// //get the number of holidays between the dates selected
// //subtract from the number of days applied
// $NoOfHolidays = getNumberOfHolidaysWithinDateRange();
// console.log("Number of "+$NoOfHolidays);
// console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
// $workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
// $("#daysToApply").val($workingDaysApplied);//set the days applied
// //subtract from the number of days applied
// //get the number of holidays between the dates selected
// // $workingDaysApplied = parseInt($workingDaysApplied);
// $availableDays = parseInt($("#daysAvaliable").val());
// console.log("Days applied "+$workingDaysApplied+" Days available xxx "+$availableDays);
// $remaingDays = $availableDays - $workingDaysApplied;
// $("#daysRemaining").val($remaingDays);
// //compute remaining days
// $returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
// console.log("Return Date info "+$returnDate);
// $returnDateArray = $returnDate.split("k");
// $("#returnDate").val($returnDateArray[0]);
// }else{
// console.log("Error computing number of working days");
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($message);
// hideLoginErrorBox();
// }
// $("#applyLeave").prop("disabled", false);
// console.log("End month is greater thus ok");
// }else{
// console.log('3');
// console.log($endDate+" "+$startDate);
// console.log("condition not tested "+$endDate[0]+ " strat month "+$startDate[0]);
// }
// }
// }else{
// console.log($resp['message']);
// $("#startDate").val('');
// $("#endDate").val('');
// $("#endDate").prop("disabled", true);
// $("#loginerrorBox p").html($resp['message']);
// hideLoginErrorBox();
// }
// }
// });
// });
$("#endDate").change(function(){
$startDate = $('#startDate').val();
$endDate = $('#endDate').val();
$handleHolidayFallingOnWeekend = 0;
$.ajax({
url:$validateStartDateUrl,//chcecks if the user has applied for leave within this date range
data:{"startDate":$startDate,"endDate":$endDate},
type:'POST',
success:function($resp,status){
$resp = JSON.parse($resp);
$status = parseInt($resp['status']);
if($status == 0){//the date range selected is valid
$startDate = $startDate.split("/");
$endDate = $endDate.split("/");
if($startDate.toString() === $endDate.toString()){//if the start and end date are equal the user wants to apply for a single leave day thus do the computations within this if else
//ok
var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
$leaveDetails = computeLeaveDetails23(a,b);//defined in the main view
$leaveDetails = JSON.parse($leaveDetails);
$message = $leaveDetails['message'];
$workingDays = $leaveDetails['workingDays'];
console.log($leaveDetails);
if($message === "OK"){//the days the user applied for are less than the leave days available thus OK
$("#daysToApply").val($workingDays);
//compute remaining days
$daysApplied = parseInt($("#daysToApply").val());
$availableDays = parseInt($("#daysAvaliable").val());
console.log($daysApplied+" "+$availableDays);
$remaingDays = $availableDays - $daysApplied;
console.log($daysApplied);
$("#daysRemaining").val($remaingDays);
//compute remaining days
$returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$daysApplied);
$returnDateArray = $returnDate.split("k");
$("#returnDate").val($returnDateArray[0]);
}else{
console.log("Error computing number of working days");
}
$("#applyLeave").prop("disabled", false);
console.log("Data appears fine ");
}else{
if($endDate[0] < $startDate[0]){//if end month is less than start month
console.log('End month is less than start year');
//wrong throw error, except if end year is greater than start year
if($endDate[2] > $startDate[2]){//if end year is greator than start year
//ok
//get remaining days till end of year
var d = new Date($startDate[2], 11, 31);//end date of year
var c = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
console.log(d+"current year days comutation "+c);
$workingDaysTillEndYear = computeLeaveDetails23(c,d);
$workingDaysTillEndYear = JSON.parse($workingDaysTillEndYear);
$workingDaysTillEndYearQty = $workingDaysTillEndYear['workingDays'];
$workingDaysTillEndYearMsg = $workingDaysTillEndYear['message'];
$handleHolidayFallingOnWeekendCurrentYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
//get remaining days till end of year
//get days between start of year and end date selected
$endDate = $endDate.split("/");
var e = new Date($endDate[2], 00, 01);//first day of first month of end year
var f = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
console.log($endDate+"Year ");
console.log(e+"new year days comutation "+f);
$workingDaysFromYearStart = computeLeaveDetails23(e,f);
$workingDaysFromYearStart = JSON.parse($workingDaysFromYearStart);
$workingDaysFromYearStartQty = $workingDaysFromYearStart['workingDays'];
$workingDaysFromYearStartMsg = $workingDaysFromYearStart['message'];
$handleHolidayFallingOnWeekendNextYear = $workingDaysTillEndYear['handleHolidayFallingOnWeekend']
//get days between start of year and end date selected
//add the two days to get total days applied
console.log("$handleHolidayFallingOnWeekendCurrentYear"+$handleHolidayFallingOnWeekendCurrentYear+"Working days"+$workingDaysTillEndYearQty);
console.log($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
console.log(($workingDaysFromYearStartQty + $workingDaysTillEndYearQty));
console.log("$handleHolidayFallingOnWeekendNextYear"+$handleHolidayFallingOnWeekendNextYear+"Working days"+$workingDaysFromYearStartQty );
// $totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)-($handleHolidayFallingOnWeekendCurrentYear+$handleHolidayFallingOnWeekendNextYear);
$totalDaysApplied = ($workingDaysFromYearStartQty + $workingDaysTillEndYearQty)
//add the two days to get total days applied
$daysAvailable = $("#daysAvaliable").val();
//check of days applied is more than days entitled
if($daysAvailable < $totalDaysApplied){
$("#startDate").val('');
$("#endDate").val('');
$("#daysToApply").val('');
$("#daysRemaining").val('');
$("#returnDate").val('');
$("#endDate").prop("disabled", true);
$("#loginerrorBox p").html("You have "+$daysEntitled+" leave days only.");
hideLoginErrorBox();
}else{
console.log("End month is less than start month");
$NoOfHolidays = getNumberOfHolidaysWithinDateRange();
console.log("Number of "+$NoOfHolidays);
console.log("Days Less holiday = "+($totalDaysApplied-$NoOfHolidays));
$("#daysToApply").val($totalDaysApplied-$NoOfHolidays);
$remaingDays = parseInt($("#daysAvaliable").val()) - parseInt($("#daysToApply").val());
$("#daysRemaining").val($remaingDays);
//return and end date
$returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$totalDaysApplied);//gets return Date and leave end date
$returnDateArray = $returnDate.split("k");
console.log(" returnDateArray "+$returnDateArray);
$("#returnDate").val($returnDateArray[0]);
$("#applyLeave").prop("disabled", false);
//return and end date
}
}else{
console.log('1');
$("#startDate").val('');
$("#endDate").val('');
$("#endDate").prop("disabled", true);
$("#loginerrorBox p").html("The end date cannot be less than start date");
hideLoginErrorBox();
}
}else if($endDate[0] == $startDate[0]){//if month is equal check if dates are valid
//validate the days of the week
if($endDate[1] < $startDate[1]){//if end day is less than start day
//wrong
//display error
// console.log("The end day can't be less than start day");
console.log('2');
$("#startDate").val('');
$("#endDate").val('');
$("#endDate").prop("disabled", false);
$("#loginerrorBox p").html("The end date cannot be less than start date");
hideLoginErrorBox();
}else{//end day is greater than start day
//ok
//call function to compute end date, return date and number of days
var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
$leaveDetails = computeLeaveDetails23(a,b);
$leaveDetails = JSON.parse($leaveDetails);
$message = $leaveDetails['message'];
$workingDays = $leaveDetails['workingDays'];
$handleHolidayFallingOnWeekend = $leaveDetails['handleHolidayFallingOnWeekend'];
console.log($leaveDetails);
if($message === "OK"){//the days applied are less than the leave days available thus OK
//compute remaining days
//get the number of holidays between the dates selected
//subtract from the number of days applied
$NoOfHolidays = getNumberOfHolidaysWithinDateRange();
console.log("Number of holidays"+$NoOfHolidays);
console.log("ze working days"+$workingDays);
console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
$workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
console.log($handleHolidayFallingOnWeekend+"isss"+$workingDaysApplied);
$("#daysToApply").val($workingDaysApplied);
// $workingDaysApplied = parseInt($workingDaysApplied)+parseInt($handleHolidayFallingOnWeekend)
// $("#daysToApply").val($workingDaysApplied);//set the days applied
//subtract from the number of days applied
//get the number of holidays between the dates selected
// $workingDaysApplied = parseInt($workingDaysApplied);
$availableDays = parseInt($("#daysAvaliable").val());
console.log($availableDays+" remaingi days"+$workingDaysApplied);
$remaingDays = $availableDays - $workingDaysApplied;
$("#daysRemaining").val($remaingDays);
//compute remaining days
$returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
console.log($workingDays+"sdsdsd");
console.log("Return Date info "+$returnDate);
$returnDateArray = $returnDate.split("k");
$returnDate = $returnDateArray[0].split("/");
$month = $returnDate[0];
$day = $returnDate[1];
$combi = $month+"/"+$day;
if($combi == "12/26" || $combi == "12/27" || $combi == "12/28" || $combi == "12/29" || $combi == "12/30" || $combi == "12/31"){
$returnDate = "3/01/"+(parseInt($returnDate[2])+parseInt(1));
}else{
$returnDate = $returnDateArray[0];
}
console.log("Return Datess"+$returnDate);
// $("#returnDate").val($returnDateArray[0]);
$("#returnDate").val($returnDate);
}else{
console.log("Error computing number of working days");
}
$("#applyLeave").prop("disabled", false);
console.log("Data appears fine ");
}
}else if($endDate[0] > $startDate[0]){//if the end month is greater than start month
//call function to compute end date and and number of days
//okconsole.log("Error");
var b = new Date($endDate[2], ($endDate[0]-1), $endDate[1]);
var a = new Date($startDate[2], ($startDate[0]-1), $startDate[1]);
$leaveDetails = computeLeaveDetails23(a,b);
$leaveDetails = JSON.parse($leaveDetails);
$message = $leaveDetails['message'];
$workingDays = $leaveDetails['workingDays'];
console.log($leaveDetails);
if($message === "OK"){
//compute remaining days
//get the number of holidays between the dates selected
//subtract from the number of days applied
$NoOfHolidays = getNumberOfHolidaysWithinDateRange();
console.log("Number of "+$NoOfHolidays);
console.log("Days Less holiday = "+($workingDays-parseInt($NoOfHolidays)));
$workingDaysApplied = $workingDays-parseInt($NoOfHolidays);
$("#daysToApply").val($workingDaysApplied);//set the days applied
//subtract from the number of days applied
//get the number of holidays between the dates selected
// $workingDaysApplied = parseInt($workingDaysApplied);
$availableDays = parseInt($("#daysAvaliable").val());
$remaingDays = $availableDays - $workingDaysApplied;
$("#daysRemaining").val($remaingDays);
//compute remaining days
$returnDate = getDateFromStartDateLeaveDays($("#startDate").val(),$workingDays);
console.log("Return Date info "+$returnDate);
$returnDateArray = $returnDate.split("k");
$("#returnDate").val($returnDateArray[0]);
}else{
console.log("Error computing number of working days");
$("#startDate").val('');
$("#endDate").val('');
$("#endDate").prop("disabled", true);
$("#loginerrorBox p").html($message);
hideLoginErrorBox();
}
$("#applyLeave").prop("disabled", false);
console.log("End month is greater thus ok");
}else{
console.log('3');
console.log($endDate+" "+$startDate);
console.log("condition not tested "+$endDate[0]+ " strat month "+$startDate[0]);
}
}
}else{
console.log($resp['message']);
$("#startDate").val('');
$("#endDate").val('');
$("#endDate").prop("disabled", true);
$("#loginerrorBox p").html($resp['message']);
hideLoginErrorBox();
}
}
});
});
/**
 * Rough count of working days between two dates (inclusive).
 * Weekends are approximated by subtracting two days per whole week in
 * the range; partial trailing weeks are not adjusted.
 *
 * NOTE: both Date arguments are mutated (their time-of-day is normalised).
 *
 * @param {Date} startDate first day of the range
 * @param {Date} endDate   last day of the range
 * @returns {string} JSON string of the form {"leavedays":N,"totalDays":M}
 */
function workingDaysBetweenDates(startDate, endDate) {
    var millisecondsPerDay = 86400 * 1000;
    // normalise to the very start/end of each day so ceil() below
    // counts both endpoints
    startDate.setHours(0, 0, 0, 1);
    endDate.setHours(23, 59, 59, 999);
    var diff = endDate - startDate;
    var days = Math.ceil(diff / millisecondsPerDay);
    // subtract two weekend days for every whole week in between
    var weeks = Math.floor(days / 7);
    // Single-day range: compare the full calendar date. The original
    // compared getDate() (day-of-month) only, so e.g. Jan 5 .. Feb 5
    // wrongly collapsed to a single day.
    var sameDay = startDate.getFullYear() === endDate.getFullYear()
        && startDate.getMonth() === endDate.getMonth()
        && startDate.getDate() === endDate.getDate();
    // resp/workingdays are now declared locals (they leaked to the
    // global scope in the original)
    var resp;
    if (sameDay) {
        resp = '{"leavedays":1,"totalDays":1}';
    } else {
        var workingdays = days - (weeks * 2);
        resp = '{"leavedays":' + workingdays + ',"totalDays":' + days + '}';
    }
    return resp;
}
// keep the original global entry point when running in a browser
if (typeof window !== 'undefined') {
    window.workingDaysBetweenDates = workingDaysBetweenDates;
}
//On End Date Change
// Handle the final "apply leave" submission:
//  1. re-validate the chosen start/end dates on the server,
//  2. check that the day fields were computed by the earlier steps,
//  3. re-format all dates from MM/DD/YYYY to YYYY/MM/DD,
//  4. ask the user to confirm, then POST the application.
$("#applyLeave").click(function(e){
    e.preventDefault();
    $daysRemaining = $("#daysRemaining").val();
    $daysToApply = $("#daysToApply").val();
    $endDate = $('#endDate').val();
    $startDate = $('#startDate').val();
    $.ajax({
        url:$validateStartDateUrl,
        data:{"startDate":$startDate,"endDate":$endDate},
        type:'POST',
        success:function($resp,status){
            console.log("validate date hase run "+$resp);
            $resp = JSON.parse($resp);
            $status = parseInt($resp['status']);
            if($status == 0){ // dates are valid
                // the day fields are filled by the date-change handlers;
                // if they are empty the user skipped a step
                if($daysRemaining == "" || $daysRemaining == undefined || $daysRemaining == null || $daysToApply == null || $daysToApply == undefined || $daysToApply == ""){
                    $("#loginerrorBox p").html("Complete the application form.");
                    console.log("Complete the application form.");
                    hideLoginErrorBox();
                }else{
                    $(".overlay").show();
                    $startDate = $('#startDate').val();
                    // absenceReason value encodes "<days>k<leaveTypeID>" (split on "k")
                    $daysNID = $('#absenceReason').val();
                    $daysNID = $daysNID.split("k");
                    $leaveTypeID = $daysNID[1];
                    $daysAvaliable = $('#daysAvaliable').val();
                    $daysToApply = $('#daysToApply').val();
                    $daysRemaining = $('#daysRemaining').val();
                    $endDate = $('#endDate').val();
                    $totalDays = $("#totalDaysApplied").val();
                    $returnDate = $("#returnDate").val();
                    // console.log("Return Date "+$returnDate+" End Date "+$endDate);
                    // $comment = $('#comment').val();
                    //format start date (MM/DD/YYYY -> YYYY/MM/DD)
                    $dateValue = $startDate.split('/');
                    $year = $dateValue[2];
                    $month = $dateValue[0];
                    $dayofWeek = $dateValue[1];
                    $startDate = $year+'/'+$month+'/'+$dayofWeek;
                    //format end date
                    $dateValue = $endDate.split('/');
                    $year = $dateValue[2];
                    $month = $dateValue[0];
                    $dayofWeek = $dateValue[1];
                    $endDate = $year+'/'+$month+'/'+$dayofWeek;
                    //format return date
                    $dateValue = $returnDate.split('/');
                    $year = $dateValue[2];
                    $month = $dateValue[0];
                    $dayofWeek = $dateValue[1];
                    $returnDate = $year+'/'+$month+'/'+$dayofWeek;
                    console.log($endDate+"endDate StartDate"+$startDate);
                    // typo fix: prompt used to read "Are your sure"
                    $confirm = confirm("Are you sure?");
                    if($confirm == true){
                        $.post($applyLeaveUrl,{"startDate":$startDate, "totalDaysApplied":$totalDays, "endDate":$endDate,"returnDate":$returnDate,"absenceReason":$leaveTypeID, "daysApplied":$daysToApply, "daysAvaliable":$daysAvaliable},function(data, status){
                            console.log(data);
                            $resp = JSON.parse(data);
                            $status =$resp['status'];
                            if($status == 0){ // application accepted
                                $(".overlay").hide();
                                $message = $resp['message'];
                                $("#loginSuccessBox p").html($resp['message']);
                                hideLoginSuccessBox();
                                // reload so the new application shows up
                                setTimeout(function(){
                                    location.reload();
                                },10000);
                            }else{ // server rejected the application
                                $(".overlay").hide();
                                $message = $resp['message'];
                                $("#loginerrorBox p").html($resp['message']);
                                hideLoginErrorBox();
                            }
                        });
                    }else{
                        // user cancelled the confirmation dialog
                        $(".overlay").hide();
                    }
                }
            }else{ // date validation failed: clear the form and show the error
                console.log($resp['message']);
                $("#startDate").val('');
                $("#endDate").val('');
                $("#daysRemaining").val('');
                $("#returnDate").val('');
                $("#daysToApply").val('');
                $("#endDate").prop("disabled", true);
                $("#loginerrorBox p").html($resp['message']);
                hideLoginErrorBox();
            }
        }
    });
});
//apply leave
});
//live at kippra
|
#!/bin/sh
#
# Unified Segger JLink script for RIOT
#
# This script is supposed to be called from RIOT's make system,
# as it depends on certain environment variables.
#
# Global environment variables used:
# JLINK: JLink command name, default: "JLinkExe"
# JLINK_SERVER: JLink GDB server command name, default: "JLinkGDBServer"
# JLINK_DEVICE: Device identifier used by JLink
# JLINK_SERIAL: Device serial used by JLink
# JLINK_IF: Interface used by JLink, default: "SWD"
# JLINK_SPEED: Interface clock speed to use (in kHz), default "2000"
# FLASH_ADDR: Starting address of the target's flash memory, default: "0"
# JLINK_PRE_FLASH: Additional JLink commands to execute before flashing
# JLINK_POST_FLASH: Additional JLink commands to execute after flashing
#
# The script supports the following actions:
#
# flash: flash a given hex file to the target.
# hexfile is expected in ihex format and is pointed to
# by the HEXFILE environment variable
#
# options:
# HEXFILE: path to the hex file that is flashed
#
# debug: starts JLink as GDB server in the background and
# connects to the server with the GDB client specified by
# the board (DBG environment variable)
#
# options:
# GDB_PORT: port opened for GDB connections
# TELNET_PORT: port opened for telnet connections
# DBG: debugger client command, default: 'gdb -q'
# TUI: if TUI!=null, the -tui option will be used
# ELFFILE: path to the ELF file to debug
#
# debug-server: starts JLink as GDB server, but does not connect to
# to it with any frontend. This might be useful when using
# IDEs.
#
# options:
# GDB_PORT: port opened for GDB connections
# TELNET_PORT: port opened for telnet connections
#
# reset: triggers a hardware reset of the target board
#
#
# @author Hauke Petersen <hauke.petersen@fu-berlin.de>
# Built-in fallback values; the underscore-prefixed variables are only
# applied by the test_* helpers below when the corresponding environment
# variable is unset or empty.
# default GDB port
_GDB_PORT=3333
# default telnet port
_TELNET_PORT=4444
# default JLink command, interface and speed
_JLINK=JLinkExe
_JLINK_SERVER=JLinkGDBServer
_JLINK_IF=SWD
_JLINK_SPEED=2000
# default terminal frontend (-ts: attach to JLink's RTT telnet port 19021)
_JLINK_TERMPROG=${RIOTTOOLS}/pyterm/pyterm
_JLINK_TERMFLAGS="-ts 19021"
#
# a couple of tests for certain configuration options
#
# Fill unset/empty optional settings with their built-in defaults and
# verify that the mandatory settings are present; exits with an error
# message otherwise.
test_config() {
    # optional settings: substitute the default when unset or empty
    JLINK=${JLINK:-${_JLINK}}
    JLINK_SERVER=${JLINK_SERVER:-${_JLINK_SERVER}}
    JLINK_IF=${JLINK_IF:-${_JLINK_IF}}
    JLINK_SPEED=${JLINK_SPEED:-${_JLINK_SPEED}}
    # mandatory settings: fail hard when missing
    if [ -z "${JLINK_DEVICE}" ]; then
        echo "Error: No target device defined in JLINK_DEVICE env var"
        exit 1
    fi
    if [ -z "${FLASH_ADDR}" ]; then
        echo "Error: No flash address defined in FLASH_ADDR env var"
        exit 1
    fi
}
# Abort unless HEXFILE points to an existing file.
test_hexfile() {
    if [ ! -f "${HEXFILE}" ]; then
        echo "Error: Unable to locate HEXFILE"
        echo " (${HEXFILE})"
        exit 1
    fi
}

# Fall back to the default GDB/telnet ports when none are configured.
test_ports() {
    if [ -z "${GDB_PORT}" ]; then
        GDB_PORT=${_GDB_PORT}
    fi
    if [ -z "${TELNET_PORT}" ]; then
        TELNET_PORT=${_TELNET_PORT}
    fi
}

# Abort unless ELFFILE points to an existing file.
test_elffile() {
    if [ ! -f "${ELFFILE}" ]; then
        echo "Error: Unable to locate ELFFILE"
        echo " (${ELFFILE})"
        exit 1
    fi
}

# Map any non-empty TUI request onto GDB's -tui switch.
test_tui() {
    if [ -n "${TUI}" ]; then
        TUI=-tui
    fi
}

# Build the per-tool serial-selection arguments when JLINK_SERIAL is set
# (JLinkExe and JLinkGDBServer use different option spellings).
test_serial() {
    if [ -n "${JLINK_SERIAL}" ]; then
        JLINK_SERIAL_SERVER="-select usb='${JLINK_SERIAL}'"
        JLINK_SERIAL="-SelectEmuBySN '${JLINK_SERIAL}'"
    fi
}

# Default the debugger client to the board-provided GDB command.
test_dbg() {
    if [ -z "${DBG}" ]; then
        DBG="${GDB}"
    fi
}

# Fall back to the default RTT terminal program and flags.
test_term() {
    if [ -z "${JLINK_TERMPROG}" ]; then
        JLINK_TERMPROG="${_JLINK_TERMPROG}"
    fi
    if [ -z "${JLINK_TERMFLAGS}" ]; then
        JLINK_TERMFLAGS="${_JLINK_TERMFLAGS}"
    fi
}
#
# now comes the actual actions
#
# Flash HEXFILE to the target at FLASH_ADDR.
# Assembles a temporary JLink command file (burn.seg) from the optional
# JLINK_PRE_FLASH commands, the loadbin step, the optional
# JLINK_POST_FLASH commands and the shared reset sequence, then runs
# JLink on it.
do_flash() {
    test_config
    test_serial
    test_hexfile
    # clear any existing contents in burn file
    /bin/echo -n "" > ${BINDIR}/burn.seg
    # create temporary burn file
    if [ ! -z "${JLINK_PRE_FLASH}" ]; then
        printf "${JLINK_PRE_FLASH}\n" >> ${BINDIR}/burn.seg
    fi
    echo "loadbin ${HEXFILE} ${FLASH_ADDR}" >> ${BINDIR}/burn.seg
    if [ ! -z "${JLINK_POST_FLASH}" ]; then
        printf "${JLINK_POST_FLASH}\n" >> ${BINDIR}/burn.seg
    fi
    cat ${RIOTTOOLS}/jlink/reset.seg >> ${BINDIR}/burn.seg
    # flash device
    sh -c "${JLINK} ${JLINK_SERIAL} \
            -device '${JLINK_DEVICE}' \
            -speed '${JLINK_SPEED}' \
            -if '${JLINK_IF}' \
            -jtagconf -1,-1 \
            -commandfile '${BINDIR}/burn.seg'"
}
# Start the JLink GDB server in the background, attach the configured
# GDB client to it, and kill the server again when the client exits.
do_debug() {
    test_config
    test_serial
    test_elffile
    test_ports
    test_tui
    test_dbg
    # start the JLink GDB server
    sh -c "${JLINK_SERVER} ${JLINK_SERIAL_SERVER} \
            -device '${JLINK_DEVICE}' \
            -speed '${JLINK_SPEED}' \
            -if '${JLINK_IF}' \
            -port '${GDB_PORT}' \
            -telnetport '${TELNET_PORT}'" &
    # save the background job's PID for terminating the server afterwards.
    # (Bug fix: the original used $?, which holds the last exit status —
    # 0 here — so `kill 0` signalled the entire process group instead of
    # the server. $! is the PID of the job just started.)
    DBG_PID=$!
    # connect to the GDB server
    ${DBG} -q ${TUI} -ex "tar ext :${GDB_PORT}" ${ELFFILE}
    # clean up
    kill ${DBG_PID}
}
# Run the JLink GDB server in the foreground without attaching a client;
# useful for IDE integrations.
do_debugserver() {
    test_ports
    test_config
    test_serial
    # start the JLink GDB server
    sh -c "${JLINK_SERVER} ${JLINK_SERIAL_SERVER} \
            -device '${JLINK_DEVICE}' \
            -speed '${JLINK_SPEED}' \
            -if '${JLINK_IF}' \
            -port '${GDB_PORT}' \
            -telnetport '${TELNET_PORT}'"
}
# Trigger a hardware reset of the target via the shared reset command file.
do_reset() {
    test_config
    test_serial
    # reset the board
    sh -c "${JLINK} ${JLINK_SERIAL} \
            -device '${JLINK_DEVICE}' \
            -speed '${JLINK_SPEED}' \
            -if '${JLINK_IF}' \
            -jtagconf -1,-1 \
            -commandfile '${RIOTTOOLS}/jlink/reset.seg'"
}
# Open an RTT terminal: run JLink as an RTT server in the background and
# attach the configured terminal program to its telnet channel; the
# server is killed again when the script exits.
do_term() {
    test_config
    test_serial
    test_term
    # temporary file that saves the JLink pid (typo fix: was "jilnk_pid")
    JLINK_PIDFILE=$(mktemp -t "jlink_pid.XXXXXXXXXX")
    # will be called by trap
    cleanup() {
        JLINK_PID="$(cat ${JLINK_PIDFILE})"
        kill ${JLINK_PID}
        # -f: the pidfile is a regular file (the original's -r was the
        # wrong flag) and may already be gone
        rm -f "${JLINK_PIDFILE}"
        exit 0
    }
    # cleanup after script terminates
    trap "cleanup ${JLINK_PIDFILE}" EXIT
    # don't trap on Ctrl+C, because JLink keeps running
    trap '' INT
    # start JLink as RTT server
    sh -c "${JLINK} ${JLINK_SERIAL} \
            -device '${JLINK_DEVICE}' \
            -speed '${JLINK_SPEED}' \
            -if '${JLINK_IF}' \
            -jtagconf -1,-1 \
            -commandfile '${RIOTTOOLS}/jlink/term.seg' >/dev/null & \
            echo \$! > $JLINK_PIDFILE" &
    sh -c "${JLINK_TERMPROG} ${JLINK_TERMFLAGS}"
}
#
# parameter dispatching
#
# Dispatch on the first command-line argument; remaining arguments are
# forwarded to the selected action.
ACTION="$1"
shift # pop $1 from $@

case "${ACTION}" in
  flash)
    echo "### Flashing Target ###"
    echo "### Flashing at address ${FLASH_ADDR} ###"
    do_flash "$@"
    ;;
  debug)
    echo "### Starting Debugging ###"
    do_debug "$@"
    ;;
  debug-server)
    echo "### Starting GDB Server ###"
    do_debugserver "$@"
    ;;
  reset)
    echo "### Resetting Target ###"
    do_reset "$@"
    ;;
  term_rtt)
    echo "### Starting RTT terminal ###"
    do_term
    ;;
  *)
    # list every supported action (term_rtt was missing from the original)
    echo "Usage: $0 {flash|debug|debug-server|reset|term_rtt}"
    ;;
esac
|
#!/bin/bash
set -evx

# -p: don't fail when the directory already exists — a plain mkdir
# aborted the whole script under `set -e` on every re-run.
mkdir -p ~/.kzcash
# safety check: only install the example config when no config exists yet.
# (Bug fix: the original tested ~/.kzcash/.kzcash.conf — a file that is
# never created here — so the example was copied over the real config on
# every run.)
if [ ! -f ~/.kzcash/kzcash.conf ]; then
    cp share/kzcash.conf.example ~/.kzcash/kzcash.conf
fi
<filename>lib/core_extensions.rb<gh_stars>0
# Monkey-patches Object with small shell/logging helpers so build
# scripts can call them from anywhere.
Object.module_eval do
  # Log a shell command, run it, and terminate the whole process with
  # exit status 1 when the command fails.
  def se command
    log_info command
    exit(1) unless system command
  end

  # Print the message to stdout with an [INFO] prefix.
  def log_info message
    message = "[INFO]\t#{message}"
    puts message
  end

  # Print the message with an [ERROR] prefix and also collect it in the
  # global error list for later reporting.
  def log_error message
    message = "[ERROR]\t#{message}"
    all_error_messages << message
    puts message
  end

  # Lazily-initialised global accumulator of every logged error message.
  def all_error_messages
    $all_error_messages ||= []
  end
end
|
#!/bin/sh
set -ex
# Shrink the image after provisioning: drop orphaned packages and the
# apt package cache, then remove stale DHCP leases.
apt-get -y autoremove
apt-get -y clean
rm -f /var/lib/dhcp/* # clean up dhcp leases
#!/bin/bash

FUZZER=$1 #fuzzer name (e.g., aflnet) -- this name must match the name of the fuzzer folder inside the Docker container
OUTDIR=$2 #name of the output folder
OPTIONS=$3 #all configured options -- to make it flexible, we only fix some options (e.g., -i, -o, -N) in this script
TIMEOUT=$4 #time for fuzzing
SKIPCOUNT=$5 #used for calculating cov over time. e.g., SKIPCOUNT=5 means we run gcovr after every 5 test cases
NO_SEEDS=$6 #when 1, fuzz from the empty seed corpus (in-dtls-empty) instead of in-dtls

# Substring test: succeeds (status 0) when $1 contains $2.
strstr() {
    [ "${1#*$2*}" = "$1" ] && return 1
    return 0
}
#Commands for afl-based fuzzers (e.g., aflnet, aflnwe)
# NOTE(review): `if $(strstr ...)` expands to nothing, so the shell uses
# the command substitution's exit status — obscure but intentional here.
if $(strstr $FUZZER "afl"); then
    #Step-1. Do Fuzzing
    #Move to fuzzing folder
    cd $WORKDIR

    # pick the seed corpus: an empty one when NO_SEEDS=1
    if [ "$NO_SEEDS" = 1 ]; then
        INPUTS="$WORKDIR/in-dtls-empty"
    else
        INPUTS="$WORKDIR/in-dtls"
    fi

    if [ "$FUZZER" = "aflpp" ]; then
        # AFL++ fuzzes the de-socketed server directly via preeny's desock
        AFL_PRELOAD="/home/ubuntu/preeny/src/desock.so" \
        timeout -k 0 $TIMEOUT /home/ubuntu/${FUZZER}/afl-fuzz \
            -d -i "$INPUTS" -o $OUTDIR $OPTIONS \
            ./tinydtls/tests/dtls-server
    else
        # aflnet/aflnwe exercise the server over UDP
        timeout -k 0 $TIMEOUT /home/ubuntu/${FUZZER}/afl-fuzz \
            -d -i "$INPUTS" -o $OUTDIR -N udp://127.0.0.1/20220 $OPTIONS \
            ./tinydtls/tests/dtls-server
    fi

    wait

    #Step-2. Collect code coverage over time
    #Move to gcov folder
    cd $WORKDIR

    #The last argument passed to cov_script should be 0 if the fuzzer is afl/nwe and it should be 1 if the fuzzer is based on aflnet
    #0: the test case is a concatenated message sequence -- there is no message boundary
    #1: the test case is a structured file keeping several request messages
    # (fix: quoted `=` comparison for consistency with the tests above
    # and to survive an empty $FUZZER; the original used `[ $FUZZER == ... ]`)
    if [ "$FUZZER" = "aflnwe" ]; then
        cov_script ${WORKDIR}/${OUTDIR}/ 20220 ${SKIPCOUNT} ${WORKDIR}/${OUTDIR}/cov_over_time.csv 0
    elif [ "$FUZZER" = "aflpp" ]; then
        cov_script ${WORKDIR}/${OUTDIR}/default 20220 ${SKIPCOUNT} ${WORKDIR}/${OUTDIR}/cov_over_time.csv 0
    else
        cov_script ${WORKDIR}/${OUTDIR}/ 20220 ${SKIPCOUNT} ${WORKDIR}/${OUTDIR}/cov_over_time.csv 1
    fi

    gcovr -r $WORKDIR/tinydtls-gcov --html --html-details -o index.html
    mkdir ${WORKDIR}/${OUTDIR}/cov_html/
    cp *.html ${WORKDIR}/${OUTDIR}/cov_html/
    # genhtml -o "${WORKDIR}/${OUTDIR}/cov_html/" --branch-coverage "$WORKDIR/coverage.info"

    #Step-3. Save the result to the ${WORKDIR} folder
    #Tar all results to a file
    cd ${WORKDIR}
    tar -zcvf ${WORKDIR}/${OUTDIR}.tar.gz ${OUTDIR}
fi
|
class TicTacToe:
    """Simple 3x3 tic-tac-toe helper with a rule-based move picker."""

    def __init__(self):
        # Board cells hold ' ' (empty), 'X' or 'O'.
        self.size = 3
        self.board = [[' ' for _ in range(self.size)] for _ in range(self.size)]

    def get_move(self, board, toplay):
        """Pick a move for ``toplay`` on ``board``.

        Strategy, in priority order:
          1. take an immediate winning move,
          2. block the opponent's immediate winning move,
          3. otherwise take the first empty cell (row-major order).

        Returns a ``(row, col)`` tuple, or ``None`` when the board is full.
        The board is probed in place but always restored before returning
        (the original left the probe symbol — even the *opponent's* — on
        the caller's board when it found a decisive square).
        """
        opponent = 'X' if toplay == 'O' else 'O'
        # Pass 1 looks for our own win, pass 2 for a forced block.
        for symbol in (toplay, opponent):
            for i in range(self.size):
                for j in range(self.size):
                    if board[i][j] == ' ':
                        board[i][j] = symbol
                        decisive = self.check_winner(board, symbol)
                        board[i][j] = ' '  # undo the probe
                        if decisive:
                            return i, j
        # Fall back to the first free cell.
        for i in range(self.size):
            for j in range(self.size):
                if board[i][j] == ' ':
                    return i, j
        return None

    def check_winner(self, board, symbol):
        """Return True if ``symbol`` owns a full row, column or diagonal."""
        for i in range(self.size):
            if all(cell == symbol for cell in board[i]):
                return True
            if all(board[j][i] == symbol for j in range(self.size)):
                return True
        # Diagonals only need checking once — the original re-evaluated
        # both of them on every iteration of the row loop.
        if all(board[i][i] == symbol for i in range(self.size)):
            return True
        if all(board[i][self.size - 1 - i] == symbol for i in range(self.size)):
            return True
        return False
# Clean previous build artifacts. -f/-rf keep the script going when a
# target does not exist yet (the original spelled this `rm ... || true`).
rm -f wingedgudda.deb
rm -rf Builds/
rm -f lamo_staging/Library/MobileSubstrate/DynamicLibraries/Lamo.dylib
rm -f lamo_staging/Library/MobileSubstrate/DynamicLibraries/LamoClient.dylib
# Build the tweak without code signing; $1 is forwarded as `owner`.
xctool -sdk iphoneos -project Lamo.xcodeproj/ -scheme Lamo CODE_SIGNING_REQUIRED=NO owner=$1
# Stage the freshly built dylibs and pack the Debian package.
cp Builds/Lamo.dylib lamo_staging/Library/MobileSubstrate/DynamicLibraries/Lamo.dylib
cp Builds/LamoClient.dylib lamo_staging/Library/MobileSubstrate/DynamicLibraries/LamoClient.dylib
dpkg-deb -b lamo_staging/ wingedgudda.deb
<reponame>toastier/srf
(function () {
  'use strict';

  angular
    .module('core')
    .factory('_', LoDash);

  // Explicit DI annotation so the factory survives minification —
  // implicit (parameter-name) injection breaks when `$window` is renamed.
  LoDash.$inject = ['$window'];

  /**
   * Wraps the global lodash library in an Angular service and removes it
   * from the global scope so all access goes through DI.
   * @param $window Angular's window wrapper; lodash is read from it
   * @returns {*} the lodash object, registered as the `_` service
   * @constructor
   */
  function LoDash($window) {
    // keep a local reference before deleting the global
    var _ = $window._;

    // removing lodash from the global scope
    delete $window._;

    // return lodash as an angular service
    return ( _ );
  }
})();
|
<filename>db/migrate/20190606114621_create_reactions.rb<gh_stars>100-1000
# Creates the reactions table: a typed user reaction attached to a post,
# limited to one reaction per user per post.
class CreateReactions < ActiveRecord::Migration[5.2]
  def change
    create_table :reactions do |t|
      # index: false — the composite unique index below already serves
      # user_id lookups
      t.belongs_to :user, null: false, index: false
      t.belongs_to :post, null: false
      # NOTE(review): `type` is Rails' reserved STI column name; presumably
      # the Reaction model overrides inheritance_column — confirm.
      t.integer :type, null: false
      t.timestamps
      # a user may react to a given post at most once
      t.index [:user_id, :post_id], unique: true
    end
  end
end
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.configuration;
import java.io.Serializable;
import java.util.ArrayList;
import javax.cache.configuration.Factory;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.internal.util.TransientSerializable;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteExperimental;
import org.apache.ignite.lang.IgniteProductVersion;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
/**
* Transactions configuration.
*/
@TransientSerializable(methodName = "transientSerializableFields")
public class TransactionConfiguration implements Serializable {
/** */
private static final IgniteProductVersion TX_PME_TIMEOUT_SINCE = IgniteProductVersion.fromString("2.5.1");
/** */
private static final IgniteProductVersion DEADLOCK_TIMEOUT_SINCE = IgniteProductVersion.fromString("2.7.3");
/** */
private static final long serialVersionUID = 0L;
/** Default value for 'txSerializableEnabled' flag. */
public static final boolean DFLT_TX_SERIALIZABLE_ENABLED = false;
/** Default concurrency mode. */
public static final TransactionConcurrency DFLT_TX_CONCURRENCY = TransactionConcurrency.PESSIMISTIC;
/** Default transaction isolation level. */
public static final TransactionIsolation DFLT_TX_ISOLATION = TransactionIsolation.REPEATABLE_READ;
/** Default transaction timeout. */
public static final long DFLT_TRANSACTION_TIMEOUT = 0;
/** Transaction timeout on partition map synchronization. */
public static final long TX_TIMEOUT_ON_PARTITION_MAP_EXCHANGE = 0;
/** Default timeout before starting deadlock detection. */
public static final long DFLT_DEADLOCK_TIMEOUT = 10_000;
/** Default size of pessimistic transactions log. */
public static final int DFLT_PESSIMISTIC_TX_LOG_LINGER = 10_000;
/** Default transaction serializable flag. */
private boolean txSerEnabled = DFLT_TX_SERIALIZABLE_ENABLED;
/** Transaction isolation. */
private TransactionIsolation dfltIsolation = DFLT_TX_ISOLATION;
/** Cache concurrency. */
private TransactionConcurrency dfltConcurrency = DFLT_TX_CONCURRENCY;
/** Default transaction timeout. */
private long dfltTxTimeout = DFLT_TRANSACTION_TIMEOUT;
/**
* Transaction timeout on partition map exchange.
* Volatile in order to be changed dynamically.
*/
private volatile long txTimeoutOnPartitionMapExchange = TX_TIMEOUT_ON_PARTITION_MAP_EXCHANGE;
/** Timeout before starting deadlock detection. */
private long deadlockTimeout = DFLT_DEADLOCK_TIMEOUT;
/** Pessimistic tx log size. */
private int pessimisticTxLogSize;
/** Pessimistic tx log linger. */
private int pessimisticTxLogLinger = DFLT_PESSIMISTIC_TX_LOG_LINGER;
/** Name of class implementing GridCacheTmLookup. */
private String tmLookupClsName;
/** {@code javax.transaction.TransactionManager} factory. */
private Factory txManagerFactory;
/**
* Whether to use JTA {@code javax.transaction.Synchronization}
* instead of {@code javax.transaction.xa.XAResource}.
*/
private boolean useJtaSync;
/**
* Empty constructor.
*/
public TransactionConfiguration() {
// No-op.
}
/**
* @param cfg Configuration to copy.
*/
public TransactionConfiguration(TransactionConfiguration cfg) {
dfltConcurrency = cfg.getDefaultTxConcurrency();
dfltIsolation = cfg.getDefaultTxIsolation();
dfltTxTimeout = cfg.getDefaultTxTimeout();
txTimeoutOnPartitionMapExchange = cfg.getTxTimeoutOnPartitionMapExchange();
deadlockTimeout = cfg.getDeadlockTimeout();
pessimisticTxLogLinger = cfg.getPessimisticTxLogLinger();
pessimisticTxLogSize = cfg.getPessimisticTxLogSize();
txSerEnabled = cfg.isTxSerializableEnabled();
tmLookupClsName = cfg.getTxManagerLookupClassName();
txManagerFactory = cfg.getTxManagerFactory();
useJtaSync = cfg.isUseJtaSynchronization();
}
/**
* Gets flag to enable/disable {@link TransactionIsolation#SERIALIZABLE} isolation
* level for cache transactions. Serializable level does carry certain overhead and
* if not used, should be disabled. Default value is {@code false}.
*
* @return {@code True} if serializable transactions are enabled, {@code false} otherwise.
*/
@Deprecated
public boolean isTxSerializableEnabled() {
return txSerEnabled;
}
/**
* @param txSerEnabled Flag to enable/disable serializable cache transactions.
* @deprecated This method has no effect, {@link TransactionIsolation#SERIALIZABLE} isolation is always enabled.
* @return {@code this} for chaining.
*/
@Deprecated
public TransactionConfiguration setTxSerializableEnabled(boolean txSerEnabled) {
this.txSerEnabled = txSerEnabled;
return this;
}
/**
* Default cache transaction concurrency to use when one is not explicitly
* specified. Default value is defined by {@link #DFLT_TX_CONCURRENCY}.
*
* @return Default cache transaction concurrency.
* @see Transaction
*/
public TransactionConcurrency getDefaultTxConcurrency() {
return dfltConcurrency;
}
/**
* Sets default transaction concurrency.
*
* @param dfltConcurrency Default cache transaction concurrency.
* @return {@code this} for chaining.
*/
public TransactionConfiguration setDefaultTxConcurrency(TransactionConcurrency dfltConcurrency) {
this.dfltConcurrency = dfltConcurrency;
return this;
}
/**
* Default cache transaction isolation to use when one is not explicitly
* specified. Default value is defined by {@link #DFLT_TX_ISOLATION}.
*
* @return Default cache transaction isolation.
* @see Transaction
*/
public TransactionIsolation getDefaultTxIsolation() {
return dfltIsolation;
}
/**
* Sets default transaction isolation.
*
* @param dfltIsolation Default cache transaction isolation.
* @return {@code this} for chaining.
*/
public TransactionConfiguration setDefaultTxIsolation(TransactionIsolation dfltIsolation) {
this.dfltIsolation = dfltIsolation;
return this;
}
/**
* Gets default transaction timeout. Default value is defined by {@link #DFLT_TRANSACTION_TIMEOUT}
* which is {@code 0} and means that transactions will never time out.
*
* @return Default transaction timeout.
*/
public long getDefaultTxTimeout() {
return dfltTxTimeout;
}
/**
* Sets default transaction timeout in milliseconds. By default this value is defined by {@link
* #DFLT_TRANSACTION_TIMEOUT}.
*
* @param dfltTxTimeout Default transaction timeout.
* @return {@code this} for chaining.
*/
public TransactionConfiguration setDefaultTxTimeout(long dfltTxTimeout) {
this.dfltTxTimeout = dfltTxTimeout;
return this;
}
/**
* Some Ignite operations provoke partition map exchange process within Ignite to ensure the partitions distribution
* state is synchronized cluster-wide. Topology update events and a start of a new distributed cache are examples
* of those operations.
* <p>
* When the partition map exchange starts, Ignite acquires a global lock at a particular stage. The lock can't be
* obtained until pending transactions are running in parallel. If there is a transaction that runs for a while,
* then it will prevent the partition map exchange process from the start freezing some operations such as a new
* node join process.
* <p>
* This property allows to rollback such long transactions to let Ignite acquire the lock faster and initiate the
* partition map exchange process. The timeout is enforced only at the time of the partition map exchange process.
* <p>
* If not set, default value is {@link #TX_TIMEOUT_ON_PARTITION_MAP_EXCHANGE} which means transactions will never be
* rolled back on partition map exchange.
*
* @return Transaction timeout for partition map synchronization in milliseconds.
*/
public long getTxTimeoutOnPartitionMapExchange() {
return txTimeoutOnPartitionMapExchange;
}
/**
* Sets the transaction timeout that will be enforced if the partition map exchange process starts.
*
* @param txTimeoutOnPartitionMapExchange Transaction timeout value in milliseconds.
* @return {@code this} for chaining.
*/
public TransactionConfiguration setTxTimeoutOnPartitionMapExchange(long txTimeoutOnPartitionMapExchange) {
this.txTimeoutOnPartitionMapExchange = txTimeoutOnPartitionMapExchange;
return this;
}
    /**
     * <b>This is an experimental feature. Transactional SQL is currently in a beta status.</b>
     * <p>
     * Transaction deadlocks occurred for caches configured with {@link CacheAtomicityMode#TRANSACTIONAL_SNAPSHOT}
     * can be resolved automatically.
     * <p>
     * Deadlock detection starts when one transaction is waiting for an entry lock more than a timeout specified by
     * this property.
     * <p>
     * Timeout is specified in milliseconds and {@code 0} means that automatic deadlock detection is disabled. Default
     * value is defined by {@link #DFLT_DEADLOCK_TIMEOUT}.
     *
     * @return Timeout before starting deadlock detection.
     * @see #setDeadlockTimeout(long)
     */
    @IgniteExperimental
    public long getDeadlockTimeout() {
        return deadlockTimeout;
    }
    /**
     * <b>This is an experimental feature. Transactional SQL is currently in a beta status.</b>
     * <p>
     * Sets a timeout before starting deadlock detection for caches configured with
     * {@link CacheAtomicityMode#TRANSACTIONAL_SNAPSHOT}.
     * <p>
     * Timeout is specified in milliseconds and {@code 0} means that automatic deadlock detection is disabled. Default
     * value is defined by {@link #DFLT_DEADLOCK_TIMEOUT}.
     *
     * @param deadlockTimeout Timeout value in milliseconds.
     * @return {@code this} for chaining.
     * @see #getDeadlockTimeout()
     */
    @IgniteExperimental
    public TransactionConfiguration setDeadlockTimeout(long deadlockTimeout) {
        this.deadlockTimeout = deadlockTimeout;
        return this;
    }
    /**
     * Gets size of pessimistic transactions log stored on node in order to recover transaction commit if originating
     * node has left grid before it has sent all messages to transaction nodes.
     * <p>
     * If not set, default value is {@code 0} which means unlimited log size.
     *
     * @return Pessimistic transaction log size.
     * @see #setPessimisticTxLogSize(int)
     */
    public int getPessimisticTxLogSize() {
        return pessimisticTxLogSize;
    }
    /**
     * Sets pessimistic transactions log size ({@code 0} means unlimited, see the getter javadoc).
     *
     * @param pessimisticTxLogSize Pessimistic transactions log size.
     * @see #getPessimisticTxLogSize()
     * @return {@code this} for chaining.
     */
    public TransactionConfiguration setPessimisticTxLogSize(int pessimisticTxLogSize) {
        this.pessimisticTxLogSize = pessimisticTxLogSize;
        return this;
    }
    /**
     * Gets delay, in milliseconds, after which pessimistic recovery entries will be cleaned up for failed node.
     * <p>
     * If not set, default value is {@link #DFLT_PESSIMISTIC_TX_LOG_LINGER}.
     *
     * @return Pessimistic log cleanup delay in milliseconds.
     * @see #setPessimisticTxLogLinger(int)
     */
    public int getPessimisticTxLogLinger() {
        return pessimisticTxLogLinger;
    }
    /**
     * Sets cleanup delay for pessimistic transaction recovery log for failed node, in milliseconds.
     *
     * @param pessimisticTxLogLinger Pessimistic log cleanup delay in milliseconds.
     * @see #getPessimisticTxLogLinger()
     * @return {@code this} for chaining.
     */
    public TransactionConfiguration setPessimisticTxLogLinger(int pessimisticTxLogLinger) {
        this.pessimisticTxLogLinger = pessimisticTxLogLinger;
        return this;
    }
    /**
     * Gets class name of transaction manager finder for integration for JEE app servers.
     *
     * @return Transaction manager finder.
     * @deprecated Use {@link #getTxManagerFactory()} instead.
     * @see #setTxManagerLookupClassName(String)
     */
    @Deprecated
    public String getTxManagerLookupClassName() {
        return tmLookupClsName;
    }
    /**
     * Sets look up mechanism for available {@code TransactionManager} implementation, if any.
     *
     * @param tmLookupClsName Name of class implementing GridCacheTmLookup interface that is used to
     *      receive JTA transaction manager.
     * @deprecated Use {@link #setTxManagerFactory(Factory)} instead.
     * @see #getTxManagerLookupClassName()
     * @return {@code this} for chaining.
     */
    @Deprecated
    public TransactionConfiguration setTxManagerLookupClassName(String tmLookupClsName) {
        this.tmLookupClsName = tmLookupClsName;
        return this;
    }
    /**
     * Gets transaction manager factory for integration with JEE app servers.
     *
     * @param <T> Instance of {@code javax.transaction.TransactionManager}.
     * @return Transaction manager factory.
     * @see #isUseJtaSynchronization()
     * @see #setTxManagerFactory(Factory)
     */
    @SuppressWarnings("unchecked")
    public <T> Factory<T> getTxManagerFactory() {
        return txManagerFactory;
    }
    /**
     * Sets transaction manager factory for available {@code javax.transaction.TransactionManager} implementation,
     * if any.
     * <p>
     * It allows to use different transactional systems. Implement factory that produce native
     * {@code javax.transaction.TransactionManager} within your environment.
     * <p>
     * The following implementations are provided out of the box (jta module must be enabled):
     * <ul>
     * <li>
     *  {@code org.apache.ignite.cache.jta.jndi.CacheJndiTmFactory} utilizes configured JNDI names to look up
     *  a transaction manager.
     * </li>
     * <li>
     *  {@code org.apache.ignite.cache.jta.websphere.WebSphereTmFactory} an implementation of Transaction Manager
     *  factory to be used within WebSphere Application Server.
     * </li>
     * <li>
     *  {@code org.apache.ignite.cache.jta.websphere.WebSphereLibertyTmFactory} an implementation of Transaction Manager
     *  factory to be used within WebSphere Liberty.
     * </li>
     * </ul>
     *
     * Ignite will throw IgniteCheckedException if {@link Factory#create()} method throws any exception,
     * returns {@code null}-value or returns non-{@code TransactionManager} instance.
     *
     * @param factory Transaction manager factory.
     * @param <T> Instance of {@code javax.transaction.TransactionManager}.
     * @see #setUseJtaSynchronization(boolean)
     * @see #getTxManagerFactory()
     * @return {@code this} for chaining.
     */
    public <T> TransactionConfiguration setTxManagerFactory(Factory<T> factory) {
        txManagerFactory = factory;
        return this;
    }
    /**
     * @return Whether to use JTA {@code javax.transaction.Synchronization}
     *      instead of {@code javax.transaction.xa.XAResource}.
     * @see #getTxManagerFactory()
     * @see #setUseJtaSynchronization(boolean)
     */
    public boolean isUseJtaSynchronization() {
        return useJtaSync;
    }
    /**
     * Sets the flag that defines whether to use lightweight JTA synchronization callback to enlist
     * into JTA transaction instead of creating a separate XA resource. In some cases this can give
     * performance improvement, but keep in mind that most of the transaction managers do not allow
     * to add more than one callback to a single transaction.
     *
     * @param useJtaSync Whether to use JTA {@code javax.transaction.Synchronization}
     *      instead of {@code javax.transaction.xa.XAResource}.
     * @see #setTxManagerFactory(Factory)
     * @see #isUseJtaSynchronization()
     * @return {@code this} for chaining.
     */
    public TransactionConfiguration setUseJtaSynchronization(boolean useJtaSync) {
        this.useJtaSync = useJtaSync;
        return this;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        // Reflective string representation built by Ignite's S utility.
        return S.toString(TransactionConfiguration.class, this);
    }
/**
* Excludes incompatible fields from serialization/deserialization process.
*
* @param ver Sender/Receiver node version.
* @return Array of excluded from serialization/deserialization fields.
*/
@SuppressWarnings("unused")
private static String[] transientSerializableFields(IgniteProductVersion ver) {
ArrayList<String> transients = new ArrayList<>(2);
if (TX_PME_TIMEOUT_SINCE.compareToIgnoreTimestamp(ver) >= 0)
transients.add("txTimeoutOnPartitionMapExchange");
if (DEADLOCK_TIMEOUT_SINCE.compareToIgnoreTimestamp(ver) >= 0)
transients.add("deadlockTimeout");
return transients.isEmpty() ? null : transients.toArray(new String[transients.size()]);
}
}
|
#!/bin/bash
# Bootstrap script: installs Go, native build dependencies, Constellation and Quorum.
set -e
# Grandparent of the current working directory.
DIR=`echo $PWD | xargs dirname | xargs dirname`
# Distro ID from /etc/os-release; the second sed uses '\' as the s-command
# delimiter, stripping double quotes from the value.
OS=$(cat /etc/os-release | grep "^ID=" | sed 's/ID=//g' | sed 's\"\\g')
if [ $OS = "centos" ] || [ $OS = "rhel" ];then
    echo "Installing the environment in $OS"
    GOREL="go1.8.7.linux-amd64.tar.gz"
    # TODO: ALWAYS DOWNLOAD AND INSTALL GOLANG!!!!
    # install Go only when it is not already on PATH
    if ! type "go" > /dev/null; then
        # Go installation
        PATH="$PATH:/usr/local/go/bin"
        echo "Installing GO"
        sudo yum -y install wget
        wget -q "https://storage.googleapis.com/golang/${GOREL}"
        tar -xvzf "${GOREL}"
        rm -Rf /usr/local/go
        mv go /usr/local/go
        sudo rm "${GOREL}"
    else
        # Compare installed Go version against the required one by stripping dots
        # and comparing numerically.
        # NOTE(review): this breaks for multi-digit components (e.g. 1.10 -> 110
        # vs 1.8.7 -> 187); revisit before bumping GOREL.
        V1=$(go version | grep -oP '\d+(?:\.\d+)+')
        V2=$(echo $GOREL | grep -oP '\d+(?:\.\d+)+')
        nV1=$(echo $V1 | sed 's/\.//g')
        nV2=$(echo $V2 | sed 's/\.//g')
        if (( $nV1 >= $nV2 )); then
            echo "Using your own version of Go"
        else
            echo "Your version of go is smaller than required"
            exit
        fi
    fi
    # install build deps
    sudo yum clean all
    echo "Installing Libraries"
    sudo yum -y update
    sudo yum -y install gmp-devel
    sudo yum -y install gcc gcc-c++ make openssl-devel
    sudo yum -y install libdb-devel
    sudo yum -y install ncurses-devel
    # Check EPEL repository availability. It is available by default in Fedora and CentOS, but it requires manual
    # installation in RHEL
    EPEL_AVAILABLE=$(sudo yum search epel | grep release || true)
    if [[ -z $EPEL_AVAILABLE ]];then
        echo EPEL Repository is not available via YUM. Downloading
        wget https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm -O /tmp/epel-release-latest-7.noarch.rpm
        sudo yum -y install /tmp/epel-release-latest-7.noarch.rpm
    else
        echo EPEL repository is available in YUM via distro packages. Adding it as a source for packages
        sudo yum -y install epel-release
    fi
    echo "Installing LIBSODIUM"
    wget https://github.com/naphaso/jsodium/raw/master/native/linux/libsodium.so.18
    sudo chmod 755 libsodium.so.18
    sudo cp libsodium.so.18 /usr/lib64/
    echo "Installing LIBDB"
    wget https://github.com/hypergraphdb/hypergraphdb/raw/master/storage/bdb-native/native/linux/x86_64/libdb-5.3.so
    sudo chmod 755 libdb-5.3.so
    sudo cp libdb-5.3.so /usr/lib64/ || true
    cd ..
    # LEVELDB FIX: build leveldb from a pinned commit and install headers/libs
    echo "Installing LEVELDB"
    git clone https://github.com/google/leveldb.git
    cd leveldb/
    git checkout 0fa5a4f
    make
    sudo scp -r out-static/lib* out-shared/lib* /usr/local/lib/
    sudo cp /usr/local/lib/libleveldb.* /usr/lib64/
    cd include/
    sudo scp -r leveldb /usr/local/include/
    sudo ldconfig || true
    cd ../..
    sudo rm -r leveldb
    cd ..
    echo "Cleaning CACHE RPM"
    rm -f /var/lib/rpm/__db*
    rpm --rebuilddb
    echo "Installing NODE"
    sudo yum install -y nodejs
    # Ethereum (geth built from source, solc via npm)
    echo "Installing ETHEREUM"
    git clone https://github.com/ethereum/go-ethereum
    cd go-ethereum/
    make geth
    ls -al build/bin/geth
    sudo npm install -g solc
    # install constellation
    echo "Installing CONSTELLATION"
    wget -q https://github.com/jpmorganchase/constellation/releases/download/v0.3.2/constellation-0.3.2-ubuntu1604.tar.xz
    unxz constellation-0.3.2-ubuntu1604.tar.xz
    tar -xf constellation-0.3.2-ubuntu1604.tar
    sudo cp constellation-0.3.2-ubuntu1604/constellation-node /usr/local/bin
    sudo chmod 0755 /usr/local/bin/constellation-node
    sudo rm -rf constellation-0.3.2-ubuntu1604.tar.xz constellation-0.3.2-ubuntu1604.tar constellation-0.3.2-ubuntu1604
    # make/install quorum (pinned commit)
    echo "Installing QUORUM"
    git clone https://github.com/alastria/quorum.git
    pushd quorum >/dev/null
    git checkout 0915c95705f92ab96f60cb51a7e001174eb694a6
    make all
    sudo cp build/bin/geth /usr/local/bin
    sudo cp build/bin/bootnode /usr/local/bin
    popd >/dev/null
    # install Porosity
    echo "Installing POROSITY"
    wget -q https://github.com/jpmorganchase/quorum/releases/download/v1.2.0/porosity
    sudo mv porosity /usr/local/bin
    sudo chmod 0755 /usr/local/bin/porosity
elif [ $OS = "ubuntu" ];then
    echo "Installing the environment in " + $OS
    GOREL="go1.8.7.linux-amd64.tar.gz"
    # Do not mess with Go installations already present
    if ! type "go" > /dev/null; then
        # Go installation
        PATH="$PATH:/usr/local/go/bin"
        echo "Installing GO"
        wget -q "https://storage.googleapis.com/golang/${GOREL}"
        tar -xvzf "${GOREL}"
        sudo rm -rf /usr/local/go
        mv go /usr/local/go
        sudo rm "${GOREL}"
    else
        # Same dot-stripping version comparison as the centos branch above
        # (same multi-digit caveat applies).
        V1=$(go version | grep -oP '\d+(?:\.\d+)+')
        V2=$(echo $GOREL | grep -oP '\d+(?:\.\d+)+')
        nV1=$(echo $V1 | sed 's/\.//g')
        nV2=$(echo $V2 | sed 's/\.//g')
        if (( $nV1 >= $nV2 )); then
            echo "Using your own version of Go"
        else
            echo "Your version of go is smaller than required"
            exit
        fi
    fi
    sudo apt-get update && sudo apt-get install -y
    # Library installation
    sudo apt-get install -y software-properties-common unzip wget git make gcc libsodium-dev build-essential libdb-dev zlib1g-dev libtinfo-dev sysvbanner wrk psmisc
    echo "Installing WRK"
    rm -rf wrk
    git clone https://github.com/wg/wrk.git wrk
    cd wrk
    make
    sudo cp wrk /usr/local/bin
    # LEVELDB FIX: build leveldb from a pinned commit and install headers/libs
    git clone https://github.com/google/leveldb.git
    cd leveldb/
    git checkout 0fa5a4f
    make
    sudo scp -r out-static/lib* out-shared/lib* /usr/local/lib/
    cd include/
    sudo scp -r leveldb /usr/local/include/
    sudo ldconfig
    cd ../..
    rm -r leveldb
    # Ethereum installation (solc from the Ethereum PPAs)
    sudo add-apt-repository -y ppa:ethereum/ethereum && sudo add-apt-repository -y ppa:ethereum/ethereum-dev && sudo apt-get update && sudo apt-get install -y solc
    # Constellation installation (release tarball)
    wget -q https://github.com/jpmorganchase/constellation/releases/download/v0.3.2/constellation-0.3.2-ubuntu1604.tar.xz
    unxz constellation-0.3.2-ubuntu1604.tar.xz
    tar -xf constellation-0.3.2-ubuntu1604.tar
    sudo cp constellation-0.3.2-ubuntu1604/constellation-node /usr/local/bin && sudo chmod 0755 /usr/local/bin/constellation-node
    sudo rm -rf constellation-0.3.2-ubuntu1604.tar.xz constellation-0.3.2-ubuntu1604.tar constellation-0.3.2-ubuntu1604
    # Quorum installation (pinned commit)
    git clone https://github.com/alastria/quorum.git
    cd quorum && git checkout 0915c95705f92ab96f60cb51a7e001174eb694a6 && make all && cp build/bin/geth /usr/local/bin && cp build/bin/bootnode /usr/local/bin
    cd ..
    sudo rm -rf constellation-0.3.2-ubuntu1604.tar.xz constellation-0.3.2-ubuntu1604.tar constellation-0.3.2-ubuntu1604 quorum
fi
# Manage GOROOT variable
if [[ -z "$GOROOT" ]]; then
    echo "[*] Trying default $GOROOT. If the script fails please run $DIR/alastria-node/bootstrap.sh or configure GOROOT correctly"
    echo 'export GOROOT=/usr/local/go' >> $DIR/.bashrc
    # NOTE(review): the persisted GOPATH uses $HOME/alastria/workspace while the
    # exported one below uses $DIR/alastria/workspace -- confirm which is intended.
    echo 'export GOPATH=$HOME/alastria/workspace' >> $DIR/.bashrc
    echo 'export PATH=$GOROOT/bin:$GOPATH/bin:$PATH' >> $DIR/.bashrc
    export GOROOT=/usr/local/go
    export GOPATH=$DIR/alastria/workspace
    export PATH=$GOROOT/bin:$GOPATH/bin:$PATH
    echo "[*] GOROOT = $GOROOT, GOPATH = $GOPATH"
    mkdir -p "$GOPATH"/bin
    mkdir -p "$GOPATH"/src
fi
# Stop aborting on errors before sourcing the user's shell config.
set +e
source ~/.bashrc
|
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { Box, Text } from 'ink';
import { Header, Style, useProgram } from '@boost/cli/react';
import { PackemonPackageConfig } from '../../types';
import { PackageForm } from './PackageForm';
/** Map of package name to the Packemon configuration collected for it. */
export type InitPackageConfigs = Record<string, PackemonPackageConfig>;
export interface InitProps {
	/** Names of the packages still awaiting configuration. */
	packageNames: string[];
	/** Called once every package has been configured. */
	onComplete: (configs: InitPackageConfigs) => Promise<unknown>;
}
/**
 * Interactive command that walks through each package in turn, collecting a
 * configuration via `PackageForm`, then hands the full map to `onComplete`.
 */
export function Init({ packageNames, onComplete }: InitProps) {
	const { exit } = useProgram();
	const [remaining, setRemaining] = useState(() => packageNames);
	const [configs, setConfigs] = useState<InitPackageConfigs>({});
	const activePkg = useMemo(() => remaining[0], [remaining]);

	// Record the submitted config, then advance to the next package
	const handleSubmit = useCallback(
		(config: PackemonPackageConfig) => {
			setConfigs((prev) => ({
				...prev,
				[activePkg]: config,
			}));

			setRemaining((prev) => prev.slice(1));
		},
		[activePkg],
	);

	// Once nothing is left to configure, report the results and exit
	useEffect(() => {
		if (remaining.length > 0) {
			return;
		}

		async function finalize() {
			try {
				await onComplete(configs);
			} finally {
				exit();
			}
		}

		void finalize();
	}, [remaining, configs, onComplete, exit]);

	// Nothing left to render once every package is done
	if (remaining.length === 0) {
		return null;
	}

	return (
		<Box flexDirection="column">
			<Header label="Initializing packages" />

			<Box>
				<Text>
					<Text bold>Packages to configure: </Text>
					<Style type="notice">{activePkg}</Style>
					{remaining.length > 1 && `, ${remaining.slice(1).join(', ')}`}
				</Text>
			</Box>

			<Box flexDirection="column" marginTop={1}>
				<PackageForm key={activePkg} onSubmit={handleSubmit} />
			</Box>
		</Box>
	);
}
|
from flask import Flask, request
import sqlite3

app = Flask(__name__)  # was Flask(name): NameError at import time

# NOTE(review): a module-level sqlite3 connection is shared across requests;
# sqlite3 objects are not thread-safe by default -- confirm the deployment is
# single-threaded or move connection handling into each request.
conn = sqlite3.connect('movies.db')
cur = conn.cursor()


@app.route('/api/movies', methods=['GET'])
def get_movie():
    """Return the first movie matching the title/language/year query params.

    Responds 400 with an error body when no row matches.
    """
    title = request.args.get('title')
    language = request.args.get('language')
    year = request.args.get('year')
    # Parameterized query: the previous string concatenation was an SQL
    # injection vector and syntactically broken (values were unquoted).
    query = 'SELECT * FROM movies WHERE title=? AND language=? AND year=?'
    cur.execute(query, (title, language, year))
    result = cur.fetchone()
    if not result:
        return {'error': 'No movie found for given parameters.'}, 400
    return {
        'title': result[0],
        'language': result[1],
        'year': result[2]
    }


if __name__ == '__main__':  # was: if name == 'main'
    app.run(debug=True)
|
<filename>01-upload-a-file/javascript/test/upload.spec.js
const assert = require("assert");
const { Builder, By } = require("selenium-webdriver");
const path = require("path");
describe("Upload Test", function() {
  let driver;

  beforeEach(async function() {
    driver = await new Builder().forBrowser("firefox").build();
  });

  afterEach(async function() {
    await driver.quit();
  });

  it("upload a file", async function() {
    // Selenium's sendKeys on a file input needs an absolute path.
    const filename = "some-file.txt";
    const filePath = path.join(process.cwd(), filename);
    await driver.get("http://the-internet.herokuapp.com/upload");
    await driver.findElement(By.id("file-upload")).sendKeys(filePath);
    await driver.findElement(By.id("file-submit")).click();
    const text = await driver.findElement(By.id("uploaded-files")).getText();
    // strictEqual avoids assert.equal's coercing == comparison.
    assert.strictEqual(text, filename);
  });
});
|
<reponame>andersonzup/orange-talents-07-template-ecommerce
package br.com.zup.mercadolivre.usuario;
import br.com.zup.mercadolivre.config.validacao.annotation.UniqueValue;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.hibernate.validator.constraints.Length;
import javax.validation.constraints.Email;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
/**
 * Request payload for registering a new user (email + raw password).
 * Bean Validation annotations enforce format, length and email uniqueness.
 */
public class NovoUsuarioRequest {
    @NotBlank(message = "O email é obrigatório")
    @Email(message = "Formato de email invalido")
    @UniqueValue(domainClass = Usuario.class, fieldName = "email", message = "Esse email já existe")
    private String email;
    @NotNull(message = "A senha não pode ser nula")
    @Length(min = 6, message = "A senha deve ter no mínimo 6 caracteres")
    private String senha;
    /** No-arg constructor — presumably kept for (de)serialization frameworks; confirm before removing. */
    public NovoUsuarioRequest() {
    }
    /** JSON-binding constructor used by Jackson for property-based deserialization. */
    @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
    public NovoUsuarioRequest(String email, String senha) {
        this.email = email;
        this.senha = senha;
    }
    /** Builds the {@code Usuario} entity, wrapping the raw password in {@code SenhaLimpa}. */
    public Usuario toModel(){
        return new Usuario(email, new SenhaLimpa(senha));
    }
}
|
#!/usr/bin/env bash
BAR_ICON=""
NOTIFY_ICON=/usr/share/icons/Papirus/32x32/apps/system-software-update.svg

# Count of pending package updates (checkupdates is from pacman-contrib).
get_total_updates() { UPDATES=$(checkupdates 2>/dev/null | wc -l); }

while true; do
    get_total_updates

    # notify user of updates (only when notify-send is installed)
    if hash notify-send &>/dev/null; then
        if (( UPDATES > 50 )); then
            notify-send -u critical -i $NOTIFY_ICON \
                "You really need to update!!" "$UPDATES New packages"
        elif (( UPDATES > 25 )); then
            notify-send -u normal -i $NOTIFY_ICON \
                "You should update soon" "$UPDATES New packages"
        elif (( UPDATES > 2 )); then
            notify-send -u low -i $NOTIFY_ICON \
                "$UPDATES New packages"
        fi
    fi

    # when there are updates available
    # every 10 seconds another check for updates is done
    while (( UPDATES > 0 )); do
        if (( UPDATES == 1 )); then
            echo " $UPDATES"
        elif (( UPDATES > 1 )); then
            echo " $UPDATES"
        else
            # NOTE(review): unreachable — the loop condition guarantees UPDATES > 0 here.
            echo $BAR_ICON
        fi
        sleep 10
        get_total_updates
    done

    # when no updates are available, use a longer loop, this saves on CPU
    # and network uptime, only checking once every 30 min for new updates
    while (( UPDATES == 0 )); do
        echo $BAR_ICON
        sleep 1800
        get_total_updates
    done
done
|
<gh_stars>1-10
package lvdb_test
import (
"encoding/json"
"io/ioutil"
"log"
"os"
"strconv"
"testing"
"github.com/incognitochain/incognito-chain/blockchain"
"github.com/incognitochain/incognito-chain/common"
"github.com/incognitochain/incognito-chain/common/base58"
"github.com/incognitochain/incognito-chain/database/lvdb"
"github.com/incognitochain/incognito-chain/metadata"
"github.com/incognitochain/incognito-chain/transaction"
"github.com/stretchr/testify/assert"
"github.com/incognitochain/incognito-chain/database"
_ "github.com/incognitochain/incognito-chain/database/lvdb"
)
// Package-level handle shared by all tests below.
var db database.DatabaseInterface

// Eagerly opens a throwaway leveldb in a temp dir before any test runs and
// wires up the database logger. Failures abort the test binary via log.Fatalf.
// NOTE(review): the temp dir is never removed.
var _ = func() (_ struct{}) {
	dbPath, err := ioutil.TempDir(os.TempDir(), "test_")
	if err != nil {
		log.Fatalf("failed to create temp dir: %+v", err)
	}
	log.Println(dbPath)
	db, err = database.Open("leveldb", dbPath)
	if err != nil {
		log.Fatalf("could not open db path: %s, %+v", dbPath, err)
	}
	database.Logger.Init(common.NewBackend(nil).Logger("test", true))
	return
}()
// Verifies the open/close lifecycle against a fresh leveldb in its own temp
// dir. The local `db` deliberately shadows the package-level handle.
func TestDb_Setup(t *testing.T) {
	dbPath, err := ioutil.TempDir(os.TempDir(), "test_")
	if err != nil {
		t.Fatalf("failed to create temp dir: %+v", err)
	}
	t.Log(dbPath)
	db, err := database.Open("leveldb", dbPath)
	if err != nil {
		t.Fatalf("could not open db path: %s, %+v", dbPath, err)
	}
	if err := db.Close(); err != nil {
		t.Fatalf("db.close %+v", err)
	}
	os.RemoveAll(dbPath)
}
// Exercises the basic KV operations: Put/Get, HasValue, Delete (including a
// key that does not exist) and PutBatch.
func TestDb_Base(t *testing.T) {
	if db != nil {
		db.Put([]byte("a"), []byte{1})
		result, err := db.Get([]byte("a"))
		if err != nil {
			t.Error(err)
		}
		assert.Equal(t, result[0], []byte{1}[0])
		has, err := db.HasValue([]byte("a"))
		if err != nil {
			t.Error(err)
		}
		assert.Equal(t, has, true)
		err = db.Delete([]byte("a"))
		assert.Equal(t, nil, err)
		// Deleting an absent key is expected to succeed.
		err = db.Delete([]byte("b"))
		assert.Equal(t, nil, err)
		has, err = db.HasValue([]byte("a"))
		assert.Equal(t, err, nil)
		assert.Equal(t, has, false)
		batchData := []database.BatchData{}
		batchData = append(batchData, database.BatchData{
			Key:   []byte("abc1"),
			Value: []byte("abc1"),
		})
		batchData = append(batchData, database.BatchData{
			Key:   []byte("abc2"),
			Value: []byte("abc2"),
		})
		err = db.PutBatch(batchData)
		assert.Equal(t, err, nil)
		v, err := db.Get([]byte("abc2"))
		assert.Equal(t, err, nil)
		assert.Equal(t, "abc2", string(v))
	} else {
		t.Error("DB is not open")
	}
}
// Process on Block data
// Round-trips a shard block: store, fetch (including a miss on the zero
// hash), unmarshal, existence check, then delete.
func TestDb_StoreShardBlock(t *testing.T) {
	if db != nil {
		block := &blockchain.ShardBlock{
			Header: blockchain.ShardHeader{
				Version: 1,
				ShardID: 3,
				Height:  1,
			},
		}
		// test store block
		err := db.StoreShardBlock(block, *block.Hash(), block.Header.ShardID)
		assert.Equal(t, err, nil)
		// test Fetch block; a lookup on the zero hash yields empty bytes
		fail, err := db.FetchBlock(common.Hash{})
		assert.Equal(t, nil, err)
		assert.Equal(t, 0, len(fail))
		blockInBytes, err := db.FetchBlock(*block.Hash())
		assert.Equal(t, err, nil)
		blockNew := blockchain.ShardBlock{}
		err = json.Unmarshal(blockInBytes, &blockNew)
		assert.Equal(t, err, nil)
		assert.Equal(t, blockNew.Hash(), block.Hash())
		// has block
		has, err := db.HasBlock(*block.Hash())
		assert.Equal(t, err, nil)
		assert.Equal(t, has, true)
		// delete block
		err = db.DeleteBlock(*blockNew.Hash(), blockNew.Header.Height, blockNew.Header.ShardID)
		assert.Equal(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Block index
// Stores a block-hash <-> (height, shard) index and reads it back both ways.
func TestDb_StoreShardBlockIndex(t *testing.T) {
	if db != nil {
		block := &blockchain.ShardBlock{
			Header: blockchain.ShardHeader{
				Version: 1,
				ShardID: 3,
				Height:  1,
			},
		}
		// test store block
		err := db.StoreShardBlockIndex(*block.Hash(), block.Header.Height, block.Header.ShardID)
		assert.Equal(t, err, nil)
		// test GetIndexOfBlock
		blockHeigh, shardID, err := db.GetIndexOfBlock(*block.Hash())
		assert.Equal(t, err, nil)
		assert.Equal(t, blockHeigh, uint64(1))
		assert.Equal(t, shardID, uint8(3))
		// GetBlockByIndex: previously the returned error was silently dropped.
		hash, err := db.GetBlockByIndex(1, 3)
		assert.Equal(t, err, nil)
		assert.Equal(t, hash.String(), block.Hash().String())
	} else {
		t.Error("DB is not open")
	}
}
// Beacon
// Round-trips a beacon block: store, fetch, unmarshal, existence check, delete.
func TestDb_StoreBeaconBlock(t *testing.T) {
	if db != nil {
		beaconBlock := &blockchain.BeaconBlock{
			Header: blockchain.BeaconHeader{
				Version: 1,
				Height:  1,
			},
		}
		// test store block
		err := db.StoreBeaconBlock(beaconBlock, *beaconBlock.Hash())
		assert.Equal(t, err, nil)
		// test Fetch block
		blockInBytes, err := db.FetchBeaconBlock(*beaconBlock.Hash())
		assert.Equal(t, err, nil)
		blockNew := blockchain.BeaconBlock{}
		err = json.Unmarshal(blockInBytes, &blockNew)
		assert.Equal(t, err, nil)
		assert.Equal(t, blockNew.Hash(), beaconBlock.Hash())
		// has block
		has, err := db.HasBeaconBlock(*beaconBlock.Hash())
		assert.Equal(t, err, nil)
		assert.Equal(t, has, true)
		// delete block
		err = db.DeleteBeaconBlock(*blockNew.Hash(), blockNew.Header.Height)
		assert.Equal(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Block beacon index
// Stores a beacon block-hash <-> height index and reads it back both ways.
func TestDb_StoreShardBlockBeaconIndex(t *testing.T) {
	if db != nil {
		beaconBlock := &blockchain.BeaconBlock{
			Header: blockchain.BeaconHeader{
				Version: 1,
				Height:  1,
			},
		}
		// test store block
		err := db.StoreBeaconBlockIndex(*beaconBlock.Hash(), beaconBlock.Header.Height)
		assert.Equal(t, err, nil)
		// test GetIndexOfBlock
		blockHeigh, err := db.GetIndexOfBeaconBlock(*beaconBlock.Hash())
		assert.Equal(t, err, nil)
		assert.Equal(t, blockHeigh, uint64(1))
		// GetBlockByIndex: previously the returned error was silently dropped.
		hash, err := db.GetBeaconBlockHashByIndex(1)
		assert.Equal(t, err, nil)
		assert.Equal(t, hash.String(), beaconBlock.Hash().String())
	} else {
		t.Error("DB is not open")
	}
}
//Crossshard
// Stores next-cross-shard-height entries (1 -> 2 -> 4) and checks that
// RestoreCrossShardNextHeights resets the entry at height 2 back to 0.
func TestDb_StoreCrossShardNextHeight(t *testing.T) {
	if db != nil {
		err := db.StoreCrossShardNextHeight(0, 1, 1, 2)
		assert.Equal(t, err, nil)
		err = db.StoreCrossShardNextHeight(0, 1, 2, 0)
		assert.Equal(t, err, nil)
		val, err := db.FetchCrossShardNextHeight(0, 1, 1)
		assert.Equal(t, err, nil)
		assert.Equal(t, uint64(val), uint64(2))
		err = db.StoreCrossShardNextHeight(0, 1, 2, 4)
		assert.Equal(t, err, nil)
		err = db.StoreCrossShardNextHeight(0, 1, 4, 0)
		assert.Equal(t, err, nil)
		err = db.RestoreCrossShardNextHeights(0, 1, 2)
		assert.Equal(t, err, nil)
		val, err = db.FetchCrossShardNextHeight(0, 1, 2)
		assert.Equal(t, err, nil)
		assert.Equal(t, uint64(val), uint64(0))
	} else {
		t.Error("DB is not open")
	}
}
// Transaction index
// Stores a tx-hash -> (block hash, index) mapping, reads it back, deletes it.
func TestDb_StoreTxIndex(t *testing.T) {
	if db != nil {
		block := &blockchain.ShardBlock{
			Header: blockchain.ShardHeader{
				Version: 1,
				ShardID: 3,
				Height:  1,
			},
			Body: blockchain.ShardBody{
				Transactions: []metadata.Transaction{},
			},
		}
		block.Body.Transactions = append(block.Body.Transactions, &transaction.Tx{
			Version: 1,
			Info:    []byte("Test 1"),
		})
		block.Body.Transactions = append(block.Body.Transactions, &transaction.Tx{
			Version: 1,
			Info:    []byte("Test 2"),
		})
		err := db.StoreTransactionIndex(*block.Body.Transactions[1].Hash(), *block.Hash(), 1)
		assert.Equal(t, err, nil)
		blockHash, index, err := db.GetTransactionIndexById(*block.Body.Transactions[1].Hash())
		// Plain nil check: the previous `err.(*database.DatabaseError) != nil`
		// type assertion panics on the success path, where err is a nil interface.
		if err != nil {
			t.Error(err)
		}
		assert.Equal(t, blockHash, *block.Hash())
		assert.Equal(t, index, 1)
		err = db.DeleteTransactionIndex(*block.Body.Transactions[1].Hash())
		assert.Equal(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Best state of Prev
// Stores a serialized previous best state, fetches it back, then verifies
// CleanBackup removes it.
func TestDb_StorePrevBestState(t *testing.T) {
	if db != nil {
		bestState := blockchain.BestState{
			Beacon: &blockchain.BeaconBestState{
				Epoch: 100,
			},
		}
		tempMarshal, err := json.Marshal(bestState.Beacon)
		assert.Equal(t, err, nil)
		err = db.StorePrevBestState(tempMarshal, true, 0)
		assert.Equal(t, err, nil)
		beaconInBytes, err := db.FetchPrevBestState(true, 0)
		assert.Equal(t, err, nil)
		temp := blockchain.BeaconBestState{}
		json.Unmarshal(beaconInBytes, &temp)
		assert.Equal(t, bestState.Beacon.Epoch, temp.Epoch)
		err = db.CleanBackup(true, 0)
		// Previously the CleanBackup error was overwritten without being checked.
		assert.Equal(t, err, nil)
		_, err = db.FetchPrevBestState(true, 0)
		assert.NotEqual(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Best state of shard chain
// Stores/fetches the shard best state, then verifies CleanShardBestState
// makes a subsequent fetch fail.
func TestDb_StoreShardBestState(t *testing.T) {
	if db != nil {
		besState := blockchain.BestState{
			Shard: make(map[byte]*blockchain.ShardBestState),
		}
		bestStateShard := blockchain.ShardBestState{
			Epoch: 100,
		}
		besState.Shard[0] = &bestStateShard
		err := db.StoreShardBestState(bestStateShard, 0)
		assert.Equal(t, err, nil)
		temp, err := db.FetchShardBestState(0)
		assert.Equal(t, err, nil)
		tempObject := blockchain.ShardBestState{}
		err = json.Unmarshal(temp, &tempObject)
		assert.Equal(t, err, nil)
		assert.Equal(t, tempObject.Epoch, bestStateShard.Epoch)
		err = db.CleanShardBestState()
		assert.Equal(t, err, nil)
		_, err = db.FetchShardBestState(0)
		assert.NotEqual(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Best state of beacon chain
// Stores/fetches the beacon best state, then verifies CleanBeaconBestState
// makes a subsequent fetch fail.
func TestDb_StoreBeaconBestState(t *testing.T) {
	if db != nil {
		bestState := blockchain.BestState{
			Beacon: &blockchain.BeaconBestState{
				Epoch: 100,
			},
		}
		err := db.StoreBeaconBestState(bestState)
		assert.Equal(t, err, nil)
		temp, err := db.FetchBeaconBestState()
		assert.Equal(t, err, nil)
		tempObject := blockchain.BestState{}
		err = json.Unmarshal(temp, &tempObject)
		assert.Equal(t, err, nil)
		assert.Equal(t, tempObject.Beacon.Epoch, bestState.Beacon.Epoch)
		err = db.CleanBeaconBestState()
		assert.Equal(t, err, nil)
		_, err = db.FetchBeaconBestState()
		assert.NotEqual(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Commitee with epoch
// NOTE(review): the test name says "ByHeight" but all APIs used are *ByEpoch;
// the block height (100) doubles as the epoch key here -- confirm intent.
func TestDb_StoreCommitteeByHeight(t *testing.T) {
	if db != nil {
		block := blockchain.ShardBlock{
			Header: blockchain.ShardHeader{
				Height: 100,
			},
		}
		bestState := blockchain.BestState{
			Beacon: &blockchain.BeaconBestState{
				Epoch:          100,
				ShardCommittee: make(map[byte][]string),
			},
		}
		bestState.Beacon.ShardCommittee[0] = make([]string, 0)
		bestState.Beacon.ShardCommittee[0] = append(bestState.Beacon.ShardCommittee[0], "committee1")
		bestState.Beacon.ShardCommittee[0] = append(bestState.Beacon.ShardCommittee[0], "committee2")
		err := db.StoreCommitteeByEpoch(block.Header.Height, bestState.Beacon.GetShardCommittee())
		assert.Equal(t, err, nil)
		shardCommittee := make(map[byte][]string)
		data, err := db.FetchCommitteeByEpoch(100)
		assert.Equal(t, err, nil)
		err = json.Unmarshal(data, &shardCommittee)
		assert.Equal(t, err, nil)
		assert.Equal(t, shardCommittee[0][0], "committee1")
		assert.Equal(t, shardCommittee[0][1], "committee2")
		has, err := db.HasCommitteeByEpoch(100)
		assert.Equal(t, has, true)
		assert.Equal(t, err, nil)
		err = db.DeleteCommitteeByEpoch(100)
		assert.Equal(t, err, nil)
		has, err = db.HasCommitteeByEpoch(100)
		assert.Equal(t, has, false)
		assert.Equal(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// Stores serial numbers, then exercises the backup/restore cycle (restore
// rolls the stored set back) and CleanSerialNumbers.
func TestDb_StoreSerialNumbers(t *testing.T) {
	if db != nil {
		serialNumber := make([][]byte, 0)
		ser1 := []byte{0, 1}
		ser2 := []byte{0, 2}
		serialNumber = append(serialNumber, ser1)
		serialNumber = append(serialNumber, ser2)
		tokenID := common.Hash{}
		err := db.StoreSerialNumbers(tokenID, serialNumber, 0)
		assert.Equal(t, err, nil)
		has, err := db.HasSerialNumber(tokenID, ser1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, true)
		err = db.BackupSerialNumbersLen(tokenID, 0)
		assert.Equal(t, err, nil)
		// Restoring removes the serial numbers stored after the backup point.
		err = db.RestoreSerialNumber(tokenID, 0, serialNumber)
		assert.Equal(t, err, nil)
		has, err = db.HasSerialNumber(tokenID, ser1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, false)
		err = db.StoreSerialNumbers(tokenID, serialNumber, 0)
		assert.Equal(t, err, nil)
		has, err = db.HasSerialNumber(tokenID, ser1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, true)
		err = db.CleanSerialNumbers()
		assert.Equal(t, err, nil)
		has, err = db.HasSerialNumber(tokenID, ser1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, false)
	} else {
		t.Error("DB is not open")
	}
}
// Stores commitments and checks lookup by value/index, length, backup/restore
// and CleanCommitments.
func TestDb_StoreCommitments(t *testing.T) {
	if db != nil {
		committments := make([][]byte, 0)
		cm1 := []byte{0, 1}
		cm2 := []byte{0, 2}
		committments = append(committments, cm1)
		committments = append(committments, cm2)
		tokenID := common.Hash{}
		publicKey := common.Hash{}
		err := db.StoreCommitments(tokenID, publicKey.GetBytes(), committments, 0)
		assert.Equal(t, err, nil)
		has, err := db.HasCommitment(tokenID, cm1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, true)
		has, err = db.HasCommitmentIndex(tokenID, 0, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, true)
		len, err := db.GetCommitmentLength(tokenID, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, len.Int64(), int64(2))
		temp, err := db.GetCommitmentByIndex(tokenID, 1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, temp, cm2)
		index, err := db.GetCommitmentIndex(tokenID, cm1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, index.Uint64(), uint64(0))
		err = db.BackupCommitmentsOfPubkey(tokenID, 0, publicKey.GetBytes())
		assert.Equal(t, err, nil)
		err = db.RestoreCommitmentsOfPubkey(tokenID, 0, publicKey.GetBytes(), committments)
		assert.Equal(t, err, nil)
		// NOTE(review): likely meant HasCommitment here (copy-paste from the
		// serial-number test); the assertion may pass for the wrong reason.
		has, err = db.HasSerialNumber(tokenID, cm1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, false)
		err = db.CleanCommitments()
		assert.Equal(t, err, nil)
		has, err = db.HasCommitment(tokenID, cm1, 0)
		assert.Equal(t, err, nil)
		assert.Equal(t, has, false)
	} else {
		t.Error("DB is not open")
	}
}
// output
// Stores output coins for a public key, fetches them back and deletes them.
func TestDb_StoreOutputCoins(t *testing.T) {
	if db != nil {
		outputCoins := make([][]byte, 0)
		cm1 := []byte{0, 1}
		cm2 := []byte{0, 2}
		cm3 := []byte{0, 3}
		outputCoins = append(outputCoins, cm1)
		outputCoins = append(outputCoins, cm2)
		outputCoins = append(outputCoins, cm3)
		tokenID := common.Hash{}
		publicKey := common.Hash{}
		err := db.StoreOutputCoins(tokenID, publicKey.GetBytes(), outputCoins, 1)
		assert.Equal(t, err, nil)
		data, err := db.GetOutcoinsByPubkey(tokenID, publicKey.GetBytes(), 1)
		assert.Equal(t, err, nil)
		assert.NotEqual(t, 2, len(data))
		assert.Equal(t, 3, len(data))
		err = db.DeleteOutputCoin(tokenID, publicKey.GetBytes(), outputCoins, 1)
		assert.Equal(t, err, nil)
	} else {
		t.Error("DB is not open")
	}
}
// SNDerivator
// TestDb_StoreSNDerivators verifies storing, membership-checking and
// cleaning of serial-number derivators.
func TestDb_StoreSNDerivators(t *testing.T) {
	if db == nil {
		t.Error("DB is not open")
		return
	}
	tokenID := common.Hash{}
	derivators := [][]byte{{0, 1}, {0, 2}}
	err := db.StoreSNDerivators(tokenID, derivators, 1)
	assert.Equal(t, nil, err)
	has, err := db.HasSNDerivator(tokenID, derivators[0], 1)
	assert.Equal(t, nil, err)
	assert.Equal(t, true, has)
	err = db.CleanSNDerivator()
	assert.Equal(t, nil, err)
	has, err = db.HasSerialNumber(tokenID, derivators[1], 1)
	assert.Equal(t, nil, err)
	assert.Equal(t, false, has)
}
// Fee estimator
// TestDb_StoreFeeEstimator verifies round-tripping fee-estimator data for a
// shard and that a read after cleaning fails.
func TestDb_StoreFeeEstimator(t *testing.T) {
	if db == nil {
		t.Error("DB is not open")
		return
	}
	payload := []byte{1, 2, 3, 4, 5}
	err := db.StoreFeeEstimator(payload, 1)
	assert.Equal(t, nil, err)
	data, err := db.GetFeeEstimator(1)
	assert.Equal(t, payload, data)
	assert.Equal(t, nil, err)
	db.CleanFeeEstimator()
	_, err = db.GetFeeEstimator(1)
	assert.NotEqual(t, nil, err)
}
// Custom token
// TestDb_StoreCustomToken exercises the custom-token and privacy-custom-token
// lifecycle: store/list/delete of tokens, per-token transactions, and the
// token payment-address balance/UTXO lookups.
//
// Fix: the errors returned by CustomTokenTxs / PrivacyCustomTokenTxs were
// previously ignored; each call is now followed by an error assertion.
func TestDb_StoreCustomToken(t *testing.T) {
	tokenID := common.Hash{}
	data := []byte{1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8}
	err := db.StoreCustomToken(tokenID, data)
	assert.Equal(t, err, nil)
	err = db.StorePrivacyCustomToken(tokenID, data)
	assert.Equal(t, err, nil)
	dataTemp, err := db.ListCustomToken()
	assert.Equal(t, err, nil)
	assert.Equal(t, len(dataTemp), 1)
	dataTemp, err = db.ListPrivacyCustomToken()
	assert.Equal(t, err, nil)
	assert.Equal(t, len(dataTemp), 1)
	err = db.DeleteCustomToken(tokenID)
	assert.Equal(t, err, nil)
	err = db.DeletePrivacyCustomToken(tokenID)
	assert.Equal(t, err, nil)
	dataTemp, err = db.ListCustomToken()
	assert.Equal(t, err, nil)
	assert.Equal(t, len(dataTemp), 0)
	dataTemp, err = db.ListPrivacyCustomToken()
	assert.Equal(t, err, nil)
	assert.Equal(t, len(dataTemp), 0)
	err = db.StoreCustomToken(tokenID, data)
	assert.Equal(t, err, nil)
	err = db.StorePrivacyCustomToken(tokenID, data)
	assert.Equal(t, err, nil)
	has := db.CustomTokenIDExisted(tokenID)
	assert.Equal(t, true, has)
	has = db.PrivacyCustomTokenIDExisted(tokenID)
	assert.Equal(t, true, has)
	err = db.StoreCustomTokenTx(tokenID, 0, 1, 0, data)
	assert.Equal(t, err, nil)
	temp, err := db.CustomTokenTxs(tokenID)
	assert.Equal(t, nil, err) // previously unchecked
	assert.Equal(t, 1, len(temp))
	err = db.DeleteCustomTokenTx(tokenID, 0, 0, 1)
	assert.Equal(t, err, nil)
	temp, err = db.CustomTokenTxs(tokenID)
	assert.Equal(t, nil, err) // previously unchecked
	assert.Equal(t, 0, len(temp))
	err = db.StorePrivacyCustomTokenTx(tokenID, 0, 1, 0, data)
	assert.Equal(t, err, nil)
	temp, err = db.PrivacyCustomTokenTxs(tokenID)
	assert.Equal(t, nil, err) // previously unchecked
	assert.Equal(t, 1, len(temp))
	err = db.DeletePrivacyCustomTokenTx(tokenID, 0, 0, 1)
	assert.Equal(t, err, nil)
	temp, err = db.PrivacyCustomTokenTxs(tokenID)
	assert.Equal(t, nil, err) // previously unchecked
	assert.Equal(t, 0, len(temp))
	// custom token payment address: build the raw leveldb key by hand, write a
	// fake unspent UTXO entry, then query balance and UTXO views over it.
	tokenKey := lvdb.TokenPaymentAddressPrefix
	tokenKey = append(tokenKey, lvdb.Splitter...)
	tokenKey = append(tokenKey, tokenID.String()...)
	utxoHash := []byte{0, 0, 1}
	voutIndex := 0
	value := 10
	paymentAddressKey := tokenKey
	paymentAddressKey = append(paymentAddressKey, lvdb.Splitter...)
	paymentAddressKey = append(paymentAddressKey, []byte("<KEY>")...)
	paymentAddressKey = append(paymentAddressKey, lvdb.Splitter...)
	paymentAddressKey = append(paymentAddressKey, utxoHash[:]...)
	paymentAddressKey = append(paymentAddressKey, lvdb.Splitter...)
	paymentAddressKey = append(paymentAddressKey, common.Int32ToBytes(int32(voutIndex))...)
	paymentAddressValue := strconv.Itoa(int(value)) + string(lvdb.Splitter) + string(lvdb.Unspent) + string(lvdb.Splitter)
	err = db.Put(paymentAddressKey, []byte(paymentAddressValue))
	assert.Equal(t, nil, err)
	dataBalance, err := db.GetCustomTokenPaymentAddressesBalance(tokenID)
	assert.Equal(t, nil, err)
	balance, ok := dataBalance["<KEY>"]
	assert.Equal(t, true, ok)
	assert.Equal(t, uint64(10), uint64(balance))
	p, _, _ := base58.Base58Check{}.Decode("<KEY>")
	dataUTXO, err := db.GetCustomTokenPaymentAddressUTXO(tokenID, p)
	assert.Equal(t, nil, err)
	assert.Equal(t, 1, len(dataUTXO))
}
// TestDb_StorePrivacyCustomTokenCrossShard verifies store/list/exist/delete
// for cross-shard privacy custom tokens.
func TestDb_StorePrivacyCustomTokenCrossShard(t *testing.T) {
	tokenID := common.Hash{}
	payload := []byte{1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8}
	err := db.StorePrivacyCustomTokenCrossShard(tokenID, payload)
	assert.Equal(t, nil, err)
	result, err := db.ListPrivacyCustomTokenCrossShard()
	assert.Equal(t, nil, err)
	assert.Equal(t, 1, len(result))
	assert.Equal(t, payload, result[0])
	assert.Equal(t, true, db.PrivacyCustomTokenIDCrossShardExisted(tokenID))
	err = db.DeletePrivacyCustomTokenCrossShard(tokenID)
	assert.Equal(t, nil, err)
}
func TestDb_StoreIncomingCrossShard(t *testing.T) {
err := db.StoreIncomingCrossShard(0, 1, 1000, common.Hash{})
assert.Equal(t, nil, err)
err = db.HasIncomingCrossShard(0, 1, common.Hash{})
assert.Equal(t, nil, err)
height, err := db.GetIncomingCrossShard(0, 1, common.Hash{})
assert.Equal(t, nil, err)
assert.Equal(t, uint64(1000), uint64(height))
err = db.DeleteIncomingCrossShard(0, 1, common.Hash{})
assert.Equal(t, nil, err)
}
func TestDb_StoreAcceptedShardToBeacon(t *testing.T) {
err := db.StoreAcceptedShardToBeacon(0, 1000, common.Hash{})
assert.Equal(t, nil, err)
err = db.HasAcceptedShardToBeacon(0, common.Hash{})
assert.Equal(t, nil, err)
err = db.HasAcceptedShardToBeacon(1, common.Hash{})
assert.NotEqual(t, nil, err)
height, err := db.GetAcceptedShardToBeacon(0, common.Hash{})
assert.Equal(t, nil, err)
assert.Equal(t, uint64(1000), uint64(height))
height, err = db.GetAcceptedShardToBeacon(1, common.Hash{})
assert.NotEqual(t, nil, err)
err = db.DeleteAcceptedShardToBeacon(0, common.Hash{})
assert.Equal(t, nil, err)
err = db.HasAcceptedShardToBeacon(0, common.Hash{})
assert.NotEqual(t, nil, err)
}
|
# modified version of SSHKey rubygem module
# https://github.com/bensie/sshkey
#
# Copyright (c) 2011 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'openssl'
require 'base64'
require 'digest/md5'
require 'digest/sha1'
module RHC
  module Vendor
    # Vendored (modified) copy of the SSHKey rubygem: generates RSA/DSA
    # keypairs, converts them to OpenSSH public-key format, validates
    # public keys and computes MD5/SHA1 fingerprints.
    class SSHKey
      # Maps internal key type -> OpenSSH wire-format type string.
      SSH_TYPES = {"rsa" => "ssh-rsa", "dsa" => "ssh-dss"}
      # Public-key components (OpenSSL accessor names) serialized per type,
      # in the order required by the OpenSSH public-key format.
      SSH_CONVERSION = {"rsa" => ["e", "n"], "dsa" => ["p", "q", "g", "pub_key"]}

      attr_reader :key_object, :comment, :type
      attr_accessor :passphrase

      class << self
        # Generate a new keypair and return an SSHKey object
        #
        # The default behavior when providing no options will generate a 2048-bit RSA
        # keypair.
        #
        # ==== Parameters
        # * options<~Hash>:
        #   * :type<~String> - "rsa" or "dsa", "rsa" by default
        #   * :bits<~Integer> - Bit length
        #   * :comment<~String> - Comment to use for the public key, defaults to ""
        #   * :passphrase<~String> - Encrypt the key with this passphrase
        #
        def generate(options = {})
          type = options[:type] || "rsa"
          # JRuby modulus size must range from 512 to 1024
          default_bits = type == "rsa" ? 2048 : 1024
          bits = options[:bits] || default_bits
          # Only encrypt the PEM when a passphrase was supplied.
          cipher = OpenSSL::Cipher::Cipher.new("AES-128-CBC") if options[:passphrase]
          case type.downcase
          when "rsa" then new(OpenSSL::PKey::RSA.generate(bits).to_pem(cipher, options[:passphrase]), options)
          when "dsa" then new(OpenSSL::PKey::DSA.generate(bits).to_pem(cipher, options[:passphrase]), options)
          else
            raise "Unknown key type: #{type}"
          end
        end

        # Validate an existing SSH public key
        #
        # Returns true or false depending on the validity of the public key provided
        #
        # ==== Parameters
        # * ssh_public_key<~String> - "ssh-rsa AAAAB3NzaC1yc2EA...."
        #
        def valid_ssh_public_key?(ssh_public_key)
          ssh_type, encoded_key = ssh_public_key.split(" ")
          type = SSH_TYPES.invert[ssh_type]
          # Binary length prefix for the type string; both "ssh-rsa" and
          # "ssh-dss" are 7 characters, hence the literal 7.
          prefix = [0,0,0,7].pack("C*")
          decoded = Base64.decode64(encoded_key)
          # Base64 decoding is too permissive, so we should validate if encoding is correct
          return false unless Base64.encode64(decoded).gsub("\n", "") == encoded_key
          # Strip the "<len><type>" header; fails (returns nil) when absent.
          return false unless decoded.sub!(/^#{prefix}#{ssh_type}/, "")
          unpacked = decoded.unpack("C*")
          data = []
          index = 0
          # Walk the remaining payload as a sequence of length-prefixed
          # big-endian integers (RFC 4251 string/mpint encoding).
          until unpacked[index].nil?
            datum_size = from_byte_array unpacked[index..index+4-1], 4
            index = index + 4
            datum = from_byte_array unpacked[index..index+datum_size-1], datum_size
            data << datum
            index = index + datum_size
          end
          # A valid key carries exactly the component count for its type.
          SSH_CONVERSION[type].size == data.size
        rescue
          false
        end

        # Fingerprints
        #
        # Accepts either a public or private key
        #
        # MD5 fingerprint for the given SSH key
        def md5_fingerprint(key)
          if key.match(/PRIVATE/)
            new(key).md5_fingerprint
          else
            Digest::MD5.hexdigest(decoded_key(key)).gsub(fingerprint_regex, '\1:\2')
          end
        end
        alias_method :fingerprint, :md5_fingerprint

        # SHA1 fingerprint for the given SSH key
        def sha1_fingerprint(key)
          if key.match(/PRIVATE/)
            new(key).sha1_fingerprint
          else
            Digest::SHA1.hexdigest(decoded_key(key)).gsub(fingerprint_regex, '\1:\2')
          end
        end

        private

        # Interprets +byte_array+ as a big-endian unsigned integer.
        # Raises when the array length differs from +expected_size+.
        def from_byte_array(byte_array, expected_size = nil)
          num = 0
          raise "Byte array too short" if !expected_size.nil? && expected_size != byte_array.size
          byte_array.reverse.each_with_index do |item, index|
            num += item * 256**(index)
          end
          num
        end

        # Strips the leading "ssh-rsa " / "ssh-dss " prefix and Base64-decodes
        # the remainder of a public key line.
        def decoded_key(key)
          Base64.decode64(key.chomp.gsub(/ssh-[dr]s[as] /, ''))
        end

        # Inserts ":" between every hex byte pair of a digest.
        def fingerprint_regex
          /(.{2})(?=.)/
        end
      end

      # Create a new SSHKey object
      #
      # ==== Parameters
      # * private_key - Existing RSA or DSA private key
      # * options<~Hash>
      #   * :comment<~String> - Comment to use for the public key, defaults to ""
      #   * :passphrase<~String> - If the key is encrypted, supply the passphrase
      #
      def initialize(private_key, options = {})
        @passphrase = options[:passphrase]
        @comment = options[:comment] || ""
        # Try RSA first; fall back to DSA when parsing fails.
        begin
          @key_object = OpenSSL::PKey::RSA.new(private_key, passphrase)
          @type = "rsa"
        rescue
          @key_object = OpenSSL::PKey::DSA.new(private_key, passphrase)
          @type = "dsa"
        end
      end

      # Fetch the RSA/DSA private key
      #
      # rsa_private_key and dsa_private_key are aliased for backward compatibility
      def private_key
        key_object.to_pem
      end
      alias_method :rsa_private_key, :private_key
      alias_method :dsa_private_key, :private_key

      # Fetch the encrypted RSA/DSA private key using the passphrase provided
      #
      # If no passphrase is set, returns the unencrypted private key
      def encrypted_private_key
        return private_key unless passphrase
        key_object.to_pem(OpenSSL::Cipher::Cipher.new("AES-128-CBC"), passphrase)
      end

      # Fetch the RSA/DSA public key
      #
      # rsa_public_key and dsa_public_key are aliased for backward compatibility
      def public_key
        key_object.public_key.to_pem
      end
      alias_method :rsa_public_key, :public_key
      alias_method :dsa_public_key, :public_key

      # SSH public key (single "type base64 comment" line, OpenSSH format)
      def ssh_public_key
        [SSH_TYPES[type], Base64.encode64(ssh_public_key_conversion).gsub("\n", ""), comment].join(" ").strip
      end

      # Fingerprints
      #
      # MD5 fingerprint for the given SSH public key
      def md5_fingerprint
        Digest::MD5.hexdigest(ssh_public_key_conversion).gsub(/(.{2})(?=.)/, '\1:\2')
      end
      alias_method :fingerprint, :md5_fingerprint

      # SHA1 fingerprint for the given SSH public key
      def sha1_fingerprint
        Digest::SHA1.hexdigest(ssh_public_key_conversion).gsub(/(.{2})(?=.)/, '\1:\2')
      end

      private

      # SSH Public Key Conversion
      #
      # All data type encoding is defined in the section #5 of RFC #4251.
      # String and mpint (multiple precision integer) types are encoded this way:
      # 4-bytes word: data length (unsigned big-endian 32 bits integer)
      # n bytes: binary representation of the data
      # For instance, the "ssh-rsa" string is encoded as the following byte array
      # [0, 0, 0, 7, 's', 's', 'h', '-', 'r', 's', 'a']
      def ssh_public_key_conversion
        # Literal 7 works for both type strings ("ssh-rsa"/"ssh-dss").
        out = [0,0,0,7].pack("C*")
        out += SSH_TYPES[type]
        SSH_CONVERSION[type].each do |method|
          byte_array = to_byte_array(key_object.public_key.send(method).to_i)
          out += encode_unsigned_int_32(byte_array.length).pack("c*")
          out += byte_array.pack("C*")
        end
        return out
      end

      # Splits +value+ into 4 big-endian bytes.
      def encode_unsigned_int_32(value)
        out = []
        out[0] = value >> 24 & 0xff
        out[1] = value >> 16 & 0xff
        out[2] = value >> 8 & 0xff
        out[3] = value & 0xff
        return out
      end

      # Big-endian byte representation of an arbitrary-precision integer.
      # The loop condition keeps one extra sign byte when needed
      # (result.last[7] / num[7] inspect the top bit).
      def to_byte_array(num)
        result = []
        begin
          result << (num & 0xff)
          num >>= 8
        end until (num == 0 || num == -1) && (result.last[7] == num[7])
        result.reverse
      end
    end
  end
end
|
<reponame>ooooo-youwillsee/leetcode<gh_stars>10-100
//
// Created by ooooo on 2020/3/17.
//
#ifndef CPP_026__SOLUTION1_H_
#define CPP_026__SOLUTION1_H_
#include "TreeNode.h"
class Solution {
public:
    // Returns true when node2's tree matches the corresponding positions of
    // node1. An exhausted node2 matches anything; an exhausted node1 with
    // remaining node2 does not.
    bool isSameTree(TreeNode *node1, TreeNode *node2) {
        if (!node2) return true;
        if (!node1) return false;
        return node1->val == node2->val && isSameTree(node1->left, node2->left) && isSameTree(node1->right, node2->right);
    }

    // Whether B is a substructure of A (empty B is not a substructure by
    // convention).
    //
    // Bug fix: the original returned immediately when the root values matched
    // but the subtrees did not, so a match of B starting deeper inside A was
    // missed (e.g. A = 1->(1->2), B = 1->2). Try a match at the current root
    // and, failing that, keep searching both children.
    bool isSubStructure(TreeNode *A, TreeNode *B) {
        if (!A || !B) return false;
        return isSameTree(A, B) || isSubStructure(A->left, B) || isSubStructure(A->right, B);
    }
};
#endif //CPP_026__SOLUTION1_H_
|
<filename>app/overrides/add_original_price_to_products_list.rb
# Deface override for Spree's shared product list partial: after the current
# (selling) price, render the original price — struck-through via the
# "old price" class — for products that are on sale.
Deface::Override.new(
  :virtual_path => 'spree/shared/_products',
  :name => 'add_original_price_to_product_list',
  :insert_after => "span.price.selling",
  :text => '
    <span class="old price">
      <%= display_original_price(product) if product.on_sale?%>
    </span>
  ')
|
#!/bin/bash
set -e
opt=${1}
env=${2}
# Configure the default AWS region and log docker into ECR.
aws_login() {
    aws configure set default.region us-east-1
    # `aws ecr get-login` prints a `docker login ...` command; eval executes it.
    eval $(aws ecr get-login --no-include-email)
}
# One-time host provisioning on Ubuntu: installs docker-ce, Python 3.6,
# pip, awscli and docker-compose.
setup() {
    # UTF-8 locale needed by pip/awscli.
    export LC_ALL="en_US.UTF-8"
    export LC_CTYPE="en_US.UTF-8"
    sudo add-apt-repository ppa:deadsnakes/ppa
    # Docker's official apt repository and signing key.
    curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
    sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
    sudo apt-get update
    apt-cache policy docker-ce
    sudo apt-get install -y docker-ce
    sudo apt-get install python3.6
    sudo apt-get install python3-pip
    pip3 install awscli
    pip3 install docker-compose
}
# Provision the host on first run (the awscli python package doubles as the
# "already set up" marker).
if ! python3 -c "import awscli" &> /dev/null; then
    echo "Installing packages and dependencies..."
    setup;
fi

# Every ECR operation needs the account id — fail fast (and with a nonzero
# status, so CI notices) instead of the previous `exit 0`.
if [ -z "${AWS_ACCOUNT_ID}" ]; then
    echo "AWS_ACCOUNT_ID not set."
    exit 1
fi

# Default to the most recent image tag.
if [ -z "${COMMIT_ID}" ]; then
    export COMMIT_ID="latest"
fi
# Command dispatcher. $opt is the sub-command, $env the target environment.
# Fixes: error paths now exit 1 (previously exit 0 masked failures in CI),
# and two user-facing typos ("varibles", "Sucessfully") are corrected.
case $opt in
    pull)
        aws_login;
        echo "Pulling environment variables file..."
        aws s3 cp s3://cloudcv-secrets/evalai/${env}/docker_${env}.env ./docker/prod/docker_${env}.env
        echo "Environment variables file successfully downloaded."
        echo "Pulling docker images from ECR..."
        docker-compose -f docker-compose-${env}.yml pull
        echo "Completed Pull operation."
        ;;
    deploy-django)
        echo "Deploying django docker container..."
        docker-compose -f docker-compose-${env}.yml up -d django
        echo "Completed deploy operation."
        ;;
    deploy-nodejs)
        echo "Deploying nodejs docker container..."
        docker-compose -f docker-compose-${env}.yml up -d nodejs
        echo "Completed deploy operation."
        ;;
    deploy-worker)
        token=${3}
        challenge=${4}
        if [ -z "$4" ]; then
            echo "Please input Challenge ID"
            exit 1
        fi
        echo "Pulling queue name for $env server challenge..."
        if [ ${env} == "staging" ]; then
            queue_name=$(curl -k -L -X GET -H "Authorization: Token $token" https://staging-evalai.cloudcv.org/api/challenges/get_broker_url/$challenge/)
        elif [ ${env} == "production" ]; then
            queue_name=$(curl -k -L -X GET -H "Authorization: Token $token" https://evalapi.cloudcv.org/api/challenges/get_broker_url/$challenge/)
        fi
        echo "Completed pulling Queue name"
        # Convert the JSON-ish python list into a bash array, then strip quotes.
        queue_name=($(echo ${queue_name//,/ } | tr -d '[]'))
        queue=$(echo $queue_name | tr -d '"')
        echo "Deploying worker for queue: " $queue
        docker-compose -f docker-compose-${env}.yml run -e CHALLENGE_QUEUE=$queue -e CHALLENGE_PK=$challenge -d worker
        echo "Deployed worker docker container for queue: " $queue
        ;;
    deploy-workers)
        token=${3}
        echo "Pulling queue names for $env server challenges..."
        if [ ${env} == "staging" ]; then
            queue_names=$(curl -k -L -X GET -H "Authorization: Token $token" https://staging-evalai.cloudcv.org/api/challenges/get_broker_urls/)
        elif [ ${env} == "production" ]; then
            queue_names=$(curl -k -L -X GET -H "Authorization: Token $token" https://evalapi.cloudcv.org/api/challenges/get_broker_urls/)
        fi
        echo "Completed pulling Queue list"
        # Convert the JSON-ish python list into a bash array.
        queue_names=($(echo ${queue_names//,/ } | tr -d '[]'))
        for queue_name in "${queue_names[@]}"
        do
            queue=$(echo $queue_name | tr -d '"')
            echo "Deploying worker for queue: " $queue
            docker-compose -f docker-compose-${env}.yml run -e CHALLENGE_QUEUE=$queue -d worker
            echo "Deployed worker docker container for queue: " $queue
        done
        ;;
    scale)
        service=${3}
        instances=${4}
        echo "Scaling the containers..."
        docker-compose -f docker-compose-${env}.yml scale ${service}=${instances}
        ;;
    clean)
        {
            docker-compose -f docker-compose-${env}.yml rm -s -v -f
        } || {
            echo "Delete operation skipped since no container or image found!"
        }
        docker rmi $(docker images -a -q)
        echo "Successfully cleaned all the images."
        ;;
    *)
        echo "EvalAI deployment utility script"
        echo "    Usage: $0 {pull|deploy|scale|clean}"
        echo
        echo "    pull : Pull docker images from ECR."
        echo "        Eg. ./scripts/deployment/deploy.sh pull production"
        echo "    deploy-django : Deploy django containers in the respective environment."
        echo "        Eg. ./scripts/deployment/deploy.sh deploy-django production"
        echo "    deploy-nodejs : Deploy nodejs containers in the respective environment."
        echo "        Eg. ./scripts/deployment/deploy.sh deploy-nodejs production"
        echo "    deploy-worker : Deploy worker container for a challenge using challenge pk."
        echo "        Eg. ./scripts/deployment/deploy.sh deploy production <superuser_auth_token> <challenge_pk>"
        echo "    deploy-workers : Deploy worker containers in the respective environment."
        echo "        Eg. ./scripts/deployment/deploy.sh deploy production <superuser_auth_token>"
        echo "    scale : Scale particular docker service in an environment."
        echo "        Eg. ./scripts/deployment/deploy.sh scale production django 5"
        echo "    clean : Remove all docker containers and images."
        echo "        Eg. ./scripts/deployment/deploy.sh clean production"
esac
|
#!/bin/bash
# Colored terminal output helpers (ANSI escapes). bred/byellow additionally
# blink (SGR 5).
blue(){
    echo -e "\033[34m\033[01m$1\033[0m"
}
green(){
    echo -e "\033[32m\033[01m$1\033[0m"
}
red(){
    echo -e "\033[31m\033[01m$1\033[0m"
}
yellow(){
    echo -e "\033[33m\033[01m$1\033[0m"
}
bred(){
    echo -e "\033[31m\033[01m\033[05m$1\033[0m"
}
byellow(){
    echo -e "\033[33m\033[01m\033[05m$1\033[0m"
}
# Only CentOS 7 is supported: bail out when /etc/redhat-release is missing
# (non-RHEL system) or reports a 6.x release.
if [ ! -e '/etc/redhat-release' ]; then
    red "==============="
    red " 仅支持CentOS7"
    red "==============="
    exit
fi
if [ -n "$(grep ' 6\.' /etc/redhat-release)" ] ;then
    red "==============="
    red " 仅支持CentOS7"
    red "==============="
    exit
fi
# Installs trojan plus an nginx camouflage site on CentOS 7:
# 1. disables firewalld and SELinux,
# 2. verifies the entered domain resolves to this VPS,
# 3. issues a Let's Encrypt certificate via acme.sh,
# 4. writes client/server configs and a systemd unit,
# 5. prints a randomized download link for the preconfigured client.
function install_trojan(){
    # trojan serves 443 itself; drop the firewall rather than punching holes.
    systemctl stop firewalld
    systemctl disable firewalld
    CHECK=$(grep SELINUX= /etc/selinux/config | grep -v "#")
    if [ "$CHECK" == "SELINUX=enforcing" ]; then
        sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
        setenforce 0
    fi
    if [ "$CHECK" == "SELINUX=permissive" ]; then
        sed -i 's/SELINUX=permissive/SELINUX=disabled/g' /etc/selinux/config
        setenforce 0
    fi
    yum -y install bind-utils wget unzip zip curl tar
    green "======================="
    yellow "请输入绑定到本VPS的域名"
    green "======================="
    read your_domain
    # Compare the domain's resolved address (via ping) with this host's
    # public IPv4 address.
    real_addr=`ping ${your_domain} -c 1 | sed '1{s/[^(]*(//;s/).*//;q}'`
    local_addr=`curl ipv4.icanhazip.com`
    if [ $real_addr == $local_addr ] ; then
	green "=========================================="
	green "域名解析正常,开启安装nginx并申请https证书"
	green "=========================================="
	sleep 1s
	rpm -Uvh http://nginx.org/packages/centos/7/noarch/RPMS/nginx-release-centos-7-0.el7.ngx.noarch.rpm
	yum install -y nginx
	systemctl enable nginx.service
	# Set up the camouflage web site served by nginx.
	rm -rf /usr/share/nginx/html/*
	cd /usr/share/nginx/html/
	wget https://tj.fdsa.cf/web.zip
	unzip web.zip
	systemctl restart nginx.service
	# Request the HTTPS certificate via acme.sh webroot validation.
	mkdir /usr/src/trojan-cert
	curl https://get.acme.sh | sh
	~/.acme.sh/acme.sh --issue -d $your_domain --webroot /usr/share/nginx/html/
	~/.acme.sh/acme.sh --installcert -d $your_domain \
		--key-file /usr/src/trojan-cert/private.key \
		--fullchain-file /usr/src/trojan-cert/fullchain.cer \
		--reloadcmd "systemctl force-reload nginx.service"
	# Proceed only when the certificate file is non-empty (issuance worked).
	if test -s /usr/src/trojan-cert/fullchain.cer; then
		cd /usr/src
		#wget https://github.com/trojan-gfw/trojan/releases/download/v1.13.0/trojan-1.13.0-linux-amd64.tar.xz
		wget https://github.com/trojan-gfw/trojan/releases/download/v1.14.0/trojan-1.14.0-linux-amd64.tar.xz
		tar xf trojan-1.*
		# Download the trojan client bundle.
		#wget https://github.com/atrandys/trojan/raw/master/trojan-cli.zip
		wget https://github.com/ivannovs/trojan-1/raw/master/trojan-cli.zip
		unzip trojan-cli.zip
		cp /usr/src/trojan-cert/fullchain.cer /usr/src/trojan-cli/fullchain.cer
		# Random 8-char shared password for client and server configs.
		trojan_passwd=$(cat /dev/urandom | head -1 | md5sum | head -c 8)
		cat > /usr/src/trojan-cli/config.json <<-EOF
{
"run_type": "client",
"local_addr": "127.0.0.1",
"local_port": 1080,
"remote_addr": "$your_domain",
"remote_port": 443,
"password": [
"$trojan_passwd"
],
"log_level": 1,
"ssl": {
"verify": true,
"verify_hostname": true,
"cert": "fullchain.cer",
"cipher_tls13":"TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_256_GCM_SHA384",
"sni": "",
"alpn": [
"h2",
"http/1.1"
],
"reuse_session": true,
"session_ticket": false,
"curves": ""
},
"tcp": {
"no_delay": true,
"keep_alive": true,
"fast_open": false,
"fast_open_qlen": 20
}
}
EOF
		rm -rf /usr/src/trojan/server.conf
		cat > /usr/src/trojan/server.conf <<-EOF
{
"run_type": "server",
"local_addr": "0.0.0.0",
"local_port": 443,
"remote_addr": "127.0.0.1",
"remote_port": 80,
"password": [
"$trojan_passwd"
],
"log_level": 1,
"ssl": {
"cert": "/usr/src/trojan-cert/fullchain.cer",
"key": "/usr/src/trojan-cert/private.key",
"key_password": "",
"cipher_tls13":"TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_256_GCM_SHA384",
"prefer_server_cipher": true,
"alpn": [
"http/1.1"
],
"reuse_session": true,
"session_ticket": false,
"session_timeout": 600,
"plain_http_response": "",
"curves": "",
"dhparam": ""
},
"tcp": {
"no_delay": true,
"keep_alive": true,
"fast_open": false,
"fast_open_qlen": 20
},
"mysql": {
"enabled": false,
"server_addr": "127.0.0.1",
"server_port": 3306,
"database": "trojan",
"username": "trojan",
"password": ""
}
}
EOF
		cd /usr/src/trojan-cli/
		zip -q -r trojan-cli.zip /usr/src/trojan-cli/
		# Publish the client bundle under a random URL path on the
		# camouflage site so it is not trivially discoverable.
		trojan_path=$(cat /dev/urandom | head -1 | md5sum | head -c 16)
		mkdir /usr/share/nginx/html/${trojan_path}
		mv /usr/src/trojan-cli/trojan-cli.zip /usr/share/nginx/html/${trojan_path}/
		# Install the systemd service unit.
		cat > /usr/lib/systemd/system/trojan.service <<-EOF
[Unit]
Description=trojan
After=network.target
[Service]
Type=simple
PIDFile=/usr/src/trojan/trojan/trojan.pid
ExecStart=/usr/src/trojan/trojan -c "/usr/src/trojan/server.conf"
ExecReload=
ExecStop=/usr/src/trojan/trojan
PrivateTmp=true
[Install]
WantedBy=multi-user.target
EOF
		chmod +x /usr/lib/systemd/system/trojan.service
		systemctl start trojan.service
		systemctl enable trojan.service
		green "======================================================================"
		green "Trojan已安装完成,请使用以下链接下载trojan客户端,此客户端已配置好所有参数"
		green "1、复制下面的链接,在浏览器打开,下载客户端"
		blue "http://${your_domain}/$trojan_path/trojan-cli.zip"
		green "2、将下载的压缩包解压,打开文件夹,打开start.bat即打开并运行Trojan客户端"
		green "3、打开stop.bat即关闭Trojan客户端"
		green "4、Trojan客户端需要搭配浏览器插件使用,例如switchyomega等"
		green "======================================================================"
	else
		red "=================================="
		red "https证书没有申请成果,本次安装失败"
		red "=================================="
	fi
    else
	red "================================"
	red "域名解析地址与本VPS IP地址不一致"
	red "本次安装失败,请确保域名解析正常"
	red "================================"
    fi
}
# Uninstalls trojan: stops and disables the service, removes its systemd
# unit, removes nginx, and deletes all installed files and certificates.
function remove_trojan(){
    red "================================"
    red "即将卸载trojan"
    red "同时卸载安装的nginx"
    red "================================"
    systemctl stop trojan
    systemctl disable trojan
    rm -f /usr/lib/systemd/system/trojan.service
    yum remove -y nginx
    rm -rf /usr/src/trojan*
    rm -rf /usr/share/nginx/html/*
    green "=============="
    green "trojan删除完毕"
    green "=============="
}
# Interactive entry menu: 1 = install, 2 = uninstall, 0 = quit; any other
# input re-displays the menu.
start_menu(){
    clear
    green " ===================================="
    green " 介绍:一键安装trojan                "
    green " 系统:>=centos7                     "
    green " 作者:atrandys                      "
    green " 网站:www.atrandys.com              "
    green " Youtube:atrandys                   "
    green " ===================================="
    echo
    green " 1. 安装trojan"
    red " 2. 卸载trojan"
    yellow " 0. 退出脚本"
    echo
    read -p "请输入数字:" num
    case "$num" in
	1)
	install_trojan
	;;
	2)
	remove_trojan
	;;
	0)
	exit 1
	;;
	*)
	clear
	red "请输入正确数字"
	sleep 1s
	start_menu
	;;
    esac
}
# Script entry point.
start_menu
|
<filename>bundestag.io/admin/components/App.tsx
// Global layout wrapper for the bundestag.io admin UI: renders page content
// inside <main> and injects app-wide monospace styling via styled-jsx.
const App = ({ children }) => (
  <main>
    {children}
    <style jsx global>{`
      * {
        font-family: Menlo, Monaco, "Lucida Console", "Liberation Mono",
          "DejaVu Sans Mono", "Bitstream Vera Sans Mono", "Courier New",
          monospace, serif;
      }
      body {
        margin: 0;
        padding: 25px 50px;
      }
      a {
        color: #22bad9;
      }
      p {
        font-size: 14px;
        line-height: 24px;
      }
      article {
        margin: 0 auto;
        max-width: 650px;
      }
      button {
        align-items: center;
        background-color: #22bad9;
        border: 0;
        color: white;
        display: flex;
        padding: 5px 7px;
      }
      button:active {
        background-color: #1b9db7;
        transition: background-color 0.3s;
      }
      button:focus {
        outline: none;
      }
    `}</style>
  </main>
);

export default App;
|
<reponame>Nedelosk/OreRegistry<filename>src/main/java/oreregistry/api/registry/IProduct.java
/*
* Copyright (c) 2017 Nedelosk, Mezz
*
* This work (the MOD) is licensed under the "MIT" License, see LICENSE for details.
*/
package oreregistry.api.registry;
import java.util.List;
import net.minecraft.item.ItemStack;
/**
* A product has a type (see {@link ProductTypes}.
* This is used to unify resources from many mods so that they all use the same items.
* <p>
* All mods have to register there variant of this product in the pre init phase of fml with {@link IResource#registerProduct(String, ItemStack)}.
* Later in or after the init phase of fml they can get the by OR chosen variant with {@link #getChosenProduct()}.
* <p>
* Get an instance from {@link IResource#getProduct(String)} or {@link IResource#registerProduct(String, ItemStack)}.
*/
public interface IProduct {

	/**
	 * The type of the product. For examples see {@link ProductTypes}.
	 *
	 * @return the product type identifier
	 */
	String getType();

	/**
	 * All variants that mods have registered for this product.
	 *
	 * @return A list with all registered variants of this product.
	 */
	List<ItemStack> getVariants();

	/**
	 * The variant selected by the registry to unify this product.
	 *
	 * @return A copy of the chosen product.
	 */
	ItemStack getChosenProduct();

	/**
	 * The resource this product belongs to.
	 *
	 * @return The resource of this product.
	 */
	IResource getResource();
}
|
#!/bin/bash
# Scrapes the O'Reilly webops-perf free-reports page for .csp download links
# and fetches each report in pdf, mobi and epub form.
filename="webops-perf"
url="http://www.oreilly.com/webops-perf/free/"
# NOTE(review): `$1=$200""` looks garbled — awk field $200 is almost
# certainly empty here, which blanks out $1; presumably the intent was to
# print the URL field (e.g. `print $2`). Confirm against the original script.
lynx --dump $url | awk '/http/{$1=$200""; print}' | grep -E -i -w 'csp' > $filename
#cp $filename.txt{,.pdf,.mobi,.epub}
# `replace` here is the MySQL string-replacement utility, not a builtin.
replace "free/" "free/files/" -- $filename
replace "?intcmp=il-data-free-lp-lgen_free_reports_page" "" -- $filename
# Rewrite the .csp extension into one file list per target format.
# NOTE(review): the pattern `csp*` matches "cs" followed by zero or more
# "p" characters, so it also rewrites bare "cs" — likely meant `s/csp/pdf/g`.
sed 's/csp*/pdf/g' $filename > $filename.pdf.txt
sed 's/csp*/mobi/g' $filename > $filename.mobi.txt
sed 's/csp*/epub/g' $filename > $filename.epub.txt
#this part to download all the files listed in filename.*
for i in $filename.*
do
    wget -c -i "$i" # use "" to avoid the url containing spaces
done
|
<reponame>acidbubbles/MeshSync
#include "pch.h"
#include "msMisc.h"
namespace ms {
// Returns true when `a` begins with the C-string `b`.
// A null `b` never matches; an empty `b` matches everything.
bool StartsWith(const std::string& a, const char *b)
{
    if (b == nullptr)
        return false;
    const size_t prefix_len = std::strlen(b);
    return a.compare(0, prefix_len, b) == 0;
}
// Returns true when `a` begins with `b` (empty `b` matches everything).
bool StartsWith(const std::string& a, const std::string& b) {
    return a.compare(0, b.size(), b) == 0;
}
//----------------------------------------------------------------------------------------------------------------------
#ifndef msRuntime
// Reads the whole file at `path` into `dst`. Returns false when `path` is
// null or the file cannot be opened.
bool FileToByteArray(const char *path, RawVector<char> &dst)
{
    if (!path)
        return false;

    // note: FILE or std::fstream may fail to open files if path contains multi-byte characters
    Poco::FileStream f(path, std::ios::in);
    if (!f)
        return false;

    auto size = Poco::File(path).getSize();
    dst.resize_discard((size_t)size);
    f.read(dst.data(), (size_t)size);
    return true;
}

// SharedVector overload: forwards to the RawVector version via as_raw().
bool FileToByteArray(const char *path, SharedVector<char>& out)
{
    return FileToByteArray(path, out.as_raw());
}

// Writes the contents of `data` to the file at `path` (overwrites).
bool ByteArrayToFile(const char *path, const RawVector<char> &data)
{
    return ByteArrayToFile(path, data.cdata(), data.size());
}

// SharedVector overload: forwards through the raw view.
bool ByteArrayToFile(const char *path, const SharedVector<char>& data)
{
    return ByteArrayToFile(path, data.as_craw());
}

// Core writer: returns false when `path` is null or the file cannot be
// opened for writing.
bool ByteArrayToFile(const char *path, const char *data, size_t size)
{
    if (!path)
        return false;
    Poco::FileStream f(path, std::ios::out);
    if (!f)
        return false;
    f.write(data, size);
    return true;
}

// Returns true when a file exists at `path`. Null/empty paths and any
// filesystem exception yield false.
bool FileExists(const char *path)
{
    if (!path || *path == '\0')
        return false;
    try {
        // this is faster than using fopen()
        return Poco::File(path).exists();
    }
    catch (...) {
        return false;
    }
}

// Last-modified timestamp of the file at `path` (Poco raw timestamp value),
// or 0 when the file does not exist or the query fails.
uint64_t FileMTime(const char *path)
{
    if (!FileExists(path))
        return 0;
    try {
        Poco::File f(path);
        return f.getLastModified().raw();
    }
    catch (...) {
        return 0;
    }
}
// Collects every regular file directly under `path` into `ret`, keyed (and
// therefore ordered) by last-modified timestamp. Directories are skipped.
void FindFilesSortedByLastModified(const std::string& path, std::multimap<uint64_t, std::string>& ret) {
    using namespace std;

    Poco::DirectoryIterator dir_itr(path);
    Poco::DirectoryIterator end;

    ret.clear();
    while (dir_itr != end) {
        // Bug fix: the original used `continue` for directories, which
        // skipped the `++dir_itr` increment and looped forever on the first
        // directory entry. Advance the iterator unconditionally instead.
        if (!dir_itr->isDirectory()) {
            ret.insert(multimap<uint64_t, string>::value_type(dir_itr->getLastModified().raw(), dir_itr->path()));
        }
        ++dir_itr;
    }
}
#endif // msRuntime
} // namespace ms
|
<reponame>wing-puah/thegeekwing-jekyll
// Hides the page content and footer while a #loader element is present, then
// reveals the page after a short delay. No-op when there is no loader.
// Fix: removed a leftover debug console.log and a stray trailing semicolon
// after the function declaration.
function loading() {
  const loader = document.getElementById('loader');
  const footer = document.getElementById('site-footer');

  if (!loader) {
    return;
  }

  document.getElementById('loader-content').style.display = 'none';
  if (footer) {
    footer.style.display = 'none';
  }
  // Give the loader animation a moment before swapping the content in.
  setTimeout(showPage, 1000);
}
// Reveals the main content (and footer, when present) once loading is done.
function showPage() {
  const footer = document.getElementById('site-footer');

  document.getElementById('loader').style.display = 'none';
  document.getElementById('loader-content').style.display = 'block';

  if (footer) {
    footer.style.display = 'block';
  }
}
// Kick off the loader flow once the DOM is parsed.
document.addEventListener("DOMContentLoaded", loading);
|
<reponame>CS-3398-264/DeadpoolRepo<gh_stars>0
const { riderModel, driverModel, tripModel } = require('../models');
const { getRating, calculateRate, computeMileage, distanceMatrixRequest, newDirectionRequest, buildSteps, simulateTrip } = require('../utils/tools');
const auth = require('basic-auth');
exports = module.exports = {};
// NOTE: these methods currently return JSON (for debugging),
// but, for idempotency, may get switched to 200 OK later on
exports.riderID = async (req, res, next, riderID) => {
try {
const riderDoc = await riderModel.findById(riderID);
req.rider = riderDoc;
if (!riderDoc) throw 'Error: rider not found.'
} catch (e) {
req.rider = null;
console.error(e.message || e);
}
return next();
}
exports.getRiderByID = (req, res) => {
if (req.rider)
res.send(req.rider);
else
res.sendStatus(404); // Should only occur if ID sent is not in DB.
}
exports.getAllRiders = async (req, res) => {
try {
const riderDocs = await riderModel.find();
res.send(riderDocs);
} catch (e) {
res.sendStatus(500); // Should only error if there is a DB issue.
}
}
exports.getPotentialDrivers = async (req, res) => {
try {
let driverDocs = await driverModel.find({ available : true });
const maxDistance = req.query.maxDistance || 15;
if (!req.rider.location.latitude || !req.rider.location.longitude)
throw 'Error: Rider does not have location set.';
if (req.query.minCapacity) {
driverDocs = driverDocs.filter(driver => driver.capacity >= req.query.minCapacity);
}
const distanceData = await distanceMatrixRequest(
driverDocs.filter(driver => driver.location.latitude && driver.location.longitude).map(driver => driver.location),
req.rider.location
);
const newDriverDocs = driverDocs.filter(driver => driver.location.latitude && driver.location.longitude)
.map((driver, index) => {
return {
...JSON.parse(JSON.stringify(driver)),
distance : String(distanceData.rows[index].elements[0].distance.text),
timeToPickup : String(distanceData.rows[index].elements[0].duration.text)
};
});
driverDocs = newDriverDocs.filter(driver => parseFloat(computeMileage(driver.distance)) <= maxDistance);
res.send(driverDocs);
} catch (e) {
if(e.message === 'Error: Rider does not have location set.') {
console.error(e.message || e);
res.sendStatus(422); // Pre-condition not met, rider does not have location set.
} else {
console.error(e.message || e);
res.sendStatus(500); // Other error, server related.
}
}
}
exports.getTripEstimate = async (req, res) => {
try {
if (!req.query.lat || !req.query.lon)
throw 'Error: Destination not set.';
else if (!req.rider.location.latitude || !req.rider.location.longitude)
throw 'Error: Rider does not have location set.';
const dest = {
latitude: req.query.lat,
longitude: req.query.lon
};
const tripData = await distanceMatrixRequest(req.rider.location, dest);
const currentRate = calculateRate(new Date(Date.now()).getHours());
const mileage = computeMileage(tripData.rows[0].elements[0].distance.text);
const tripCost = `$${(currentRate * mileage).toFixed(2)}`;
const tripEstimate = {
pickup: tripData.origin_addresses[0],
dropoff: tripData.destination_addresses[0],
distance: tripData.rows[0].elements[0].distance.text,
travelTime: tripData.rows[0].elements[0].duration.text,
cost: tripCost
};
res.send(tripEstimate);
} catch (e) {
if(e.message === 'Error: Rider does not have location set.') {
console.error(e.message || e);
res.sendStatus(422); // Bad request, rider does not have location set.
} else if(e.message === 'Error: Rider does not have location set.') {
console.error(e.message || e);
res.sendStatus(422); // Bad request, rider does not have location set.
} else {
console.error(e.message || e);
res.sendStatus(500); // Other error, server related.
}
}
}
exports.getRiderRating = (req, res) => {
if (req.rider)
res.json(getRating(req.rider));
else
res.sendStatus(404);
console.log(req.rider.reviews);
}
exports.setRiderLocation = async (req, res) => {
try {
if (!req.body.latitude || !req.body.longitude)
throw 'Error: Incomplete location.';
const updatedRider = await riderModel.findByIdAndUpdate(
req.rider._id,
{ $set: { location: {
latitude: req.body.latitude,
longitude: req.body.longitude
} } }, { new: true }
);
res.send(updatedRider);
} catch (e) {
console.error(e.message || e);
res.sendStatus(400); // Bad request, invalid information.
}
}
exports.rateDriver = async (req, res) => {
try {
if (!req.rider)
throw 'Error: Invalid riderID.';
const validTrip = await tripModel.findById(req.body.tripID);
if (!validTrip)
throw 'Error: Invalid trip.';
else if (!validTrip.isComplete)
throw 'Error: Trip incomplete.';
const existingRating = await driverModel.findOne({"reviews.tripID" : req.body.tripID});
if (existingRating)
throw 'Error: Rating already submitted for this trip.';
const updatedDriver = await driverModel.findByIdAndUpdate(
req.body.driverID,
{ $push: {
reviews: {
tripID: req.body.tripID,
score: parseFloat(req.body.score).toFixed(2)
}
}
},
{ new: true }
);
res.send(updatedDriver);
} catch (e) {
if(e.message === 'Error: Invalid riderID.') {
console.error(e.message || e);
res.sendStatus(422); // Bad request, no ID provided.
} else {
console.error(e.message || e);
res.sendStatus(500); // Other server issue.
}
}
}
exports.requestPickup = async (req, res) => {
try {
const requestedDriver = await driverModel.findById(req.body.driverID);
if (!req.rider.location.latitude || !req.rider.location.longitude)
throw 'Error: Current location not set.';
else if (!req.body.dropoff.latitude || !req.body.dropoff.longitude)
throw 'Error: Destination coordinates incomplete.'
else if (!requestedDriver.available)
throw 'Error: Selected driver is unavailable.';
const dirData = await newDirectionRequest(
requestedDriver.location, req.rider.location, req.body.dropoff
);
const currentRate = calculateRate(new Date(Date.now()).getHours());
const mileage = computeMileage(dirData.routes[0].legs[1].distance.text);
const tripRequest = new tripModel({
riderID: req.rider._id,
driverID: requestedDriver._id,
isComplete: false,
rate: currentRate,
cost: `$${(currentRate * mileage).toFixed(2)}`,
driverLoc: {
address: dirData.routes[0].legs[0].start_address,
latitude: requestedDriver.location.latitude,
longitude: requestedDriver.location.longitude
},
pickup: {
address: dirData.routes[0].legs[0].end_address,
latitude: req.rider.location.latitude,
longitude: req.rider.location.longitude
},
dropoff: {
address: dirData.routes[0].legs[1].end_address,
latitude: req.body.dropoff.latitude,
longitude: req.body.dropoff.longitude
},
distance: dirData.routes[0].legs[1].distance.text,
travelTime: dirData.routes[0].legs[1].duration.text,
timeToPickup: dirData.routes[0].legs[0].duration.text,
directions: {
toPickup: buildSteps(dirData.routes[0].legs[0].steps),
toDropoff: buildSteps(dirData.routes[0].legs[1].steps)
}
});
const tripDoc = await tripRequest.save();
const updatedDriver = await driverModel.findByIdAndUpdate(
req.body.driverID,
{ $set: {
available: false,
currentTrip: tripDoc._id
} },
{ new: true }
);
simulateTrip(tripDoc);
res.send(tripDoc);
} catch (e) {
if(e.message === 'Error: Current location not set.') {
console.error(e.message || e);
res.sendStatus(422); // Bad request, current location not provided.
} else if(e.message === 'Error: Destination coordinates incomplete..') {
console.error(e.message || e);
res.sendStatus(422); // Bad request, destination not provided.
} else if(e.message === 'Error: Selected driver is unavailable.') {
console.error(e.message || e);
res.sendStatus(404); // Driver requested 'not found'.
} else {
console.error(e.message || e);
res.sendStatus(500); // Other server issue.
}
}
}
exports.addRider = async (req, res) => {
try {
if (!req.body.name)
throw 'Error: Name not set.';
const newRider = new riderModel({
name: req.body.name,
location: {
latitude: null,
longitude: null
},
reviews: []
});
const newDoc = await newRider.save();
console.log('saved new Rider "%s" to db. id: %s', newDoc.name, newDoc._id);
res.send(newDoc);
} catch (e) {
if(e.message === 'Error: Name not set.') {
console.error(e.message || e);
res.sendStatus(422); // Bad request, name not provided.
} else {
console.error(e.message || e);
res.sendStatus(500); // Other server error.
}
}
}
/* ADMIN AUTH REQUIRED */
exports.removeRider = async (req, res) => {
const credentials = auth(req);
try {
if (!credentials)
throw 'Error: Missing Credentials.';
else if (credentials.name !== 'admin' || credentials.pass !== 'password')
throw 'Error: Invalid Credentials.';
const rider = await riderModel.remove({ _id : req.rider._id });
res.sendStatus(200);
} catch (e) {
res.sendStatus(404);
}
}
|
import {
put, takeEvery, call,
fork
} from 'redux-saga/effects';
import { push } from 'connected-react-router';
import { GET_EVENTS_ARTIST } from './types';
import { errorMessage } from '../error/actions';
import { getEventsToServer, getPositionUser } from './utilities/events';
import { successGetEventsArtist, successGetPositionUser } from './actions';
/**
 * Saga: resolves the user's position and stores it in state,
 * dispatching an error-message action when the lookup fails.
 */
export function* getGeolocation() {
  try {
    const position = yield call(getPositionUser);
    yield put(successGetPositionUser(position));
  } catch (e) {
    yield put(errorMessage(e.message));
  }
}
/**
 * Saga: loads the events for one artist and navigates to its event page.
 * Failures are surfaced via the error-message action.
 */
export function* getEvent({ payload }) {
  const { id, name, startDate, endDate } = payload;
  try {
    const events = yield call(getEventsToServer, id, startDate, endDate);
    yield put(successGetEventsArtist(events));
    yield put(push(`/event/${name}`));
  } catch (e) {
    yield put(errorMessage(e.message));
  }
}
/**
 * Root saga for events: kicks off a one-time geolocation fetch and then
 * listens for artist-event requests.
 */
export function* sagasEvents() {
  yield fork(getGeolocation);
  yield takeEvery(GET_EVENTS_ARTIST, getEvent);
}
|
import secrets
import jwt
class AuthenticationHandler:
    """Thin wrapper around PyJWT for issuing and validating HS256 tokens."""

    @staticmethod
    def generate_auth_token(roles, secret):
        """Encode the ``roles`` payload into a signed JWT.

        Args:
            roles: JWT claims/payload (a dict) to sign.
            secret: HMAC secret used for the HS256 signature.

        Returns:
            The encoded token.
        """
        token = jwt.encode(roles, secret, algorithm='HS256')
        return token

    @staticmethod
    def validate_and_decode_token(token, key, audience):
        """Decode ``token``, verifying signature, expiry and audience.

        Raises:
            Exception: "Token has expired" on an expired signature, or
                "Invalid token" for any other validation failure. The
                original PyJWT error is preserved as ``__cause__`` so the
                real failure reason is not lost.
        """
        try:
            decoded_token = jwt.decode(token, key, algorithms=['HS256'], audience=audience)
            return decoded_token
        except jwt.ExpiredSignatureError as exc:
            raise Exception("Token has expired") from exc
        except jwt.InvalidTokenError as exc:
            raise Exception("Invalid token") from exc
|
import React, { useEffect } from 'react'
import { useNavigate, useParams } from 'react-router-dom'
import { ModeForm } from 'src/constants/object'
import { UPDATE_SUCCESS } from 'src/constants/string'
import { useNotification } from 'src/hook/useNotification'
import { useLoadUsersQuery, useUpdateUsersMutation } from 'src/reducers/user/api'
import Form from './Form'
// Edit screen for a single user: loads the user by the :id route param,
// submits updates through the RTK Query mutation, and returns to the user
// list once the update succeeds.
const EditUser = () => {
  const { id } = useParams()
  const [updateUsers, { isLoading, error, isSuccess }] = useUpdateUsersMutation()
  const { data } = useLoadUsersQuery(id)
  const navigate = useNavigate()
  // Merge the route id into the form values before sending the update.
  const handleSubmit = (value) => {
    updateUsers({ id, ...value })
  }
  // Navigate back to the list after a successful update.
  useEffect(() => {
    if (isSuccess) {
      navigate('/dashboard/users')
    }
  }, [isSuccess])
  // Shows an error or success toast ("Cập nhật người dùng" flow).
  useNotification(error, isSuccess, UPDATE_SUCCESS)
  // Render the form only once the user data has arrived.
  return (
    <React.Fragment>
      {data && (
        <Form
          title={'Cập nhật người dùng'}
          submit={handleSubmit}
          data={data?.user}
          mode={ModeForm.Edit}
          isLoading={isLoading}
        />
      )}
    </React.Fragment>
  )
}
export default EditUser
|
package wordcram;
/**
* A WordColorer tells WordCram what color to render a word in.
* <p>
* <b>Note:</b> if you implement your own WordColorer, you should be familiar
* with how <a href="http://processing.org/reference/color_datatype.html"
* target="blank">Processing represents colors</a> -- or just make sure it uses
* Processing's <a href="http://processing.org/reference/color_.html"
* target="blank">color</a> method.
* <p>
* Some useful implementations are available in {@link Colorers}.
*
* @author <NAME>
*/
public interface WordColorer {

    /**
     * What color should this {@link Word} be?
     *
     * @param word the word to pick the color for
     * @return the color for the word, as a Processing color int
     */
    int colorFor(Word word);
}
|
#!/bin/sh

test_description='git blame corner cases'
. ./test-lib.sh

# Reduce "git blame -f" output to "<file>-<author>" pairs for easy matching.
pick_fc='s/^[0-9a-f^]* *\([^ ]*\) *(\([^ ]*\) .*/\1-\2/'

# Build a small history exercising copy/move/append detection: files are
# copied (uno), renamed (dos), appended to (tres, mouse) and wholesale
# copied with edits (cow), each commit by a distinctly named author.
test_expect_success setup '
	echo A A A A A >one &&
	echo B B B B B >two &&
	echo C C C C C >tres &&
	echo ABC >mouse &&
	for i in 1 2 3 4 5 6 7 8 9
	do
		echo $i
	done >nine_lines &&
	for i in 1 2 3 4 5 6 7 8 9 a
	do
		echo $i
	done >ten_lines &&
	git add one two tres mouse nine_lines ten_lines &&
	test_tick &&
	GIT_AUTHOR_NAME=Initial git commit -m Initial &&
	cat one >uno &&
	mv two dos &&
	cat one >>tres &&
	echo DEF >>mouse &&
	git add uno dos tres mouse &&
	test_tick &&
	GIT_AUTHOR_NAME=Second git commit -a -m Second &&
	echo GHIJK >>mouse &&
	git add mouse &&
	test_tick &&
	GIT_AUTHOR_NAME=Third git commit -m Third &&
	cat mouse >cow &&
	git add cow &&
	test_tick &&
	GIT_AUTHOR_NAME=Fourth git commit -m Fourth &&
	cat >cow <<-\EOF &&
	ABC
	DEF
	XXXX
	GHIJK
	EOF
	git add cow &&
	test_tick &&
	GIT_AUTHOR_NAME=Fifth git commit -m Fifth
'

# Copy/move detection levels: without -C blame stays within the file;
# -C follows modified copies; -C -C also checks unmodified files.
test_expect_success 'straight copy without -C' '
	git blame uno | grep Second
'

test_expect_success 'straight move without -C' '
	git blame dos | grep Initial
'

test_expect_success 'straight copy with -C' '
	git blame -C1 uno | grep Second
'

test_expect_success 'straight move with -C' '
	git blame -C1 dos | grep Initial
'

test_expect_success 'straight copy with -C -C' '
	git blame -C -C1 uno | grep Initial
'

test_expect_success 'straight move with -C -C' '
	git blame -C -C1 dos | grep Initial
'

test_expect_success 'append without -C' '
	git blame -L2 tres | grep Second
'

test_expect_success 'append with -C' '
	git blame -L2 -C1 tres | grep Second
'

test_expect_success 'append with -C -C' '
	git blame -L2 -C -C1 tres | grep Second
'

test_expect_success 'append with -C -C -C' '
	git blame -L2 -C -C -C1 tres | grep Initial
'

test_expect_success 'blame wholesale copy' '
	git blame -f -C -C1 HEAD^ -- cow | sed -e "$pick_fc" >current &&
	cat >expected <<-\EOF &&
	mouse-Initial
	mouse-Second
	mouse-Third
	EOF
	test_cmp expected current
'

test_expect_success 'blame wholesale copy and more' '
	git blame -f -C -C1 HEAD -- cow | sed -e "$pick_fc" >current &&
	cat >expected <<-\EOF &&
	mouse-Initial
	mouse-Second
	cow-Fifth
	mouse-Third
	EOF
	test_cmp expected current
'

test_expect_success 'blame wholesale copy and more in the index' '
	cat >horse <<-\EOF &&
	ABC
	DEF
	XXXX
	YYYY
	GHIJK
	EOF
	git add horse &&
	test_when_finished "git rm -f horse" &&
	git blame -f -C -C1 -- horse | sed -e "$pick_fc" >current &&
	cat >expected <<-\EOF &&
	mouse-Initial
	mouse-Second
	cow-Fifth
	horse-Not
	mouse-Third
	EOF
	test_cmp expected current
'

test_expect_success 'blame during cherry-pick with file rename conflict' '
	test_when_finished "git reset --hard && git checkout master" &&
	git checkout HEAD~3 &&
	echo MOUSE >> mouse &&
	git mv mouse rodent &&
	git add rodent &&
	GIT_AUTHOR_NAME=Rodent git commit -m "rodent" &&
	git checkout --detach master &&
	(git cherry-pick HEAD@{1} || test $? -eq 1) &&
	git show HEAD@{1}:rodent > rodent &&
	git add rodent &&
	git blame -f -C -C1 rodent | sed -e "$pick_fc" >current &&
	cat current &&
	cat >expected <<-\EOF &&
	mouse-Initial
	mouse-Second
	rodent-Not
	EOF
	test_cmp expected current
'

test_expect_success 'blame path that used to be a directory' '
	mkdir path &&
	echo A A A A A >path/file &&
	echo B B B B B >path/elif &&
	git add path &&
	test_tick &&
	git commit -m "path was a directory" &&
	rm -fr path &&
	echo A A A A A >path &&
	git add path &&
	test_tick &&
	git commit -m "path is a regular file" &&
	git blame HEAD^.. -- path
'

test_expect_success 'blame to a commit with no author name' '
	TREE=$(git rev-parse HEAD:) &&
	cat >badcommit <<EOF &&
tree $TREE
author <noname> 1234567890 +0000
committer David Reiss <dreiss@facebook.com> 1234567890 +0000

some message
EOF
	COMMIT=$(git hash-object -t commit -w badcommit) &&
	git --no-pager blame $COMMIT -- uno >/dev/null
'

# -L range handling: out-of-range starts fail, out-of-range ends truncate.
test_expect_success 'blame -L with invalid start' '
	test_must_fail git blame -L5 tres 2>errors &&
	test_i18ngrep "has only 2 lines" errors
'

test_expect_success 'blame -L with invalid end' '
	git blame -L1,5 tres >out &&
	test_line_count = 2 out
'

test_expect_success 'blame parses <end> part of -L' '
	git blame -L1,1 tres >out &&
	test_line_count = 1 out
'

test_expect_success 'blame -Ln,-(n+1)' '
	git blame -L3,-4 nine_lines >out &&
	test_line_count = 3 out
'

# Line-number columns are padded only when the count needs an extra digit.
test_expect_success 'indent of line numbers, nine lines' '
	git blame nine_lines >actual &&
	test $(grep -c "  " actual) = 0
'

test_expect_success 'indent of line numbers, ten lines' '
	git blame ten_lines >actual &&
	test $(grep -c "  " actual) = 9
'

# CRLF handling: blame must attribute lines correctly regardless of
# core.autocrlf and gitattributes text settings.
test_expect_success 'setup file with CRLF newlines' '
	git config core.autocrlf false &&
	printf "testcase\n" >crlffile &&
	git add crlffile &&
	git commit -m testcase &&
	printf "testcase\r\n" >crlffile
'

test_expect_success 'blame file with CRLF core.autocrlf true' '
	git config core.autocrlf true &&
	git blame crlffile >actual &&
	grep "A U Thor" actual
'

test_expect_success 'blame file with CRLF attributes text' '
	git config core.autocrlf false &&
	echo "crlffile text" >.gitattributes &&
	git blame crlffile >actual &&
	grep "A U Thor" actual
'

test_expect_success 'blame file with CRLF core.autocrlf=true' '
	git config core.autocrlf false &&
	printf "testcase\r\n" >crlfinrepo &&
	>.gitattributes &&
	git add crlfinrepo &&
	git commit -m "add crlfinrepo" &&
	git config core.autocrlf true &&
	mv crlfinrepo tmp &&
	git checkout crlfinrepo &&
	rm tmp &&
	git blame crlfinrepo >actual &&
	grep "A U Thor" actual
'

# Tests the splitting and merging of blame entries in blame_coalesce().
# The output of blame is the same, regardless of whether blame_coalesce() runs
# or not, so we'd likely only notice a problem if blame crashes or assigned
# blame to the "splitting" commit ('SPLIT' below).
test_expect_success 'blame coalesce' '
	cat >giraffe <<-\EOF &&
	ABC
	DEF
	EOF
	git add giraffe &&
	git commit -m "original file" &&
	oid=$(git rev-parse HEAD) &&

	cat >giraffe <<-\EOF &&
	ABC
	SPLIT
	DEF
	EOF
	git add giraffe &&
	git commit -m "interior SPLIT line" &&

	cat >giraffe <<-\EOF &&
	ABC
	DEF
	EOF
	git add giraffe &&
	git commit -m "same contents as original" &&

	cat >expect <<-EOF &&
	$oid 1) ABC
	$oid 2) DEF
	EOF
	git -c core.abbrev=40 blame -s giraffe >actual &&
	test_cmp expect actual
'

test_done
|
// Named viewport widths (CSS px strings) shared by stories and tests.
const BREAKPOINTS = {
  xs: '480px',
  sm: '768px',
  md: '992px',
  lg: '1200px',
  // Other breakpoint definitions may be present here
};
/**
 * Extract the Chromatic viewport widths from a story's `parameters` object.
 *
 * @param {object|null|undefined} parameters - story parameters
 * @returns {string[]} configured viewport widths, or [] when absent
 */
function processViewportSettings(parameters) {
  return parameters?.chromatic?.viewports || [];
}
// Test the function with the provided example
// `Usage` mimics the shape of a Storybook story's exported parameters.
const Usage = { parameters: { chromatic: { viewports: [BREAKPOINTS.xs, BREAKPOINTS.sm, BREAKPOINTS.md] } } };
console.log(processViewportSettings(Usage.parameters)); // Output: ['480px', '768px', '992px']
|
# Read-only model backed by the +venues_view+ database view.
class VenueView < ActiveRecord::Base
  belongs_to :venue

  self.primary_key = "venue_id"
  self.table_name = 'venues_view'

  # Rows come from a view; never allow writes through this model.
  def readonly?
    true
  end

  # Refresh the backing materialized view so reads see current data.
  # Fix: "REFRESH VIEW" is not valid SQL — PostgreSQL requires
  # REFRESH MATERIALIZED VIEW.
  def self.refresh
    ActiveRecord::Base.connection.execute('REFRESH MATERIALIZED VIEW venues_view')
  end
end
|
/*
* Copyright (C) 2012-2015 ARM Limited. All rights reserved.
*
* This program is free software and is provided to you under the terms of the GNU General Public License version 2
* as published by the Free Software Foundation, and any use by you of this program is subject to the terms of such GNU licence.
*
* A copy of the licence is included with the program, and can also be obtained from Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include <linux/module.h>
#include <mali_profiling_gator_api.h>
#include "mali_kernel_common.h"
#include "mali_osk.h"
#include "mali_ukk.h"
#include "mali_uk_types.h"
#include "mali_osk_profiling.h"
#include "mali_linux_trace.h"
#include "mali_gp.h"
#include "mali_pp.h"
#include "mali_l2_cache.h"
#include "mali_user_settings_db.h"
#include "mali_executor.h"
/*
 * Initialize OS profiling support. When auto_start is requested, enable
 * the software-events user setting. Always returns _MALI_OSK_ERR_OK.
 */
_mali_osk_errcode_t _mali_osk_profiling_init(mali_bool auto_start)
{
	if (auto_start == MALI_TRUE)
		mali_set_user_setting(_MALI_UK_USER_SETTING_SW_EVENTS_ENABLE, MALI_TRUE);

	return _MALI_OSK_ERR_OK;
}
/* Tear down OS profiling support; init allocates nothing, so this is a no-op. */
void _mali_osk_profiling_term(void)
{
	/* Nothing to do */
}
/* Forward software counter values to the tracepoint, tagged with the
 * current pid/tid. */
void _mali_osk_profiling_report_sw_counters(u32 *counters)
{
	trace_mali_sw_counters(_mali_osk_get_pid(), _mali_osk_get_tid(), NULL, counters);
}
/* Store the driver's current reported memory usage into *memory_usage. */
void _mali_osk_profiling_memory_usage_get(u32 *memory_usage)
{
	*memory_usage = _mali_ukk_report_memory_usage();
}
/* User-space entry point for adding a profiling event. */
_mali_osk_errcode_t _mali_ukk_profiling_add_event(_mali_uk_profiling_add_event_s *args)
{
	/* Always add process and thread identificator in the first two data elements for events from user space */
	_mali_osk_profiling_add_event(args->event_id, _mali_osk_get_pid(), _mali_osk_get_tid(), args->data[2], args->data[3], args->data[4]);

	return _MALI_OSK_ERR_OK;
}
/*
 * User-space entry point for reporting software counters.
 * args->counters carries a user pointer as an integer; convert it back
 * before forwarding to the tracepoint helper.
 */
_mali_osk_errcode_t _mali_ukk_sw_counters_report(_mali_uk_sw_counters_report_s *args)
{
	u32 *counter_values = (u32 *)(uintptr_t)args->counters;

	_mali_osk_profiling_report_sw_counters(counter_values);
	return _MALI_OSK_ERR_OK;
}
/* User-space entry point for querying current GPU memory usage. */
_mali_osk_errcode_t _mali_ukk_profiling_memory_usage_get(_mali_uk_profiling_memory_usage_get_s *args)
{
	_mali_osk_profiling_memory_usage_get(&args->memory_usage);
	return _MALI_OSK_ERR_OK;
}
/**
 * Called by gator.ko to set HW counters
 *
 * @param counter_id The counter ID.
 * @param event_id Event ID that the counter should count (HW counter value from TRM).
 *
 * @return 1 on success, 0 on failure.
 */
int _mali_profiling_set_event(u32 counter_id, s32 event_id)
{
	/* Dispatch on counter range: GP (vertex) sources, then PP (fragment)
	 * per-sub-job sources, then L2 cache sources. */
	if (COUNTER_VP_0_C0 == counter_id) {
		mali_gp_job_set_gp_counter_src0(event_id);
	} else if (COUNTER_VP_0_C1 == counter_id) {
		mali_gp_job_set_gp_counter_src1(event_id);
	} else if (COUNTER_FP_0_C0 <= counter_id && COUNTER_FP_7_C1 >= counter_id) {
		/*
		 * Two compatibility notes for this function:
		 *
		 * 1) Previously the DDK allowed per core counters.
		 *
		 * This did not make much sense on Mali-450 with the "virtual PP core" concept,
		 * so this option was removed, and only the same pair of HW counters was allowed on all cores,
		 * beginning with r3p2 release.
		 *
		 * Starting with r4p0, it is now possible to set different HW counters for the different sub jobs.
		 * This should be almost the same, since sub job 0 is designed to run on core 0,
		 * sub job 1 on core 1, and so on.
		 *
		 * The scheduling of PP sub jobs is not predictable, and this often led to situations where core 0 ran 2
		 * sub jobs, while for instance core 1 ran zero. Having the counters set per sub job would thus increase
		 * the predictability of the returned data (as you would be guaranteed data for all the selected HW counters).
		 *
		 * PS: Core scaling needs to be disabled in order to use this reliably (goes for both solutions).
		 *
		 * The framework/#defines with Gator still indicates that the counter is for a particular core,
		 * but this is internally used as a sub job ID instead (no translation needed).
		 *
		 * 2) Global/default vs per sub job counters
		 *
		 * Releases before r3p2 had only per PP core counters.
		 * r3p2 releases had only one set of default/global counters which applied to all PP cores
		 * Starting with r4p0, we have both a set of default/global counters,
		 * and individual counters per sub job (equal to per core).
		 *
		 * To keep compatibility with Gator/DS-5/streamline, the following scheme is used:
		 *
		 * r3p2 release; only counters set for core 0 is handled,
		 * this is applied as the default/global set of counters, and will thus affect all cores.
		 *
		 * r4p0 release; counters set for core 0 is applied as both the global/default set of counters,
		 * and counters for sub job 0.
		 * Counters set for core 1-7 is only applied for the corresponding sub job.
		 *
		 * This should allow the DS-5/Streamline GUI to have a simple mode where it only allows setting the
		 * values for core 0, and thus this will be applied to all PP sub jobs/cores.
		 * Advanced mode will also be supported, where individual pairs of HW counters can be selected.
		 *
		 * The GUI will (until it is updated) still refer to cores instead of sub jobs, but this is probably
		 * something we can live with!
		 *
		 * Mali-450 note: Each job is not divided into a deterministic number of sub jobs, as the HW DLBU
		 * automatically distributes the load between whatever number of cores is available at this particular time.
		 * A normal PP job on Mali-450 is thus considered a single (virtual) job, and it will thus only be possible
		 * to use a single pair of HW counters (even if the job ran on multiple PP cores).
		 * In other words, only the global/default pair of PP HW counters will be used for normal Mali-450 jobs.
		 */
		/* Counter IDs are laid out in (sub job, src) pairs; low bit selects src0/src1. */
		u32 sub_job = (counter_id - COUNTER_FP_0_C0) >> 1;
		u32 counter_src = (counter_id - COUNTER_FP_0_C0) & 1;
		if (0 == counter_src) {
			mali_pp_job_set_pp_counter_sub_job_src0(sub_job, event_id);
			if (0 == sub_job) {
				mali_pp_job_set_pp_counter_global_src0(event_id);
			}
		} else {
			mali_pp_job_set_pp_counter_sub_job_src1(sub_job, event_id);
			if (0 == sub_job) {
				mali_pp_job_set_pp_counter_global_src1(event_id);
			}
		}
	} else if (COUNTER_L2_0_C0 <= counter_id && COUNTER_L2_2_C1 >= counter_id) {
		u32 core_id = (counter_id - COUNTER_L2_0_C0) >> 1;
		struct mali_l2_cache_core *l2_cache_core = mali_l2_cache_core_get_glob_l2_core(core_id);

		if (NULL != l2_cache_core) {
			u32 counter_src = (counter_id - COUNTER_L2_0_C0) & 1;
			mali_l2_cache_core_set_counter_src(l2_cache_core,
							   counter_src, event_id);
		}
	} else {
		return 0; /* Failure, unknown event */
	}

	return 1; /* success */
}
/**
 * Called by gator.ko to retrieve the L2 cache counter values for all L2 cache cores.
 * The L2 cache counters are unique in that they are polled by gator, rather than being
 * transmitted via the tracepoint mechanism.
 *
 * @param values Pointer to a _mali_profiling_l2_counter_values structure where
 *               the counter sources and values will be output
 * @return 0 if all went well; otherwise, return the mask with the bits set for the powered off cores
 */
u32 _mali_profiling_get_l2_counters(_mali_profiling_l2_counter_values *values)
{
	u32 l2_cores_num = mali_l2_cache_core_get_glob_num_l2_cores();
	u32 i;

	MALI_DEBUG_ASSERT(l2_cores_num <= 3);

	for (i = 0; i < l2_cores_num; i++) {
		struct mali_l2_cache_core *l2_cache = mali_l2_cache_core_get_glob_l2_core(i);

		/* Skip cores that are not present/registered. */
		if (NULL == l2_cache) {
			continue;
		}

		mali_l2_cache_core_get_counter_values(l2_cache,
						      &values->cores[i].source0,
						      &values->cores[i].value0,
						      &values->cores[i].source1,
						      &values->cores[i].value1);
	}

	/* NOTE(review): always returns 0 here even though the contract allows a
	 * powered-off-cores mask — confirm this is intentional for this version. */
	return 0;
}
/**
 * Called by gator to control the production of profiling information at runtime.
 * Each action maps directly onto a user-settings database entry.
 */
void _mali_profiling_control(u32 action, u32 value)
{
	switch (action) {
	case FBDUMP_CONTROL_ENABLE:
		/* Any non-zero value enables color buffer capture. */
		mali_set_user_setting(_MALI_UK_USER_SETTING_COLORBUFFER_CAPTURE_ENABLED, (value == 0 ? MALI_FALSE : MALI_TRUE));
		break;
	case FBDUMP_CONTROL_RATE:
		mali_set_user_setting(_MALI_UK_USER_SETTING_BUFFER_CAPTURE_N_FRAMES, value);
		break;
	case SW_COUNTER_ENABLE:
		mali_set_user_setting(_MALI_UK_USER_SETTING_SW_COUNTER_ENABLED, value);
		break;
	case FBDUMP_CONTROL_RESIZE_FACTOR:
		mali_set_user_setting(_MALI_UK_USER_SETTING_BUFFER_CAPTURE_RESIZE_FACTOR, value);
		break;
	default:
		break;	/* Ignore unimplemented actions */
	}
}
/**
 * Called by gator to get mali api version.
 * Lets gator.ko verify it is compatible with this driver build.
 */
u32 _mali_profiling_get_api_version(void)
{
	return MALI_PROFILING_API_VERSION;
}
/**
 * Called by gator to get the data about Mali instance in use:
 * product id, version, number of cores
 */
void _mali_profiling_get_mali_version(struct _mali_profiling_mali_version *values)
{
	values->mali_product_id = (u32)mali_kernel_core_get_product_id();
	values->mali_version_major = mali_kernel_core_get_gpu_major_version();
	values->mali_version_minor = mali_kernel_core_get_gpu_minor_version();
	values->num_of_l2_cores = mali_l2_cache_core_get_glob_num_l2_cores();
	values->num_of_fp_cores = mali_executor_get_num_cores_total();
	/* There is always exactly one vertex processor (GP) core. */
	values->num_of_vp_cores = 1;
}
/* Symbols consumed by the external gator.ko profiling module. */
EXPORT_SYMBOL(_mali_profiling_set_event);
EXPORT_SYMBOL(_mali_profiling_get_l2_counters);
EXPORT_SYMBOL(_mali_profiling_control);
EXPORT_SYMBOL(_mali_profiling_get_api_version);
EXPORT_SYMBOL(_mali_profiling_get_mali_version);
|
import React from 'react';
import soknadSetup from '../../utils/soknadSetup';
import Soknadstatussjekker from '../Soknadstatussjekker';
import EttSporsmalPerSide from './EttSporsmalPerSide';
import { validerDenneSiden, validerForegaendeSider } from './validerEttSporsmalPerSide';
// Wraps EttSporsmalPerSide in the application-status check; earlier pages
// are validated (validerForegaendeSider) before this one is shown.
const EttSporsmalPerSideContainer = (props) => {
    return (<Soknadstatussjekker
        {...props}
        Component={EttSporsmalPerSide}
        valider={validerForegaendeSider} />);
};

// soknadSetup wires in per-page validation for the current page.
export default soknadSetup(validerDenneSiden, EttSporsmalPerSideContainer, true);
|
package com.zys.baselibrary.views.pagestatus;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.zys.baselibrary.R;
/**
* 默认的正在加载布局...
*/
public class DefaultLoadingLayout extends SmartLoadingLayout {
private LayoutInflater mInflater;
private Context mContext;
private RelativeLayout rlAddedView;
private boolean mAnyAdded;
private RelativeLayout.LayoutParams mLayoutParams;
private TextView tvLoadingDescription;
private TextView tvEmptyDescription;
private TextView tvErrorDescription;
private TextView tv_empty_message;
private TextView tv_error_message;
private TextView tv_error;
private TextView tv_empty_agreen;
private DotsTextView dtvLoading;
private LinearLayout mLoadingContent;
    /**
     * Inflates the three state views (loading / empty / error) once, caches
     * their key child views, and prepares centered match-parent layout params.
     */
    public DefaultLoadingLayout(Context context, View contentView) {
        this.mContext = context;
        this.mContentView = contentView;
        this.mInflater = LayoutInflater.from(context);
        {
            mLoadingView = mInflater.inflate(R.layout.smartloadinglayout_view_on_loading, null);
            mEmptyView = mInflater.inflate(R.layout.smartloadinglayout_view_on_empty, null);
            tv_empty_message = mEmptyView.findViewById(R.id.tv_empty_message);
            tv_empty_agreen = mEmptyView.findViewById(R.id.tv_empty_agreen);
            mErrorView = mInflater.inflate(R.layout.smartloadinglayout_view_on_error, null);
            tv_error_message = mErrorView.findViewById(R.id.tv_error_message);
            tv_error = mErrorView.findViewById(R.id.tv_error);
            dtvLoading = (DotsTextView) mLoadingView.findViewById(R.id.dots);
            // State views fill the parent and are centered within it.
            mLayoutParams = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams
                    .MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT);
            mLayoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
        }
    }
    /** Sets the color of the loading text and the animated dots. */
    public void setLoadingDescriptionColor(int color) {
        if (tvLoadingDescription == null) {
            // Lazy lookup: the description view is resolved on first use.
            tvLoadingDescription = (TextView) mLoadingView.findViewById(R.id.tv_loading_message);
        }
        tvLoadingDescription.setTextColor(color);
        dtvLoading.setTextColor(color);
    }
    /** Sets the text size of the loading description. */
    public void setLoadingDescriptionTextSize(float size) {
        if (tvLoadingDescription == null) {
            tvLoadingDescription = (TextView) mLoadingView.findViewById(R.id.tv_loading_message);
        }
        tvLoadingDescription.setTextSize(size);
    }
    /** Sets the loading description text. */
    public void setLoadingDescription(String loadingDescription) {
        if (tvLoadingDescription == null) {
            tvLoadingDescription = (TextView) mLoadingView.findViewById(R.id.tv_loading_message);
        }
        tvLoadingDescription.setText(loadingDescription);
    }
    /** Sets the loading description text from a string resource. */
    public void setLoadingDescription(int resID) {
        if (tvLoadingDescription == null) {
            tvLoadingDescription = (TextView) mLoadingView.findViewById(R.id.tv_loading_message);
        }
        tvLoadingDescription.setText(resID);
    }
    /** Replaces the default loading indicator with a custom centered view. */
    public void replaceLoadingProgress(View view) {
        if (mLoadingContent == null) {
            mLoadingContent = (LinearLayout) mLoadingView.findViewById(R.id.ll_loading);
        }
        RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams
                (RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams
                        .WRAP_CONTENT);
        lp.addRule(RelativeLayout.CENTER_IN_PARENT);
        // Add the replacement first, then remove the stock loading content.
        ((RelativeLayout) mLoadingView).addView(view, lp);
        ((RelativeLayout) mLoadingView).removeView(mLoadingContent);
    }
    /** Sets the color of the empty-state description text. */
    public void setEmptyDescriptionColor(int color) {
        if (tvEmptyDescription == null) {
            tvEmptyDescription = (TextView) mEmptyView.findViewById(R.id.tv_empty_message);
        }
        tvEmptyDescription.setTextColor(color);
    }
    /** Sets the text size of the empty-state description. */
    public void setEmptyDescriptionTextSize(float size) {
        if (tvEmptyDescription == null) {
            tvEmptyDescription = (TextView) mEmptyView.findViewById(R.id.tv_empty_message);
        }
        tvEmptyDescription.setTextSize(size);
    }
    /** Sets the empty-state description text. */
    public void setEmptyDescription(String emptyDescription) {
        if (tvEmptyDescription == null) {
            tvEmptyDescription = (TextView) mEmptyView.findViewById(R.id.tv_empty_message);
        }
        tvEmptyDescription.setText(emptyDescription);
    }
    /** Sets the empty-state description text from a string resource. */
    public void setEmptyDescription(int resID) {
        if (tvEmptyDescription == null) {
            tvEmptyDescription = (TextView) mEmptyView.findViewById(R.id.tv_empty_message);
        }
        tvEmptyDescription.setText(resID);
    }
    /** Shows {@code newIcon} above the empty-state description text. */
    public void replaceEmptyIcon(Drawable newIcon) {
        if (tvEmptyDescription == null) {
            tvEmptyDescription = (TextView) mEmptyView.findViewById(R.id.tv_empty_message);
        }
        newIcon.setBounds(0, 0, newIcon.getMinimumWidth(), newIcon.getMinimumHeight());
        tvEmptyDescription.setCompoundDrawables(null, newIcon, null, null);
    }
public void replaceEmptyBg(int color) {
if (mEmptyView == null) {
mEmptyView.setBackgroundColor(color);
}
}
    /** Shows the drawable resource {@code resId} above the empty-state text. */
    public void replaceEmptyIcon(int resId) {
        if (tvEmptyDescription == null) {
            tvEmptyDescription = (TextView) mEmptyView.findViewById(R.id.tv_empty_message);
        }
        // NOTE(review): Resources.getDrawable(int) is deprecated on API 22+;
        // consider ContextCompat.getDrawable when the min SDK allows.
        Drawable newIcon = mContext.getResources().getDrawable(resId);
        newIcon.setBounds(0, 0, newIcon.getMinimumWidth(), newIcon.getMinimumHeight());
        tvEmptyDescription.setCompoundDrawables(null, newIcon, null, null);
    }
    /** Sets the color of the error description text. */
    public void setErrorDescriptionColor(int color) {
        if (tvErrorDescription == null) {
            tvErrorDescription = (TextView) mErrorView.findViewById(R.id.tv_error_message);
        }
        tvErrorDescription.setTextColor(color);
    }
    /** Sets the text size of the error description. */
    public void setErrorDescriptionTextSize(float size) {
        if (tvErrorDescription == null) {
            tvErrorDescription = (TextView) mErrorView.findViewById(R.id.tv_error_message);
        }
        tvErrorDescription.setTextSize(size);
    }
    /** Sets the error description text. */
    public void setErrorDescription(String errorDescription) {
        if (tvErrorDescription == null) {
            tvErrorDescription = (TextView) mErrorView.findViewById(R.id.tv_error_message);
        }
        tvErrorDescription.setText(errorDescription);
    }
public void setErrorDescription(int resID) {
if (tvErrorDescription == null) {
tvErrorDescription = (TextView) mErrorView.findViewById(R.id.tv_error_message);
}
tvErrorDescription.setText(resID);
}
public void setErrorButtonListener(View.OnClickListener listener) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
tv_error.setOnClickListener(listener);
}
public void setEmptyButtonListener(View.OnClickListener listener) {
if (tv_empty_agreen == null) {
tv_empty_agreen = (TextView) mEmptyView.findViewById(R.id.tv_empty_agreen);
}
tv_empty_agreen.setOnClickListener(listener);
}
public void setErrorButtonBackground(Drawable background) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
tv_error.setBackground(background);
} else {
//noinspection deprecation
tv_error.setBackgroundDrawable(background);
}
}
public void setErrorButtonBackground(int resID) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
tv_error.setBackgroundResource(resID);
}
public void setErrorButtonTextColor(int color) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
tv_error.setTextColor(color);
}
public void setErrorButtonText(String text) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
tv_error.setText(text);
}
public void setErrorButtonText(int resID) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
tv_error.setText(resID);
}
public void hideErrorButton() {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
tv_error.setVisibility(View.GONE);
}
public void replaceErrorButton(Button newButton) {
if (tv_error == null) {
tv_error = (TextView) mErrorView.findViewById(R.id.tv_error);
}
((RelativeLayout) mErrorView).removeView(tv_error);
tv_error = newButton;
RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams
(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams
.WRAP_CONTENT);
lp.addRule(RelativeLayout.CENTER_HORIZONTAL);
lp.addRule(RelativeLayout.BELOW, R.id.tv_error_message);
lp.topMargin = dp2px(mContext, 12);
((RelativeLayout) mErrorView).addView(tv_error, lp);
}
public void replaceErrorIcon(Drawable newIcon) {
if (tvErrorDescription == null) {
tvErrorDescription = (TextView) mErrorView.findViewById(R.id.tv_error_message);
}
newIcon.setBounds(0, 0, newIcon.getMinimumWidth(), newIcon.getMinimumHeight());
tvErrorDescription.setCompoundDrawables(null, newIcon, null, null);
}
public void replaceErrorIcon(int resId) {
if (tvErrorDescription == null) {
tvErrorDescription = (TextView) mErrorView.findViewById(R.id.tv_error_message);
}
Drawable newIcon = mContext.getResources().getDrawable(resId);
newIcon.setBounds(0, 0, newIcon.getMinimumWidth(), newIcon.getMinimumHeight());
tvErrorDescription.setCompoundDrawables(null, newIcon, null, null);
}
    /**
     * Applies a solid background color to the overlay container that hosts the
     * loading/empty/error views, creating and attaching the container first if
     * it does not exist yet.
     *
     * @param color ARGB color value
     */
    public void setLayoutBackgroundColor(int color) {
        initAddedLayout();
        rlAddedView.setBackgroundColor(color);
    }
    /**
     * Applies a drawable resource as the overlay container's background,
     * creating and attaching the container first if it does not exist yet.
     *
     * @param resID drawable resource id
     */
    public void setLayoutBackground(int resID) {
        initAddedLayout();
        rlAddedView.setBackgroundResource(resID);
    }
@Override
public void onLoading() {
checkContentView();
if (!mLoadingAdded) {
initAddedLayout();
if (mLoadingView != null) {
rlAddedView.addView(mLoadingView, mLayoutParams);
mLoadingAdded = true;
}
}
dtvLoading.showAndPlay();
showViewWithStatus(LayoutStatus.Loading);
}
    /**
     * Switches to the content state: stops the loading animation and reveals
     * the caller-provided content view.
     * <p>
     * Throws NullPointerException (via checkContentView) if no content view
     * has been set.
     */
    @Override
    public void onShowData() {
        checkContentView();
        dtvLoading.hideAndStop();
        showViewWithStatus(LayoutStatus.ShowData);
    }
@Override
public void onEmpty() {
checkContentView();
if (!mEmptyAdded) {
initAddedLayout();
if (mEmptyView != null) {
rlAddedView.addView(mEmptyView, mLayoutParams);
mEmptyAdded = true;
}
}
dtvLoading.hideAndStop();
showViewWithStatus(LayoutStatus.Empty);
}
@Override
public void onEmpty(boolean isShowAgreen) {
checkContentView();
if (!mEmptyAdded) {
initAddedLayout();
if (mEmptyView != null) {
tv_empty_agreen.setVisibility(isShowAgreen ? View.VISIBLE : View.GONE);
rlAddedView.addView(mEmptyView, mLayoutParams);
mEmptyAdded = true;
}
}
dtvLoading.hideAndStop();
showViewWithStatus(LayoutStatus.Empty);
}
@Override
public void onEmpty(String s) {
checkContentView();
if (!mEmptyAdded) {
initAddedLayout();
if (mEmptyView != null) {
tv_empty_message.setText(s);
rlAddedView.addView(mEmptyView, mLayoutParams);
mEmptyAdded = true;
}
}
dtvLoading.hideAndStop();
showViewWithStatus(LayoutStatus.Empty);
}
    /**
     * Toggles the empty-state action button's visibility. No-op while the
     * button reference has not been resolved yet.
     *
     * @param b true to show the button, false to hide it
     */
    public void showEmptyAgreen(boolean b) {
        if (tv_empty_agreen != null) {
            tv_empty_agreen.setVisibility(b ? View.VISIBLE : View.GONE);
        }
    }
@Override
public void onError() {
checkContentView();
if (!mErrorAdded) {
initAddedLayout();
if (mErrorView != null) {
rlAddedView.addView(mErrorView, mLayoutParams);
mErrorAdded = true;
}
}
dtvLoading.hideAndStop();
showViewWithStatus(LayoutStatus.Error);
}
@Override
public void onError(String e) {
checkContentView();
if (!mErrorAdded) {
initAddedLayout();
if (mErrorView != null) {
tv_error_message.setText(e);
rlAddedView.addView(mErrorView, mLayoutParams);
mErrorAdded = true;
}
}
dtvLoading.hideAndStop();
showViewWithStatus(LayoutStatus.Error);
}
    /**
     * Fails fast when the required content view was never supplied.
     *
     * @throws NullPointerException if the content view is null
     */
    private void checkContentView() {
        if (mContentView == null) {
            throw new NullPointerException("The content view not set..");
        }
    }
private void initAddedLayout() {
if (!mAnyAdded) {
rlAddedView = new RelativeLayout(mContext);
rlAddedView.setLayoutParams(mLayoutParams);
ViewGroup parent = (ViewGroup) mContentView.getParent();
parent.addView(rlAddedView);
mAnyAdded = true;
}
}
/**
* dp转px
*/
public int dp2px(Context context, float dp) {
// 拿到屏幕密度
float density = context.getResources().getDisplayMetrics().density;
int px = (int) (dp * density + 0.5f);// 四舍五入
return px;
}
}
|
/**
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package microservices.api.user.jaxrs;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;
import microservices.api.user.database.DatabaseRegistry;
import microservices.api.user.jaxrs.model.Profile;
@Path("")
@Api(value = "User Profile", authorizations = { @Authorization(value="basicAuth") })
/**
 * JAX-RS resource exposing CRUD operations for user profiles, backed by the
 * storage service obtained from {@link DatabaseRegistry}.
 */
@Path("")
@Api(value = "User Profile", authorizations = { @Authorization(value="basicAuth") })
public class ProfileResource {

    /**
     * Lists every stored profile.
     *
     * @return 200 with a JSON array of all profiles
     */
    @GET
    @ApiOperation(value = "Retrieve all profiles", responseContainer = "array", response = Profile.class)
    @Produces("application/json")
    public Response getProfiles() {
        return Response.ok()
                .entity(DatabaseRegistry.getStorageService().getAllEntries())
                .build();
    }

    /**
     * Persists a new profile and echoes back its generated id.
     *
     * @param profile the profile payload (required)
     * @return 201 with a small JSON body containing the new id
     */
    @POST
    @ApiOperation("Create a profile")
    @Consumes("application/json")
    @Produces("application/json")
    @ApiResponses({ @ApiResponse(code = 201, message = "Profile created", response = String.class) })
    public Response createProfile(@ApiParam(required = true) Profile profile) {
        final String id = DatabaseRegistry.getStorageService().createEntry(profile);
        // NOTE(review): the id is interpolated without quotes; if ids are not
        // numeric strings this produces invalid JSON — confirm the id format.
        return Response.status(Status.CREATED).entity("{\"id\":" + id + "}").build();
    }

    /**
     * Fetches a single profile by id.
     *
     * @param id path id of the profile
     * @return 200 with the profile, or 404 when no entry exists
     */
    @GET
    @Path("{id}")
    @ApiOperation(value = "Get a profile")
    @Produces("application/json")
    @ApiResponses({ @ApiResponse(code = 200, message = "Profile retrieved", response = Profile.class),
            @ApiResponse(code = 404, message = "Profile not found") })
    public Response getProfile(@PathParam("id") String id) {
        Profile profile = DatabaseRegistry.getStorageService().getEntry(id);
        if (profile == null) {
            return Response.status(Status.NOT_FOUND).build();
        }
        return Response.ok().entity(profile).build();
    }

    /**
     * Replaces an existing profile.
     *
     * @param id      path id of the profile to update
     * @param profile new profile payload (required)
     * @return 200 when updated, 404 when no entry exists
     */
    @PUT
    @Path("{id}")
    @ApiOperation(value = "Update a profile")
    @Consumes("application/json")
    @Produces("text/plain")
    @ApiResponses({ @ApiResponse(code = 200, message = "Profile updated"),
            @ApiResponse(code = 404, message = "Profile not found") })
    public Response updateProfile(@PathParam("id") String id, @ApiParam(required = true) Profile profile) {
        // Existence check first so a missing id yields 404 rather than an upsert.
        if (DatabaseRegistry.getStorageService().getEntry(id) == null) {
            return Response.status(Status.NOT_FOUND).build();
        }
        DatabaseRegistry.getStorageService().updateEntry(id, profile);
        return Response.ok().build();
    }

    /**
     * Deletes a profile by id.
     *
     * @param id path id of the profile to delete
     * @return 200 when deleted, 404 when no entry exists
     */
    @DELETE
    @Path("{id}")
    @ApiOperation(value = "Delete a profile")
    @ApiResponses({ @ApiResponse(code = 200, message = "Profile deleted"),
            @ApiResponse(code = 404, message = "Profile not found") })
    @Produces("text/plain")
    public Response deleteProfile(@PathParam("id") String id) {
        // Existence check first so a missing id yields 404.
        if (DatabaseRegistry.getStorageService().getEntry(id) == null) {
            return Response.status(Status.NOT_FOUND).build();
        }
        DatabaseRegistry.getStorageService().deleteEntry(id);
        return Response.ok().build();
    }
}
|
package io.dronefleet.mavlink.common;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * Enumeration of the ADSB altimeter types.
 */
@MavlinkEnum
public enum AdsbAltitudeType {
    /**
     * Altitude reported from a Baro source using QNH reference
     * (wire value 0).
     */
    @MavlinkEntryInfo(0)
    ADSB_ALTITUDE_TYPE_PRESSURE_QNH,
    /**
     * Altitude reported from a GNSS source (wire value 1).
     */
    @MavlinkEntryInfo(1)
    ADSB_ALTITUDE_TYPE_GEOMETRIC
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.