text stringlengths 1 1.05M |
|---|
class SMTPTransaction:
    """Holds the mutable state of one SMTP transaction."""

    def __init__(self):
        # Envelope recipient (RCPT TO); None until one is recorded.
        self.envelope_to = None

    def set_envelope_recipient(self, email):
        """Record the envelope recipient address for this transaction."""
        self.envelope_to = email
const Reports = require('../models/Reports');
const Accounts = require('../models/Accounts');
const jsPDF = require('jspdf');
const XLSX = require('xlsx');
const FileSaver = require('file-saver');
const request = require("request");
const moment = require('moment');
const machine = require('node-machine-id');
const number_format = require('../helpers/number_format');
const shell = require('electron').shell
// const serverHost = 'http://localhost:8000';
const serverHost = 'http://yadgarcarriage.com/demo/yadgar';
require('jspdf-autotable');
class ReportsController {
constructor() {
    // Shared cache of account rows keyed by account id; populated later by
    // getSelect2Option() and read by every report builder below.
    ReportsController.chartsOfAccounts = [];
    // Wire the click handlers for each of the five report sections once.
    ReportsController.index(ReportsController.chartsOfAccounts);
    ReportsController.suppliers(ReportsController.chartsOfAccounts);
    ReportsController.profitAndLoss();
    ReportsController.vehicle();
    ReportsController.totalBalance(ReportsController.chartsOfAccounts);
}
// Entry point invoked by the app shell: shows the global loader, then fills
// the select2 pickers (getSelect2Option hides the loader when it finishes).
init() {
    showLoader();
    ReportsController.getSelect2Option(ReportsController.chartsOfAccounts)
}
/**
* Default report account base
* @param chartsOfAccounts
*/
static index(chartsOfAccounts) {
let account = document.getElementById('reports-form-account');
let start = document.getElementById('report-start-date');
let end = document.getElementById('report-end-date');
let buttons = [
{type: 'show', id: 'show_report'},
{type: 'pdf', id: 'download_report_pdf'},
{type: 'xsl', id: 'download_report_xsl'},
{type: 'wpdf', id: 'whats_app_report_pdf'},
{type: 'wxsl', id: 'whats_app_report_xsl'}
];
Array.prototype.forEach.call(buttons, (button) => {
let btnEle = document.getElementById(button.id);
btnEle.addEventListener('click', function () {
if (!account.value) {
showToast('Please select Account ID.', 'danger');
return false;
}
ReportsController.getRecord(account, start, end, chartsOfAccounts, button.type);
});
});
}
/**
 * Load the ledger rows for one account, build the report data set
 * (running balance with a brought-forward line), and route it to the
 * requested output.
 * @param account HTMLSelectElement holding the selected account id
 * @param start   date input (DD/MM/YYYY); empty means "from first record"
 * @param end     date input (DD/MM/YYYY); empty means "to last record"
 * @param chartsOfAccounts map of account id -> account row
 * @param type 'show' | 'pdf' | 'xsl' | 'wpdf' | 'wxsl'
 */
static getRecord(account, start, end, chartsOfAccounts, type) {
    const tableId = '#reports_list';
    let startReport = start.value;
    let endReport = end.value;
    showLoader()
    Reports.getReports(account.value, chartsOfAccounts).then(function (results) {
        let sNo = 1, dataSet = [], dates = [], total = 0;
        // Collect all row dates so empty start/end inputs can default to the
        // first/last record (assumes results are date-ordered — TODO confirm).
        Array.prototype.forEach.call(results, (row) => {
            dates.push(row.created_at);
        })
        if (startReport == '') {
            startReport = isset(dates[0]) ? moment(dates[0]) : ''
        } else {
            startReport = moment(startReport, 'DD/MM/YYYY');
        }
        if (endReport == '') {
            let last = dates.length - 1;
            endReport = isset(dates[last]) ? moment(dates[last]) : ''
        } else {
            endReport = moment(endReport, 'DD/MM/YYYY');
        }
        let $isHeader = true;
        Array.prototype.forEach.call(results, (row) => {
            let created = moment(row.created_at).format('YYYY-MM-DD');
            // Rows before the window still feed `total` below, so the first
            // in-window row is preceded by a brought-forward (B/F) line
            // carrying the balance accumulated so far.
            if (startReport.format('YYYY-MM-DD') <= created && endReport.format('YYYY-MM-DD') >= created) {
                if ($isHeader) {
                    let hDis = (total >= 0) ? 'Dr' : 'Cr';
                    dataSet.push(['', '', 'B/F', '', '', number_format(total, 2) + ' ' + hDis]);
                    $isHeader = false;
                }
            }
            // Running balance: debits increase, credits decrease; sign of
            // `total` selects the Dr/Cr suffix.
            total = parseFloat(total) - parseFloat(row.credit);
            total = parseFloat(total) + parseFloat(row.debit);
            if (startReport.format('YYYY-MM-DD') <= created && endReport.format('YYYY-MM-DD') >= created) {
                let aDis = (total >= 0) ? 'Dr' : 'Cr';
                dataSet.push([
                    sNo,
                    moment(row.created_at).format('DD-MM-YYYY'),
                    row.description,
                    Math.abs(row.debit),
                    Math.abs(row.credit),
                    number_format(Math.abs(total), 2) + ' ' + aDis
                ]);
                sNo++;
            }
        })
        // If result not found
        if (dataSet.length <= 0) {
            showToast('Sorry! Result not found.', 'danger');
            return false;
        }
        // Init Data Table
        const headings = ['No', 'Date', 'Description', 'Debit', 'Credit', 'Total'];
        HtmlHelper.initDataTable(tableId, dataSet, headings);
        let selAccount;
        if (isset(chartsOfAccounts[account.value])) {
            selAccount = chartsOfAccounts[account.value];
        }
        startReport = startReport.format('DD-MM-YYYY');
        endReport = endReport.format('DD-MM-YYYY');
        if (type == 'pdf' || type == 'wpdf') {
            ReportsController.generatePDF(headings, dataSet, startReport, endReport, selAccount, type);
        } else if (type == 'xsl' || type == 'wxsl') {
            ReportsController.generateXSL(1, headings, dataSet, selAccount, type);
        } else {
            // On-screen view: fill the modal header fields and show it.
            const info = document.getElementById('report-information');
            info.querySelector('.a').innerHTML = selAccount.ac_id
            info.querySelector('.n').innerHTML = selAccount.name
            info.querySelector('.t').innerHTML = getChartOfAccountsLabels(selAccount.type)
            info.querySelector('.print').innerHTML = moment().format('DD-MM-YYYY')
            info.querySelector('.from').innerHTML = startReport
            info.querySelector('.to').innerHTML = endReport
            hideLoader()
            window.$('#modal-default').modal('show');
            window.$('#reports-section').animate({scrollTop: 0}, '300');
        }
    })
}
static generatePDF(headings, dataSet, start, end, selAccount, type) {
const doc = new jsPDF();
headings = headings.map(x => (x.title));
let img = new Image();
img.src = './assets/img/logo.png';
doc.addImage(img, 'PNG', 15, 10);
const inWidth = doc.internal.pageSize.width;
doc.setFontSize(10);
doc.setFontType('bold');
doc.text(15, 40, 'A/C ID:');
doc.text((inWidth - 65), 40, 'Print on:');
doc.setFontType('normal');
doc.text(40, 40, selAccount.ac_id);
doc.text((inWidth - 35), 40, moment().format('DD-MM-YYYY'));
doc.setFontSize(10);
doc.setFontType('bold');
doc.text(15, 45, 'Name:');
doc.text((inWidth - 65), 45, 'From:');
doc.setFontType('normal');
doc.text(40, 45, selAccount.name);
doc.text((inWidth - 35), 45, start);
doc.setFontSize(10);
doc.setFontType('bold');
doc.text(15, 50, 'Type:');
doc.text((inWidth - 65), 50, 'To:');
doc.setFontType('normal');
doc.text(40, 50, getChartOfAccountsLabels(selAccount.type));
doc.text((inWidth - 35), 50, end);
doc.autoTable({
margin: {top: 35},
head: [headings],
body: dataSet,
columnStyles: {
0: {halign: 'center', minCellWidth: 10},
1: {halign: 'center', minCellWidth: 20},
3: {minCellWidth: 25},
4: {minCellWidth: 25},
5: {minCellWidth: 30}
},
headStyles: {fillColor: '#00a65a', fontSize: 10},
bodyStyles: {fontSize: 8}
});
if (type == 'wpdf') {
let blob = doc.output('datauristring');
ReportsController.send2server('pdf', blob).then(function (url) {
hideLoader()
url = "https://wa.me/?text=" + url;
shell.openExternal(url)
});
} else {
hideLoader()
doc.save(moment().format('YYYY-MM-DD') + ' ' + selAccount.name + '.pdf');
}
}
/**
* Get Suppliers Reports Section Start
* @param chartsOfAccounts
*/
static suppliers(chartsOfAccounts) {
let supplierDate = document.getElementById('supplier-report-date');
let supplierAccount = document.getElementById('supplier-reports-form-account');
if (supplierDate.value.trim() === '') {
supplierDate.value = moment().format('DD/MM/YYYY');
}
let buttons = [
{type: 'show', id: 'show_supplier_report'},
{type: 'pdf', id: 'download_supplier_report_pdf'},
{type: 'xsl', id: 'download_supplier_report_xsl'},
{type: 'wpdf', id: 'whats_app_supplier_report_pdf'},
{type: 'wxsl', id: 'whats_app_supplier_report_xsl'}
];
Array.prototype.forEach.call(buttons, (button) => {
const btnEle = document.getElementById(button.id);
btnEle.addEventListener('click', function () {
if (!supplierDate.value) {
showToast('Please Select date.', 'danger');
return false;
}
if (!supplierAccount.value) {
showToast('Please select Account ID.', 'danger');
return false;
}
ReportsController.getSupplierRecord(supplierAccount, supplierDate, chartsOfAccounts, button.type);
});
});
}
/**
 * Load one supplier's rows for a single day, total the debits, and route
 * the result to the requested output.
 * @param account HTMLSelectElement holding the supplier account id
 * @param day     date input (DD/MM/YYYY)
 * @param chartsOfAccounts map of account id -> account row
 * @param type 'show' | 'pdf' | 'xsl' | 'wpdf' | 'wxsl'
 */
static getSupplierRecord(account, day, chartsOfAccounts, type) {
    const tableId = '#supplier_reports_list';
    let dayOfReport = moment(day.value, 'DD/MM/YYYY');
    Reports.getSupplierReports(account.value, dayOfReport).then(function (results) {
        let sNo = 1, dataSet = [], total = 0;
        // Sum debits only; supplier rows carry no running balance.
        Array.prototype.forEach.call(results, (row) => {
            total = parseFloat(total) + parseFloat(row.debit);
            dataSet.push([
                row.name,
                row.description,
                Math.abs(row.debit),
            ]);
            sNo++;
        })
        // If result not found
        if (dataSet.length <= 0) {
            showToast('Sorry! Result not found.', 'danger');
            return false;
        }
        // Init Data Table
        const headings = ['Name', 'Description', 'Total'];
        HtmlHelper.initDataTable(tableId, dataSet, headings);
        HtmlHelper.setDataTableFooter(tableId, '<td colspan="2">Total</td><td>' + number_format(total, 2) + '</td>');
        let selAccount;
        if (isset(chartsOfAccounts[account.value])) {
            selAccount = chartsOfAccounts[account.value];
        }
        if (type == 'pdf' || type == 'wpdf') {
            ReportsController.generateSupplierPDF(headings, dataSet, total, selAccount, type);
        } else if (type == 'xsl' || type == 'wxsl') {
            // Push total
            dataSet.push(['Total', '', number_format(total, 2)]);
            ReportsController.generateXSL(2, headings, dataSet, selAccount, type);
        } else {
            // On-screen view: fill the modal header fields and show it.
            const info = document.getElementById('supplier-report-information');
            info.querySelector('.a').innerHTML = selAccount.ac_id
            info.querySelector('.n').innerHTML = selAccount.name
            info.querySelector('.print').innerHTML = moment().format('DD-MM-YYYY')
            info.querySelector('.from').innerHTML = dayOfReport.format('DD-MM-YYYY')
            hideLoader();
            window.$('#modal-supplier').modal('show');
            window.$('#reports-section').animate({scrollTop: 0}, '300');
        }
    })
}
static generateSupplierPDF(headings, dataSet, total, selAccount, type) {
const doc = new jsPDF();
headings = headings.map(x => (x.title));
let img = new Image();
img.src = './assets/img/logo.png';
doc.addImage(img, 'PNG', 15, 10);
const inWidth = doc.internal.pageSize.width;
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 15, 'Print on:');
doc.setFontType('normal');
doc.text((inWidth - 35), 15, moment().format('DD-MM-YYYY'));
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 20, 'A/C ID:');
doc.setFontType('normal');
doc.text((inWidth - 35), 20, selAccount.ac_id);
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 25, 'Name:');
doc.setFontType('normal');
doc.text((inWidth - 35), 25, selAccount.name);
dataSet.push([
'Total',
'',
number_format(total, 2),
]);
doc.autoTable({
margin: {top: 35},
head: [headings],
body: dataSet,
columnStyles: {
0: {minCellWidth: 30},
2: {minCellWidth: 30}
},
headStyles: {fillColor: '#00a65a', fontSize: 10},
bodyStyles: {fontSize: 8}
});
if (type == 'wpdf') {
let blob = doc.output('datauristring');
ReportsController.send2server('pdf', blob).then(function (url) {
hideLoader();
url = "https://wa.me/?text=" + url;
shell.openExternal(url);
});
} else {
hideLoader();
doc.save(moment().format('YYYY-MM-DD') + ' Supplier ' + selAccount.name + '.pdf');
}
}
/**
* Get profit and loss Reports Section Start
*/
static profitAndLoss() {
let start = document.getElementById('profit-report-start-date');
let end = document.getElementById('profit-report-end-date');
let buttons = [
{type: 'show', id: 'show_profit_report'},
{type: 'pdf', id: 'download_profit_report_pdf'},
{type: 'xsl', id: 'download_profit_report_xsl'},
{type: 'wpdf', id: 'whats_app_profit_report_pdf'},
{type: 'wxsl', id: 'whats_app_profit_report_xsl'}
];
Array.prototype.forEach.call(buttons, (button) => {
let btnEle = document.getElementById(button.id);
btnEle.addEventListener('click', function () {
ReportsController.getProfitAndLossRecord(start, end, button.type);
});
});
}
/**
 * Load all profit-and-loss rows, build the report data set (running
 * balance with a brought-forward line, same scheme as getRecord), and
 * route it to the requested output.
 * @param start date input (DD/MM/YYYY); empty means "from first record"
 * @param end   date input (DD/MM/YYYY); empty means "to last record"
 * @param type 'show' | 'pdf' | 'xsl' | 'wpdf' | 'wxsl'
 */
static getProfitAndLossRecord(start, end, type) {
    const tableId = '#profit_reports_list';
    let startReport = start.value;
    let endReport = end.value;
    Reports.getProfitAndLossReports().then(function (results) {
        let sNo = 1, dataSet = [], dates = [], total = 0;
        // Collect all row dates so empty start/end inputs can default to the
        // first/last record (assumes results are date-ordered — TODO confirm).
        Array.prototype.forEach.call(results, (row) => {
            dates.push(row.created_at);
        })
        if (startReport == '') {
            startReport = isset(dates[0]) ? moment(dates[0]) : ''
        } else {
            startReport = moment(startReport, 'DD/MM/YYYY');
        }
        if (endReport == '') {
            let last = dates.length - 1;
            endReport = isset(dates[last]) ? moment(dates[last]) : ''
        } else {
            endReport = moment(endReport, 'DD/MM/YYYY');
        }
        let $isHeader = true;
        Array.prototype.forEach.call(results, (row) => {
            let created = moment(row.created_at).format('YYYY-MM-DD');
            // Out-of-window rows still feed `total`, so the first in-window
            // row is preceded by a brought-forward (B/F) line.
            if (startReport.format('YYYY-MM-DD') <= created && endReport.format('YYYY-MM-DD') >= created) {
                if ($isHeader) {
                    let hDis = (total >= 0) ? 'Dr' : 'Cr';
                    dataSet.push(['', '', 'B/F', '', '', number_format(total, 2) + ' ' + hDis]);
                    $isHeader = false;
                }
            }
            // Running balance: debits increase, credits decrease.
            total = parseFloat(total) - parseFloat(row.credit);
            total = parseFloat(total) + parseFloat(row.debit);
            if (startReport.format('YYYY-MM-DD') <= created && endReport.format('YYYY-MM-DD') >= created) {
                let aDis = (total >= 0) ? 'Dr' : 'Cr';
                dataSet.push([
                    sNo,
                    moment(row.created_at).format('DD-MM-YYYY'),
                    row.name + '(' + row.ac_id + ') ' + row.description,
                    Math.abs(row.debit),
                    Math.abs(row.credit),
                    number_format(Math.abs(total), 2) + ' ' + aDis
                ]);
                sNo++;
            }
        })
        // If result not found
        if (dataSet.length <= 0) {
            showToast('Sorry! Result not found.', 'danger');
            return false;
        }
        // Init Data Table
        const headings = ['No', 'Date', 'Description', 'Debit', 'Credit', 'Total'];
        HtmlHelper.initDataTable(tableId, dataSet, headings);
        startReport = startReport.format('DD-MM-YYYY');
        endReport = endReport.format('DD-MM-YYYY');
        if (type == 'pdf' || type == 'wpdf') {
            ReportsController.generateProfitAndLossPDF(headings, dataSet, startReport, endReport, type);
        } else if (type == 'xsl' || type == 'wxsl') {
            ReportsController.generateXSL(3, headings, dataSet, {
                ac_id: 'Profit and Loss',
                name: 'Profit and Loss'
            }, type);
        } else {
            // On-screen view: fill the modal header fields and show it.
            const info = document.getElementById('profit-report-information');
            info.querySelector('.print').innerHTML = moment().format('DD-MM-YYYY')
            info.querySelector('.from').innerHTML = startReport
            info.querySelector('.to').innerHTML = endReport
            hideLoader();
            window.$('#modal-profit-loss').modal('show');
            window.$('#reports-section').animate({scrollTop: 0}, '300');
        }
    })
}
static generateProfitAndLossPDF(headings, dataSet, start, end, type) {
const doc = new jsPDF();
headings = headings.map(x => (x.title));
let img = new Image();
img.src = './assets/img/logo.png';
doc.addImage(img, 'PNG', 15, 10);
const inWidth = doc.internal.pageSize.width;
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 15, 'Print on:');
doc.setFontType('normal');
doc.text((inWidth - 35), 15, moment().format('DD-MM-YYYY'));
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 20, 'From:');
doc.setFontType('normal');
doc.text((inWidth - 35), 20, start);
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 25, 'To:');
doc.setFontType('normal');
doc.text((inWidth - 35), 25, end);
doc.autoTable({
margin: {top: 35},
head: [headings],
body: dataSet,
columnStyles: {
0: {halign: 'center', minCellWidth: 10},
1: {halign: 'center', minCellWidth: 20},
3: {minCellWidth: 25},
4: {minCellWidth: 25},
5: {minCellWidth: 30}
},
headStyles: {fillColor: '#00a65a', fontSize: 10},
bodyStyles: {fontSize: 8}
});
if (type == 'wpdf') {
let blob = doc.output('datauristring');
ReportsController.send2server('pdf', blob).then(function (url) {
hideLoader();
url = "https://wa.me/?text=" + url;
shell.openExternal(url);
});
} else {
hideLoader();
doc.save(moment().format('YYYY-MM-DD') + ' Profit and Loss.pdf');
}
}
/**
* Get Vehicle Reports Section Start
*/
static vehicle() {
let start = document.getElementById('vehicle-report-start-date');
let end = document.getElementById('vehicle-report-end-date');
let type = document.getElementById('vehicle-reports-type');
let buttons = [
{type: 'show', id: 'show_vehicle_report'},
{type: 'pdf', id: 'download_vehicle_report_pdf'},
{type: 'xsl', id: 'download_vehicle_report_xsl'},
{type: 'wpdf', id: 'whats_app_vehicle_report_pdf'},
{type: 'wxsl', id: 'whats_app_vehicle_report_xsl'}
];
Array.prototype.forEach.call(buttons, (button) => {
let btnEle = document.getElementById(button.id);
btnEle.addEventListener('click', function () {
ReportsController.getVehicleRecord(start, end, button.type, type);
});
});
}
/**
 * Load vehicle rows for the chosen type, filter them to the date window,
 * and route the result to the requested output.
 * @param start date input (DD/MM/YYYY); empty means "from first record"
 * @param end   date input (DD/MM/YYYY); empty means "to last record"
 * @param type 'show' | 'pdf' | 'xsl' | 'wpdf' | 'wxsl'
 * @param vehicleType HTMLSelectElement (Purchase/Sales)
 */
static getVehicleRecord(start, end, type, vehicleType) {
    const tableId = '#vehicle_reports_list';
    let startReport = start.value;
    let endReport = end.value;
    Reports.getVehicleReports(vehicleType.value).then(function (results) {
        let sNo = 1, dataSet = [], dates = [];
        // Collect all row dates so empty start/end inputs can default to the
        // first/last record (assumes results are date-ordered — TODO confirm).
        Array.prototype.forEach.call(results, (row) => {
            dates.push(row.created_at);
        })
        if (startReport == '') {
            startReport = isset(dates[0]) ? moment(dates[0]) : ''
        } else {
            startReport = moment(startReport, 'DD/MM/YYYY');
        }
        if (endReport == '') {
            let last = dates.length - 1;
            endReport = isset(dates[last]) ? moment(dates[last]) : ''
        } else {
            endReport = moment(endReport, 'DD/MM/YYYY');
        }
        // Unlike the ledger reports there is no running balance here; rows
        // outside the window are simply skipped.
        Array.prototype.forEach.call(results, (row) => {
            let created = moment(row.created_at).format('YYYY-MM-DD');
            if (startReport.format('YYYY-MM-DD') <= created && endReport.format('YYYY-MM-DD') >= created) {
                dataSet.push([
                    sNo,
                    moment(row.created_at).format('DD-MM-YYYY'),
                    row.product + ' - ' + row.reg_no + ' (' + row.terminal + ')',
                    row.quantity + ' KL',
                    Math.abs(row.price),
                    Math.abs(row.total),
                ]);
                sNo++;
            }
        })
        // If result not found
        if (dataSet.length <= 0) {
            showToast('Sorry! Result not found.', 'danger');
            return false;
        }
        // Init Data Table
        const headings = ['No', 'Date', 'Description', 'Quantity', 'Price', 'Total'];
        HtmlHelper.initDataTable(tableId, dataSet, headings);
        startReport = startReport.format('DD-MM-YYYY');
        endReport = endReport.format('DD-MM-YYYY');
        if (type == 'pdf' || type == 'wpdf') {
            ReportsController.generateVehiclePDF(headings, dataSet, startReport, endReport, type);
        } else if (type == 'xsl' || type == 'wxsl') {
            const selAccount = {ac_id: vehicleType.options[vehicleType.selectedIndex].innerHTML, name: 'Vehicles'};
            ReportsController.generateXSL(3, headings, dataSet, selAccount, type);
        } else {
            // On-screen view: fill the modal header fields and show it.
            const info = document.getElementById('vehicle-report-information');
            info.querySelector('.print').innerHTML = moment().format('DD-MM-YYYY')
            info.querySelector('.from').innerHTML = startReport
            info.querySelector('.to').innerHTML = endReport
            hideLoader();
            window.$('#modal-vehicle').modal('show');
            window.$('#reports-section').animate({scrollTop: 0}, '300');
        }
    })
}
static generateVehiclePDF(headings, dataSet, start, end, type) {
const doc = new jsPDF();
headings = headings.map(x => (x.title));
let img = new Image();
img.src = './assets/img/logo.png';
doc.addImage(img, 'PNG', 15, 10);
const inWidth = doc.internal.pageSize.width;
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 15, 'Print on:');
doc.setFontType('normal');
doc.text((inWidth - 35), 15, moment().format('DD-MM-YYYY'));
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 20, 'From:');
doc.setFontType('normal');
doc.text((inWidth - 35), 20, start);
doc.setFontSize(10);
doc.setFontType('bold');
doc.text((inWidth - 65), 25, 'To:');
doc.setFontType('normal');
doc.text((inWidth - 35), 25, end);
doc.autoTable({
margin: {top: 35},
head: [headings],
body: dataSet,
columnStyles: {
0: {halign: 'center', minCellWidth: 10},
1: {halign: 'center', minCellWidth: 20},
3: {minCellWidth: 25},
4: {minCellWidth: 25},
5: {minCellWidth: 30}
},
headStyles: {fillColor: '#00a65a', fontSize: 10},
bodyStyles: {fontSize: 8}
});
if (type == 'wpdf') {
let blob = doc.output('datauristring');
ReportsController.send2server('pdf', blob).then(function (url) {
hideLoader();
url = "https://wa.me/?text=" + url;
shell.openExternal(url);
});
} else {
hideLoader();
doc.save(moment().format('YYYY-MM-DD') + ' Vehicle Reports.pdf');
}
}
/**
* Get Balance 4 Charts of Accounts
*/
static totalBalance(chartsOfAccounts) {
let type = document.getElementById('balance-reports-type');
let buttons = [
{type: 'show', id: 'show_balance_report'},
{type: 'pdf', id: 'download_balance_report_pdf'},
{type: 'xsl', id: 'download_balance_report_xsl'},
{type: 'wpdf', id: 'whats_app_balance_report_pdf'},
{type: 'wxsl', id: 'whats_app_balance_report_xsl'}
];
Array.prototype.forEach.call(buttons, (button) => {
let btnEle = document.getElementById(button.id);
btnEle.addEventListener('click', function () {
ReportsController.getTotalBalanceRecord(button.type, type, chartsOfAccounts);
});
});
}
static getTotalBalanceRecord(type, chartsOfType, chartsOfAccounts) {
const tableId = '#balance_reports_list', dataSet = [];
let sNo = 1, overallDebit = 0, overallCredit = 0;
showLoader()
// Get only account types
Accounts.getAccounts(chartsOfType.value).then(function (accountResults) {
// Loop on account results
Array.prototype.forEach.call(accountResults, (account) => {
// Get reports from account
Reports.getReports(account.id, chartsOfAccounts).then(function (results) {
// Generate total
let total = 0;
Array.prototype.forEach.call(results, (row) => {
total = parseFloat(total) - parseFloat(row.credit);
total = parseFloat(total) + parseFloat(row.debit);
});
overallDebit += (total >= 0) ? Math.abs(total) : 0;
overallCredit += (total < 0) ? Math.abs(total) : 0;
dataSet.push([
sNo,
account.ac_id,
account.name,
(total >= 0) ? Math.abs(total) : 0,
(total < 0) ? Math.abs(total) : 0,
]);
if (accountResults.length == sNo) {
// Init Data Table
const headings = ['No', 'ID', 'Account Name', 'Debit', 'Credit'];
HtmlHelper.initDataTable(tableId, dataSet, headings);
HtmlHelper.setDataTableFooter(tableId,
'<td colspan="3">Total</td><td>' + number_format(overallDebit, 2) + '</td><td>' + number_format(overallCredit, 2) + '</td>');
const nameForPrint = getChartOfAccountsLabels(chartsOfType.value);
const selAccount = {'ac_id': nameForPrint, 'name': nameForPrint};
if (type == 'pdf' || type == 'wpdf') {
// Push total
dataSet.push(['Total', '', '', number_format(overallDebit, 2), number_format(overallCredit, 2)]);
ReportsController.generateTotalBalancePDF(headings, dataSet, selAccount, type);
} else if (type == 'xsl' || type == 'wxsl') {
// Push total
dataSet.push(['Total', '', '', number_format(overallDebit, 2), number_format(overallCredit, 2)]);
ReportsController.generateXSL(4, headings, dataSet, selAccount, type);
} else {
const info = document.getElementById('balance-report-information');
info.querySelector('.a').innerHTML = nameForPrint;
info.querySelector('.print').innerHTML = moment().format('DD-MM-YYYY')
hideLoader();
window.$('#modal-balance').modal('show');
window.$('#reports-section').animate({scrollTop: 0}, '300');
}
}
sNo++;
})
});
});
}
static generateTotalBalancePDF(headings, dataSet, selAccount, type) {
const doc = new jsPDF();
headings = headings.map(x => (x.title));
let img = new Image();
img.src = './assets/img/logo.png';
doc.addImage(img, 'PNG', 15, 10);
const inWidth = doc.internal.pageSize.width;
doc.setFontSize(10);
doc.setFontType('bold');
doc.text(15, 40, 'Account:');
doc.text((inWidth - 65), 40, 'Print on:');
doc.setFontType('normal');
doc.text(40, 40, selAccount.ac_id);
doc.text((inWidth - 35), 40, moment().format('DD-MM-YYYY'));
doc.autoTable({
margin: {top: 35},
head: [headings],
body: dataSet,
columnStyles: {
0: {halign: 'center', minCellWidth: 10},
1: {minCellWidth: 20},
2: {minCellWidth: 30},
3: {halign: 'center', minCellWidth: 25},
4: {halign: 'center', minCellWidth: 25}
},
headStyles: {fillColor: '#00a65a', fontSize: 10},
bodyStyles: {fontSize: 8}
});
if (type == 'wpdf') {
let blob = doc.output('datauristring');
ReportsController.send2server('pdf', blob).then(function (url) {
hideLoader()
url = "https://wa.me/?text=" + url;
shell.openExternal(url)
});
} else {
hideLoader()
doc.save(moment().format('YYYY-MM-DD') + ' ' + selAccount.name + '.pdf');
}
}
/**
* General function for all type of reports
* @param chartsOfAccounts
*/
static getSelect2Option(chartsOfAccounts) {
let form = document.getElementById('reports-form')
let accountID = form.querySelectorAll('select[name=account]')
let supplierID = form.querySelectorAll('select[name=supplier-accounts]')
let vehicleTypeID = form.querySelectorAll('select[name=vehicle-type]')
let totalBalanceID = form.querySelectorAll('select[name=carts-of-accounts-type]')
let vehicleType = [{id: 1, html: 'Purchase', text: 'Purchase'}, {id: 2, html: 'Sales', text: 'Sales'}];
let totalBalance = [];
Array.prototype.forEach.call(getChartOfAccountsLabels(), (type, i) => {
totalBalance.push({id: i, html: type, text: type})
})
Accounts.getAccounts().then(function (results) {
let pills = [], suppliers = [];
Array.prototype.forEach.call(results, (row) => {
pills.push(HtmlHelper.getObjectOfPill(row));
// Supplier
if (parseInt(row.type) == 2) {
suppliers.push(HtmlHelper.getObjectOfPill(row));
}
chartsOfAccounts[row.id] = row;
});
// Init Select 2
HtmlHelper.initSelect2Field(accountID, pills)
HtmlHelper.initSelect2Field(supplierID, suppliers)
HtmlHelper.initSelect2Field(vehicleTypeID, vehicleType)
HtmlHelper.initSelect2Field(totalBalanceID, totalBalance)
// Hide loader
hideLoader()
})
}
static generateXSL(type, headings, dataSet, selAccount, ty) {
let filename = moment().format('YYYY-MM-DD') + ' ';
let row1 = ['<NAME>', '', '', '', '', ''];
let row2 = ['Ac/Id: ', selAccount.ac_id, '', '', 'Name: ', selAccount.name];
let wsCols = [
{wpx: 50}, // "pixels"
{wpx: 120}, // "pixels"
{wpx: 300}, // "pixels"
{wpx: 120}, // "pixels"
{wpx: 120}, // "pixels"
{wpx: 180}, // "pixels"
];
if (type == 1 || type == 3) {
filename = filename + selAccount.name;
} else if (type == 2) {
filename = filename + ' Supplier ' + selAccount.name;
row1 = [];
row2 = [selAccount.ac_id, selAccount.name, ''];
wsCols = [
{wpx: 200}, // "pixels"
{wpx: 300}, // "pixels"
{wpx: 180}, // "pixels"
];
} else if (type == 4) {
filename = filename + selAccount.name;
row1 = [];
row2 = ['', '', selAccount.name + 's', '', ''];
wsCols = [
{wpx: 50}, // "pixels"
{wpx: 50}, // "pixels"
{wpx: 300}, // "pixels"
{wpx: 200}, // "pixels"
{wpx: 200}, // "pixels"
];
}
const workbook = XLSX.utils.book_new();
headings = headings.map(x => (x.title));
dataSet.unshift(headings);
dataSet.unshift([]);
dataSet.unshift([]);
dataSet.unshift(row2);
dataSet.unshift([]);
dataSet.unshift(row1);
const ws = XLSX.utils.aoa_to_sheet(dataSet);
ws['!cols'] = wsCols;
ws['!merges'] = [{s: {r: 0, c: 0}, e: {r: 0, c: 5}}];
XLSX.utils.book_append_sheet(workbook, ws, selAccount.ac_id);
const workbookOut = XLSX.write(workbook, {bookType: 'xlsx', type: 'binary'});
function s2ab(s) {
const buf = new ArrayBuffer(s.length); //convert s to arrayBuffer
let view = new Uint8Array(buf); //create uint8array as viewer
for (let i = 0; i < s.length; i++) view[i] = s.charCodeAt(i) & 0xFF; //convert to octet
return buf;
}
if (ty == 'wxsl') {
const workbookOut4Server = XLSX.write(workbook, {bookType: 'xlsx', type: 'base64'});
ReportsController.send2server('xsl', workbookOut4Server).then(function (url) {
hideLoader();
url = "https://wa.me/?text=" + url;
shell.openExternal(url);
});
} else {
hideLoader()
FileSaver.saveAs(new Blob([s2ab(workbookOut)], {type: "application/octet-stream"}), filename + '.xlsx');
}
}
static async send2server(type, data) {
return new Promise(function (release, reject) {
request({
uri: serverHost + '/api/save/whats/app',
method: 'POST',
form: {type: type, machineId: machine.machineIdSync(), whats_app_file: data},
}, function (error, response, body) {
if (!error && response.statusCode == 200) {
release(JSON.parse(body));
} else {
console.log(error, response, body);
release(1);
}
});
});
}
}
module.exports = ReportsController
|
/* * * * * * * * * * * * * *
* class LineGraph *
* * * * * * * * * * * * * */
/**
 * D3 line chart of global annual mean temperature anomalies.
 * Renders into the element with id `parentElement`; expects rows with
 * Year, Mean, and Source fields (only Source === "GCAG" rows are shown).
 */
class LineGraph{
    // parentElement: id (without '#') of the container div
    // Data: raw parsed rows; filtered copy kept in displayData
    constructor(parentElement, Data){
        this.parentElement = parentElement;
        this.data = Data;
        this.displayData = [];
        this.initVis()
    }
    // One-time setup: svg, clip path, title, scales, axes, tooltip.
    initVis(){
        let vis = this;
        vis.margin = {top: 40, right: 100, bottom: 80, left: 100};
        // Inner drawing area = container size minus margins.
        vis.width = $("#" + vis.parentElement).width() - vis.margin.left - vis.margin.right;
        vis.height = $("#" + vis.parentElement).height() - vis.margin.top - vis.margin.bottom;
        // init drawing area
        vis.svg = d3.select("#" + vis.parentElement).append("svg")
            .attr("width", vis.width + vis.margin.left + vis.margin.right)
            .attr("height", vis.height + vis.margin.top + vis.margin.bottom)
            .append('g')
            .attr('transform', `translate (${vis.margin.left}, ${vis.margin.top})`);
        // add clip path (keeps the line/circles inside the plot area)
        vis.svg.append("clipPath")
            .attr("id", "clip")
            .append("rect")
            .attr("x", 0)
            .attr("y", 0)
            .attr("width", vis.width)
            .attr("height", vis.height)
        // add title (placeholder text; replaced in updateVis)
        vis.svg.append('text')
            .attr('class', 'title line-title')
            //.append('text')
            .text('Title for Line Graph')
            .attr('transform', `translate(${vis.width / 2}, 10)`)
            .attr('text-anchor', 'middle');
        // Scales: x = time (Year), y = linear (Mean anomaly)
        vis.x = d3.scaleTime()
            .rangeRound([0, vis.width])
        vis.y = d3.scaleLinear()
            .range([vis.height, 0]);
        // axis
        vis.xAxis = d3.axisBottom()
            .scale(vis.x);
        vis.yAxis = d3.axisLeft()
            .scale(vis.y);
        vis.xAxisGroup = vis.svg.append("g")
            .attr("class", "x-axis axis");
        vis.yAxisGroup = vis.svg.append("g")
            .attr("class", "y-axis axis");
        // append tooltip (positioned on hover in updateVis)
        vis.tooltip = d3.select("body").append('div')
            .attr('class', "tooltip")
        this.wrangleData();
    }
    // Coerce Year/Mean to numbers and keep GCAG rows after 1984.
    wrangleData(){
        let vis = this;
        vis.displayData = [];
        vis.data.forEach(function(d){
            d.Year = +d.Year;
            d.Mean = +d.Mean;
            if (d.Source === "GCAG" && d.Year > 1984) {
                vis.displayData.push(d);
            }
        });
        console.log(vis.displayData);
        // code to filter data based on start and end date
        // let start = selectedTimeRange[0];
        // let end = selectedTimeRange[1];
        // vis.helperData = vis.displayData.filter(function(d) {
        //     // date is between start and end
        //     return end >= d.Year && start <= d.Year ;
        // });
        //
        // vis.displayData = vis.helperData;
        vis.updateVis();
    }
    // Redraw line, circles, and axes from displayData (enter/merge/exit).
    updateVis(){
        let vis = this;
        // Remove the previous path so the line is fully redrawn.
        vis.svg.selectAll(".line").remove();
        vis.svg.selectAll('.line-title')
            .text("Global Annual Mean Temperature \"Anomalies\" in degrees Celsius relative to 20th century average")
        //from" + selectedTimeRange[0] + " to " + selectedTimeRange[1])
        vis.x.domain(d3.extent(vis.displayData, d => d.Year));
        vis.y.domain(d3.extent(vis.displayData, d => d.Mean));
        // Add the line
        vis.svg.append("path")
            .attr("clip-path", "url(#clip)")
            .attr("class", "line")
            .datum(vis.displayData)
            .attr("fill", "none")
            .attr("stroke", "steelblue")
            .attr("stroke-width", 1.5)
            .attr("d", d3.line()
                .x(function(d) { return vis.x(d.Year) })
                .y(function(d) { return vis.y(d.Mean) })
            )
        // circles (standard enter/merge/exit join with hover tooltip)
        vis.circle = vis.svg.selectAll("circle")
            .attr("clip-path", "url(#clip)")
            .data(vis.displayData);
        vis.circle.enter().append("circle")
            .attr("class", "circle")
            .merge(vis.circle)
            .style("fill", "steelblue")
            .on('mouseover', mouseover)
            .on('mouseout', mouseout)
            .transition()
            .duration(1000)
            .attr("r", 3)
            .attr("cx", d => vis.x(d.Year))
            .attr("cy", d => vis.y(d.Mean))
        vis.circle.exit().remove();
        // Highlight the hovered point and show the tooltip next to it.
        function mouseover(event, d){
            d3.select(this)
                .attr('stroke-width', '2px')
                .attr('stroke', 'black')
                .attr('fill', 'black')
            vis.tooltip
                .style("opacity", 1)
                .style("left", event.pageX + 20 + "px")
                .style("top", event.pageY + "px")
                .html(`
                    <div style="border: thin solid grey; border-radius: 5px; background: lightgrey; padding: 20px">
                        <h3>${d.Year}<h3>
                        <h4>Mean: ${d.Mean}</h4>
                    </div>`);
        }
        // Restore point styling and hide the tooltip.
        function mouseout(event, d) {
            d3.select(this)
                .attr('stroke-width', '2px')
                .attr('stroke', 'steelblue')
                .attr("fill", "steelblue");
            vis.tooltip
                .style("opacity", 0)
                .style("left", 0)
                .style("top", 0)
                .html(``);
        };
        vis.svg.select(".x-axis")
            .attr("transform", "translate(0," + vis.height + ")")
            .transition()
            .duration(1000)
            .call(vis.xAxis)
        // .selectAll("text")
        //     .attr("font-size", 10)
        //     .attr("transform", "translate (0,6) rotate(-22)");
        vis.svg.select(".y-axis")
            .transition()
            .duration(1000)
            .call(vis.yAxis);
    }
}
<gh_stars>1-10
import React from 'react';
import block from 'bem-cn';
import { bind } from 'decko';
import { IMenuEntry, Menu } from 'shared/view/components';
import { Icon } from 'shared/view/elements';
import ToolbarButton from '../ToolbarButton/ToolbarButton';
import './IndicatorsMenu.scss';
const b = block('indicators-menu');
interface IProps {
    // Chart backend; the indicators dropdown only renders for stockChart-x.
    kind: 'trading-view' | 'stockChart-x';
    // Called with the chosen indicator key (e.g. 'SMA') when a menu entry
    // is clicked.
    onMenuEntryClick?(indicator: string): void;
    // Called when the toolbar button itself is clicked.
    onButtonClick?(): void;
}
/**
 * Toolbar button for adding chart indicators. For the stockChart-x
 * backend it wraps the button in a dropdown menu of available
 * indicators; for trading-view the bare button is rendered.
 */
class IndicatorsMenu extends React.PureComponent<IProps> {
    // Each inner array is one menu section; entries forward their
    // indicator key to handleMenuEntryClick.
    private menuIndicatorEntriesSections: IMenuEntry[][] = [
        [{ content: 'MACD', onClick: () => this.handleMenuEntryClick('CustomMACD') }],
        [{ content: 'Volume indicator', onClick: () => this.handleMenuEntryClick('ColoredVolume') }],
        [{ content: 'Simple Moving Average ', onClick: () => this.handleMenuEntryClick('SMA') }],
    ];
    public render() {
        const { kind } = this.props;
        return (
            <div className={b()}>
                {
                    kind === 'stockChart-x'
                        ? (
                            <Menu entriesSections={this.menuIndicatorEntriesSections} menuPosition="right">
                                {this.renderToolbarButton()}
                            </Menu>
                        ) : this.renderToolbarButton()
                }
            </div>
        );
    }
    // Shared button markup used both inside and outside the Menu wrapper.
    @bind
    private renderToolbarButton() {
        const { onButtonClick } = this.props;
        return (
            <ToolbarButton title="Add Indicators..." onClick={onButtonClick}>
                <div className={b('content')()}>
                    <Icon className={b('icon')()} src={require('./images/indicators-inline.svg')} />
                </div>
            </ToolbarButton>
        );
    }
    // Forwards the indicator key to the optional onMenuEntryClick prop.
    @bind
    private handleMenuEntryClick(indicator: string) {
        const { onMenuEntryClick } = this.props;
        if (onMenuEntryClick) {
            onMenuEntryClick(indicator);
        }
    }
}
export default IndicatorsMenu;
|
import random


def generate_random_ints():
    """Return a list of 10 distinct random integers in [1, 1000].

    Uses random.sample, which guarantees uniqueness in one call and
    replaces the original rejection-sampling loop.
    """
    return random.sample(range(1, 1001), 10)
#!/usr/bin/env bash
# Test script for make_output_file / append_output.
# The assert_* helpers, detail, __calculate_filesize and __cleanup_filemgr are
# assumed to be sourced by the surrounding test harness — TODO confirm.

# make_output_file should return the same file for the same prefix/channel.
f1=$( make_output_file --prefix 'xyz' --channel 'ABC' )
assert_not_empty "${f1}"
assert_is_file "${f1}"
f2=$( make_output_file --prefix 'xyz' --channel 'ABC' )
assert_not_empty "${f2}"
assert_is_file "${f2}"
assert_equals "${f1}" "${f2}"
# append_output with no arguments should still succeed.
append_output
assert_success $?
# Appending data must grow the output file.
before_fs=$( __calculate_filesize "${f1}" )
append_output --data "Hello World"
assert_success $?
after_fs=$( __calculate_filesize "${f1}" )
assert_not_equals "${before_fs}" "${after_fs}"
detail "<${before_fs}> -- <${after_fs}>"
# Channel STDOUT echoes the data back instead of (only) writing the file.
result=$( append_output --data "Hello World" --channel STDOUT )
assert_success $?
assert_equals "Hello World" "${result}"
detail "${result}"
# --raw appends must also change the file size.
before_fs=$( __calculate_filesize "${f1}" )
result=$( append_output --data 'Hello World 2.0' --raw )
assert_success $?
after_fs=$( __calculate_filesize "${f1}" )
assert_not_equals "${before_fs}" "${after_fs}"
# --marker decorates the entry, so the file content is not the bare data.
append_output --data "Hello World" --channel 'ABC' --marker 'HELLO'
assert_success $?
result=$( \cat "${f1}" )
assert_not_equals "Hello World" "${result}"
detail "${result}"
# Multi-line data (a directory listing) should be appended intact.
inputs=$( \ls -1 )
append_output --data "${inputs}" --channel 'ABC' --marker 'LISTING'
assert_success $?
result=$( \cat "${f1}" )
assert_not_equals "Hello World" "${result}"
detail "${result}"
#assert_greater_equal $( get_number_output_files ) 1
# Optionally dump the file when detailed output is enabled.
if [ -n "${SLCF_DETAIL}" ] && [ "${SLCF_DETAIL}" -ne "${NO}" ]
then
    printf "%s\n" "========"
    \cat "${f1}"
fi
__cleanup_filemgr
|
#!/usr/bin/env bash
# Provisioning script: installs pip (for Ansible) and enables SSH password login.
echo "PROVISION MACHINE $HOSTNAME"
echo "Install python3-pip for ansible"
sudo apt-get update
sudo apt-get --assume-yes install python3-pip
echo "Write sshd_config"
# NOTE(review): enabling PasswordAuthentication weakens SSH security; this looks
# intended for throwaway dev VMs — confirm before using on anything reachable.
sudo sed -i 's/PasswordAuthentication no/PasswordAuthentication yes/' /etc/ssh/sshd_config
sudo systemctl restart sshd
|
#!/bin/sh
# Use this to check out the first track in a particular mitigation sequence
# ./checkout-lesson.sh sql-injection

# Guard: a branch name is required (the original silently misbehaved without one).
if [ -z "$1" ]; then
    echo "usage: $0 <branch>" >&2
    exit 1
fi
# Show the branch description, then check out the parent of the first commit
# unique to that branch. $1 is quoted and backticks replaced with $(...).
git config branch."$1".description
git checkout "$(git rev-list "master..$1" | tail -1)~1" > /dev/null 2>&1
|
#!/usr/bin/env bash
# Remove generated artifacts: the framework's route cache and upload directories.
# Paths are relative to this script's location — run it from its own directory.
rm -rf ../../app/assets/php/Framework/routes.db
rm -rf ../../app/uploads
rm -rf ../../app/assets/img/uploads
|
def generateErrorPage(statusCode, errorMessage):
    """Render a minimal centered HTML error fragment for the given status.

    NOTE(review): statusCode and errorMessage are interpolated unescaped into
    the markup; if either can ever carry user-controlled text, escape it
    (html.escape) to avoid HTML injection — confirm against the callers.
    """
    errorPage = f'''
    <div class="container">
        <center>
            <h1 class="display-1">{statusCode}</h1>
            <h2>{errorMessage}</h2>
            <hr width="50px">
            <p>Something went wrong. Please contact web master</p>
        </center>
    </div>
    '''
    return errorPage
//
// JKInterActiveGroupVipUserModel.h
// JKPresentationControllerDemo
//
// Created by xuequan on 2020/10/14.
// Copyright © 2020 xuequan. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Model for a VIP user in an interactive group.
@interface JKInterActiveGroupVipUserModel : NSObject
@property (nonatomic, assign) NSInteger userID; ///< User ID
@property (nonatomic, copy) NSString *nickname; ///< Nickname
@property (nonatomic, copy) NSString *avatar; ///< Avatar (presumably an image URL string — confirm)
@end
NS_ASSUME_NONNULL_END
|
#!/bin/bash
# Cause the entire deployment to fail if something in this script exits with
# a non-zero exit code. This will make debugging your deployment much simpler.
# Read more about this here: http://redsymbol.net/articles/unofficial-bash-strict-mode/
set -euo pipefail
# Make directory for project
mkdir -p /home/datamade/just-spaces
# Decrypt files encrypted with blackbox.
# Fixed: `chown user:group` — the `user.group` dot form is deprecated in GNU
# coreutils and breaks for usernames that contain a dot.
cd /opt/codedeploy-agent/deployment-root/$DEPLOYMENT_GROUP_ID/$DEPLOYMENT_ID/deployment-archive/ && chown -R datamade:datamade . && sudo -H -u datamade blackbox_postdeploy
|
<reponame>k-dominik/nifty<filename>src/python/lib/cgp/geometry.cxx
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
#include <pybind11/stl.h>
#include "export_cell_vector.hxx"
#include "nifty/python/converter.hxx"
#include "nifty/cgp/geometry.hxx"
namespace py = pybind11;
namespace nifty{
namespace cgp{
// Binds a single CellGeometry<DIM, CELL_TYPE> to Python with sequence access
// (__len__/__getitem__), centerOfMass, and numpy conversion via __array__.
template<
    size_t DIM,
    size_t CELL_TYPE,
    class CLS
>
void exportCellGeometryT(py::module & m, py::class_<CLS> & pyCls) {
    pyCls
        //.def("__len__", &CLS::size)
        .def("__len__", [](const CLS & self){
            return self.size();
        })
        .def("__getitem__", [](const CLS & self, uint32_t i){
            return self[i];
        })
        .def("centerOfMass",&CLS::centerOfMass)
        // __array__: copy the cell's coordinates into an (n, DIM) uint32 array
        // so numpy.asarray(geometry) works.
        .def("__array__", [](const CLS & self){
            nifty::marray::PyView<uint32_t> out({size_t(self.size()),size_t(DIM)});
            for(size_t i=0; i<self.size(); ++i){
                const auto & coord = self[i];
                for(size_t d=0; d<DIM; ++d){
                    out(i, d) = coord[d];
                }
            }
            return out;
        })
    ;
}
// Binds a vector of cell geometries: the generic vector interface plus a
// bulk centersOfMass accessor returning an (n, DIM) float array.
template<
    class CLS
>
inline void exportCellGeometryVector(pybind11::module & m, pybind11::class_<CLS> & pyCls) {
    // Export the std::vector-like interface first.
    exportCellVector<CLS>(m, pyCls);
    pyCls
        .def("centersOfMass", [](const CLS & self){
            nifty::marray::PyView<float> cArray({self.size(), CLS::value_type::DimensionType::value});
            // std::size_t loop indices avoid the signed/unsigned comparison
            // warning the original `auto i=0` (int) incurred against size().
            for(std::size_t i=0; i<self.size(); ++i){
                const auto com = self[i].centerOfMass();
                for(std::size_t d=0; d<CLS::value_type::DimensionType::value; ++d){
                    cArray(i, d) = com[d];
                }
            }
            return cArray;
        })
    ;
}
// Registers the 2D CGP geometry types (cells 0/1/2, their vectors, and the
// Geometry2D wrapper) with the Python module.
void exportGeometry2D(py::module & m) {
    // cell 0 geometry
    {
        typedef CellGeometry<2,0> Cell0Geometry2D;
        typedef CellGeometryVector<2,0> Cells0GeometryVector2D;
        const std::string clsName = std::string("Cells0Geometry2D");
        auto cls = py::class_<Cell0Geometry2D>(m, clsName.c_str());
        exportCellGeometryT<2, 0, Cell0Geometry2D>(m, cls);
        const std::string clsNameVec = std::string("Cells0GeometryVector2D");
        auto clsVec = py::class_<Cells0GeometryVector2D>(m, clsNameVec.c_str());
        exportCellGeometryVector<Cells0GeometryVector2D>(m, clsVec);
    }
    // cell 1 geometry
    {
        typedef CellGeometry<2,1> Cell1Geometry2D;
        typedef CellGeometryVector<2,1> Cell1GeometryVector2D;
        const std::string clsName = std::string("Cell1Geometry2D");
        auto cls = py::class_<Cell1Geometry2D>(m, clsName.c_str());
        exportCellGeometryT<2, 1, Cell1Geometry2D>(m, cls);
        const std::string clsNameVec = std::string("Cell1GeometryVector2D");
        auto clsVec = py::class_<Cell1GeometryVector2D>(m, clsNameVec.c_str());
        exportCellGeometryVector<Cell1GeometryVector2D>(m, clsVec);
    }
    // cell 2 geometry
    {
        // Fixed misleading copy-paste names: these are the *cell-2* types but
        // were previously typedef'd as Cell1Geometry2D / Cell1GeometryVector2D.
        typedef CellGeometry<2,2> Cell2Geometry2D;
        typedef CellGeometryVector<2,2> Cell2GeometryVector2D;
        const std::string clsName = std::string("Cell2Geometry2D");
        auto cls = py::class_<Cell2Geometry2D>(m, clsName.c_str());
        exportCellGeometryT<2, 2, Cell2Geometry2D>(m, cls);
        const std::string clsNameVec = std::string("Cell2GeometryVector2D");
        auto clsVec = py::class_<Cell2GeometryVector2D>(m, clsNameVec.c_str());
        exportCellGeometryVector<Cell2GeometryVector2D>(m, clsVec);
    }
    // geometry: per-cell-type accessors over a topological grid
    {
        typedef TopologicalGrid<2> TopologicalGridType;
        typedef Geometry<2> GeometryType;
        const std::string clsName = std::string("Geometry2D");
        py::class_<GeometryType>(m, clsName.c_str())
            .def(py::init<const TopologicalGridType &, const bool, const bool>(),
                py::arg("topologicalGrid"),
                py::arg("fill"),
                py::arg("sort1Cells")
            )
            // reference_internal keeps the Geometry2D alive while Python holds
            // a reference to the returned per-cell geometry vectors.
            .def("cell0Geometry",[](const GeometryType & self){
                return self. template geometry<0>();
            },py::return_value_policy::reference_internal)
            .def("cell1Geometry",[](const GeometryType & self){
                return self. template geometry<1>();
            },py::return_value_policy::reference_internal)
            .def("cell2Geometry",[](const GeometryType & self){
                return self. template geometry<2>();
            },py::return_value_policy::reference_internal)
        ;
    }
}
// Entry point called by the module init: currently only the 2D geometry
// bindings exist.
void exportGeometry(py::module & m) {
    exportGeometry2D(m);
}
}
}
|
#!/bin/sh
# Entrypoint
#
# Runs pre-flight logic before launching into uptimer
#
set -e
# When the instance is launched as a postgres writer, it is assumed to have
# permissions to update the schema. Thus run migrations before launch.
case "$WRITER_PLUGIN" in
    *postgres*)
        dbmate --wait --no-dump-schema up
esac
# exec replaces this shell so uptimer becomes PID 1 and receives signals directly.
exec python -m uptimer
|
#!/bin/bash
# ------------------------------------------------------------------
# [Masaya Ogushi] Elastic Search
#
# library for Unix shell scripts.
#
# Usage:
#     sh shell/elastic_search.sh [HOST] [KEYWORD]
#     If you use the container, you use the --link option
#     docker run --link {elasticsearch running continaer id} -it docker_dialogue/dialogue bash
#     Confirm the other ip address
#     docker inspect {your container id} | grep IPAddress
# ------------------------------------------------------------------
# -- Body ---------------------------------------------------------
# SCRIPT LOGIC GOES HERE
if [ $# -ne 2 ]; then
    echo "$0 [HOST (localhost or IP address or domain)] [KEYWORD]"
    exit 1
fi
# BUG fix: HOST was never assigned, so the `echo $HOST` below always printed
# an empty line. Assign it from the first argument and build the query URL from it.
HOST=$1
QUERY="${HOST}:9200/_all/_search?pretty"
DOUBLE_QUOTE="\""
KEYWORD=${DOUBLE_QUOTE}${2}${DOUBLE_QUOTE}
echo "$HOST"
# Boost title matches (x10) over abstract matches.
curl -XGET "$QUERY" -d'
{
    "query": {
        "bool": {
            "should": [
                {
                    "match": {
                        "title": {
                            "query": '$KEYWORD',
                            "boost": 10
                        }
                    }
                },
                {
                    "match": {
                        "abstract": '$KEYWORD'
                    }
                }
            ]
        }
    }
}'
# -----------------------------------------------------------------
<gh_stars>1-10
var _ = require('lodash'),
defaultPostOptions = {};
class Channel {
    constructor(name, options) {
        // The channel name doubles as its primary context entry.
        this.name = name;

        // Deep-copy the raw options so later mutation of the caller's
        // object cannot affect this channel.
        this._origOptions = _.cloneDeep(options) || {};

        // Route for this channel (translated), defaulting to the root.
        // @TODO should a channel have a route as part of the object? Or should this live elsewhere?
        if (this._origOptions.route) {
            this.route = this.translateRoute(this._origOptions.route);
        } else {
            this.route = '/';
        }

        // Context = the channel name plus any additional contexts, de-duplicated.
        this.context = _.union([this.name], this._origOptions.context);

        // DATA options
        // Options used when fetching related posts.
        this.postOptions = _.defaults({}, defaultPostOptions, this._origOptions.postOptions);

        // RESOURCE!!!
        // @TODO figure out a better way to declare relation to resource
        if (this._origOptions.data) {
            this.data = this._origOptions.data;
        }

        // Template options
        // @TODO fix these HORRIBLE names
        this.slugTemplate = Boolean(this._origOptions.slugTemplate);
        if (this._origOptions.frontPageTemplate) {
            this.frontPageTemplate = this._origOptions.frontPageTemplate;
        }
        if (this._origOptions.editRedirect) {
            this.editRedirect = this._origOptions.editRedirect;
        }
    }

    // Paged unless explicitly disabled in the original options.
    get isPaged() {
        if (_.has(this._origOptions, 'paged')) {
            return this._origOptions.paged;
        }
        return true;
    }

    // RSS enabled unless explicitly disabled in the original options.
    get hasRSS() {
        if (_.has(this._origOptions, 'rss')) {
            return this._origOptions.rss;
        }
        return true;
    }

    // Replace a ":t_<keyword>" placeholder in the route with its keyword text.
    // @TODO find this a more general / global home, as part of the Router system,
    // so that ALL routes that get registered WITH Ghost can do this
    translateRoute(route) {
        const routeKeywords = {
            tag: 'tag',
            author: 'author'
        };
        return route.replace(/:t_([a-zA-Z]+)/, (fullMatch, keyword) => routeKeywords[keyword]);
    }
}

module.exports = Channel;
|
<reponame>Yuheng7/biblioteca3<gh_stars>0
package com.twu.biblioteca;
import java.io.PrintStream;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
public class BibliotecaApp {
    Menu menu;
    ArrayList<Book> books;
    private PrintStream ps;
    InputTaker asker;

    /**
     * @param books initial catalogue of books
     * @param ps    stream all output is written to
     * @param asker reads user input for menu choices
     */
    public BibliotecaApp(ArrayList<Book> books, PrintStream ps, InputTaker asker) {
        this.books = books;
        this.menu = new Menu(books, ps);
        this.ps = ps;
        this.asker = asker;
    }

    /**
     * Prints the welcome banner. (Typo fixed: "Bagalore" -> "Bangalore".)
     * NOTE(review): the {@code printStream} parameter is ignored — output goes to
     * the stream supplied at construction. Confirm whether the parameter can be
     * removed without breaking callers/tests before changing the signature.
     */
    public void printWelcome(PrintStream printStream) {
        this.ps.print("Welcome to Biblioteca. Your one-stop-shop for great book titles in Bangalore!\n");
    }

    /** Runs the menu loop until the user quits, then syncs the updated book list. */
    public void beginMenu() {
        boolean running = true;
        while (running) {
            this.menu.printOptions();
            running = this.menu.carryOutUserOrder();
        }
        this.books = this.menu.returnUpdatedBookList();
    }
}
|
<reponame>JayceChant/commit-msg.go<filename>lang/chinese.go
package lang
import (
. "github.com/JayceChant/commit-msg/state"
)
var (
	// langZhCn is the Simplified-Chinese message pack: per-state hint strings
	// (printf-style placeholders match each state's arguments) plus the
	// commit-message rule text shown to the user.
	langZhCn = &langPack{
		Hints: map[State]string{
			Validated:       "Validated: 提交信息符合规范。",
			Merge:           "Merge: 合并提交,跳过规范检查。",
			ArgumentMissing: "Error ArgumentMissing: 缺少文件参数。",
			FileMissing:     "Error FileMissing: 文件 %s 不存在。",
			ReadError:       "Error ReadError: 读取 %s 错误。",
			EmptyMessage:    "Error EmptyMessage: 提交信息没有内容(不包括空白字符)。",
			EmptyHeader:     "Error EmptyHeader: 标题(第一行)没有内容(不包括空白字符)。",
			BadHeaderFormat: `Error BadHeaderFormat: 标题(第一行)不符合规范:
%s
如果您无法发现错误,请注意是否使用了中文冒号,或者冒号后面缺少空格。`,
			WrongType:             "Error WrongType: %s, 类型关键字应为以下选项中的一个:\n%s",
			ScopeMissing:          "Error ScopeMissing: 类型后面缺少'(scope)'。",
			WrongScope:            "Error WrongScope: %s, 范围关键字应为以下选项中的一个:\n%s",
			BodyMissing:           "Error BodyMissing: 消息体没有内容(不包括空白字符)。",
			NoBlankLineBeforeBody: "Error NoBlankLineBeforeBody: 标题和消息体之间缺少空行。",
			LineOverLong:          "Error LineOverLong: 该行长度为 %d, 超出了 %d 的限制:\n%s",
			// NOTE(review): "UndefindedError" is misspelled, but the name mirrors
			// the State constant defined elsewhere — fix both together or not at all.
			UndefindedError: "Error UndefindedError: 没有预料到的错误,请提交一个错误报告。",
		},
		Rule: `提交信息规范如下:
<type>(<scope>): <subject>
// 空行
<body>
// 空行
<footer>
(<scope>), <body> 和 <footer> 默认可选,也可以在配置设置必选
<type> 必须是关键字 %s 之一
更多信息,请参考项目主页: https://github.com/JayceChant/commit-msg`,
	}
)
|
import React from 'react';
import Navbar from '../components/Navbar';
import IntroSlider from '../containers/IntroSlider/IntroSlider';
import Footer from '../components/Footer/Footer';
import '../style/MainPage.css';
export default function MainPage() {
return(
<>
<Navbar/>
<IntroSlider/>
<Footer/>
</>
);
} |
/**
 * Vuex action: look up the user matching the profile's display name and
 * commit it via LOADED (an empty list on failure).
 *
 * BUG fix: the original passed an undefined identifier `query` to `new User(...)`,
 * which threw a ReferenceError on every call. The destructured `displayName` was
 * unused, so it is presumably the intended search criterion — TODO confirm the
 * shape User's constructor expects.
 */
const updateProfile = async ({ commit, dispatch, state }, userProfile) => {
  const { displayName } = userProfile
  const userSearch = new User({ displayName })
  try {
    const user = await userSearch.get()
    commit('LOADED', [user])
  } catch (error) {
    // On any lookup failure, clear the loaded list rather than propagating.
    commit('LOADED', [])
  }
}

export default {
  updateProfile
}
|
package com.iovation.launchkey.sdk;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iovation.launchkey.sdk.cache.Cache;
import com.iovation.launchkey.sdk.client.DirectoryFactory;
import com.iovation.launchkey.sdk.client.OrganizationFactory;
import com.iovation.launchkey.sdk.client.ServiceFactory;
import com.iovation.launchkey.sdk.crypto.JCECrypto;
import com.iovation.launchkey.sdk.crypto.jwe.Jose4jJWEService;
import com.iovation.launchkey.sdk.crypto.jwt.Jose4jJWTService;
import com.iovation.launchkey.sdk.transport.Transport;
import com.iovation.launchkey.sdk.transport.apachehttp.ApacheHttpTransport;
import com.iovation.launchkey.sdk.transport.domain.EntityIdentifier;
import com.iovation.launchkey.sdk.transport.domain.EntityIdentifier.EntityType;
import com.iovation.launchkey.sdk.transport.domain.EntityKeyMap;
import org.apache.http.client.HttpClient;
import java.security.Provider;
import java.security.interfaces.RSAPrivateKey;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
/**
* Factory for building {@link ServiceFactory},
* {@link DirectoryFactory}, and
* {@link OrganizationFactory} objects.
*/
public class FactoryFactory {
    private final HttpClient httpClient;
    private final Cache keyCache;
    private final Provider provider;
    private final String apiBaseURL;
    private final String apiIdentifier;
    private final int requestExpireSeconds;
    private final int offsetTTL;
    private final int currentPublicKeyTTL;
    private final EntityKeyMap entityKeyMap;

    /**
     * @param provider JCE provider
     * @param httpClient HTTP client
     * @param keyCache Caching for public keys from LaunchKey API
     * @param apiBaseURL Base URL for the Platform API
     * @param apiIdentifier JWT identifier for the API. Used to send requests with the proper ID and validate
     * responses and server sent events.
     * @param requestExpireSeconds The number of seconds until a request JWT should expire.
     * @param offsetTTL The number of seconds the API time offset will live before obtaining another using a ping call.
     * @param currentPublicKeyTTL The number of seconds to current public key as reported by a public key call will
     * live before obtaining the value again from the API.
     * @param entityKeyMap Mapping of entity private keys to allow for parsing Server Sent Events from entities
     */
    public FactoryFactory(
            Provider provider, HttpClient httpClient, Cache keyCache,
            String apiBaseURL, String apiIdentifier, int requestExpireSeconds,
            int offsetTTL, int currentPublicKeyTTL, EntityKeyMap entityKeyMap) {
        this.provider = provider;
        this.httpClient = httpClient;
        this.apiBaseURL = apiBaseURL;
        this.apiIdentifier = apiIdentifier;
        this.requestExpireSeconds = requestExpireSeconds;
        this.keyCache = keyCache;
        this.offsetTTL = offsetTTL;
        this.currentPublicKeyTTL = currentPublicKeyTTL;
        this.entityKeyMap = entityKeyMap;
    }

    /** Builds a {@link ServiceFactory} from a single PEM-encoded private key. */
    public ServiceFactory makeServiceFactory(String serviceId, String privateKeyPEM) {
        RSAPrivateKey privateKey = makePrivateKeyFromPEM(privateKeyPEM);
        String publicKeyFingerprint = getPublicKeyFingerprintFromPrivateKey(privateKey);
        return makeServiceFactory(serviceId, singleKeyMap(publicKeyFingerprint, privateKey), publicKeyFingerprint);
    }

    /**
     * Builds a {@link ServiceFactory} from a fingerprint-keyed map of private keys.
     * @throws IllegalArgumentException if serviceId is null or not a valid UUID
     */
    public ServiceFactory makeServiceFactory(String serviceId, Map<String, RSAPrivateKey> privateKeys, String currentPrivateKey) {
        // Null check added for consistency with makeDirectoryFactory/makeOrganizationFactory.
        if (serviceId == null) throw new IllegalArgumentException("Argument serviceId cannot be null");
        UUID serviceUUID = UUID.fromString(serviceId);
        EntityIdentifier serviceEntity = new EntityIdentifier(EntityType.SERVICE, serviceUUID);
        Transport transport = getTransport(serviceEntity, privateKeys, currentPrivateKey);
        return new ServiceFactory(transport, serviceUUID);
    }

    /** Builds a {@link DirectoryFactory} from a single PEM-encoded private key. */
    public DirectoryFactory makeDirectoryFactory(String directoryId, String privateKeyPEM) {
        RSAPrivateKey privateKey = makePrivateKeyFromPEM(privateKeyPEM);
        String publicKeyFingerprint = getPublicKeyFingerprintFromPrivateKey(privateKey);
        return makeDirectoryFactory(directoryId, singleKeyMap(publicKeyFingerprint, privateKey), publicKeyFingerprint);
    }

    /**
     * Builds a {@link DirectoryFactory} from a fingerprint-keyed map of private keys.
     * @throws IllegalArgumentException if directoryId is null or not a valid UUID
     */
    public DirectoryFactory makeDirectoryFactory(String directoryId, Map<String, RSAPrivateKey> privateKeys,
                                                 String currentPrivateKey) {
        if (directoryId == null) throw new IllegalArgumentException("Argument directoryId cannot be null");
        UUID directoryUUID = UUID.fromString(directoryId);
        EntityIdentifier directoryEntity = new EntityIdentifier(EntityType.DIRECTORY, directoryUUID);
        Transport transport = getTransport(directoryEntity, privateKeys, currentPrivateKey);
        return new DirectoryFactory(transport, directoryUUID);
    }

    /** Builds an {@link OrganizationFactory} from a single PEM-encoded private key. */
    public synchronized OrganizationFactory makeOrganizationFactory(String organizationId, String privateKeyPEM) {
        RSAPrivateKey privateKey = makePrivateKeyFromPEM(privateKeyPEM);
        String publicKeyFingerprint = getPublicKeyFingerprintFromPrivateKey(privateKey);
        return makeOrganizationFactory(organizationId, singleKeyMap(publicKeyFingerprint, privateKey), publicKeyFingerprint);
    }

    /**
     * Builds an {@link OrganizationFactory} from a fingerprint-keyed map of private keys.
     * @throws IllegalArgumentException if organizationId is null or not a valid UUID
     */
    public synchronized OrganizationFactory makeOrganizationFactory(String organizationId,
                                                                    Map<String, RSAPrivateKey> privateKeys,
                                                                    String currentPrivateKey) {
        if (organizationId == null) throw new IllegalArgumentException("Argument organizationId cannot be null");
        UUID organizationUUID = UUID.fromString(organizationId);
        EntityIdentifier organizationEntity = new EntityIdentifier(EntityType.ORGANIZATION, organizationUUID);
        Transport transport = getTransport(organizationEntity, privateKeys, currentPrivateKey);
        return new OrganizationFactory(transport, organizationUUID);
    }

    /**
     * Builds a thread-safe single-entry key map. Extracted from the three
     * single-key factory methods, which previously duplicated this block.
     */
    private Map<String, RSAPrivateKey> singleKeyMap(String fingerprint, RSAPrivateKey privateKey) {
        Map<String, RSAPrivateKey> keys = new ConcurrentHashMap<>();
        keys.put(fingerprint, privateKey);
        return keys;
    }

    /** Registers the entity's keys and wires up the Apache HTTP transport. */
    private Transport getTransport(
            EntityIdentifier entityIdentifier, Map<String, RSAPrivateKey> privateKeys, String currentPrivateKeyId) {
        for (Map.Entry<String, RSAPrivateKey> entry : privateKeys.entrySet()) {
            entityKeyMap.addKey(entityIdentifier, entry.getKey(), entry.getValue());
        }
        return new ApacheHttpTransport(
                httpClient,
                new JCECrypto(provider),
                getObjectMapper(),
                keyCache,
                apiBaseURL,
                entityIdentifier,
                new Jose4jJWTService(apiIdentifier, privateKeys, currentPrivateKeyId, requestExpireSeconds),
                new Jose4jJWEService(privateKeys.get(currentPrivateKeyId)),
                offsetTTL,
                currentPublicKeyTTL,
                entityKeyMap
        );
    }

    private ObjectMapper getObjectMapper() {
        return new ObjectMapper();
    }

    private RSAPrivateKey makePrivateKeyFromPEM(String privateKeyPEM) {
        return JCECrypto.getRSAPrivateKeyFromPEM(provider, privateKeyPEM);
    }

    private String getPublicKeyFingerprintFromPrivateKey(RSAPrivateKey privateKey) {
        return JCECrypto.getRsaPublicKeyFingerprint(provider, privateKey);
    }
}
|
#!/bin/bash
# To run all in a folder tests/
# To run all in a file tests/test_foo.py
# To run all in a class tests/test_foo.py::TestFoo
# To run a single test tests/test_foo.py::TestFoo::test_foo
# Run from the repository root with the sqlite test configuration;
# "$@" forwards any extra pytest selectors/flags given to this script.
cd ..
export PICCOLO_CONF="tests.sqlite_conf"
python -m pytest --cov=piccolo -s $@
|
# Start a disposable local Postgres 13.4 container with data persisted to ./datapg.
# NOTE(review): host port 5423 (not the usual 5432) is published — possibly
# deliberate to avoid clashing with a locally installed Postgres; confirm.
docker run --rm \
  --name vavr-postgres \
  -e POSTGRES_PASSWORD=secret \
  -e POSTGRES_USER=user \
  -e POSTGRES_DB=db \
  -p 5423:5432 \
  -v $(pwd)/datapg:/var/lib/postgresql/data \
  -d \
  postgres:13.4-alpine3.14
|
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (c) 2017-2020 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
/**
 * StreamingQueryListener that mirrors query lifecycle events into the
 * singleton StreamingRepository (presumably backing a monitoring UI — confirm).
 */
class SnappyStreamingQueryListener extends StreamingQueryListener {
  private val streamingRepo = StreamingRepository.getInstance

  // Register the query, keyed by id, with a stats holder stamped at start time.
  override def onQueryStarted(event: StreamingQueryListener.QueryStartedEvent): Unit = {
    // Fall back to the id when the query was not given an explicit name.
    val queryName = {
      if (event.name == null || event.name.isEmpty) {
        event.id.toString
      } else {
        event.name
      }
    }
    streamingRepo.addQuery(event.id,
      new StreamingQueryStatistics(
        event.id,
        queryName,
        event.runId,
        System.currentTimeMillis(),
        event.triggerInterval))
  }

  // Push each progress report into the repository.
  override def onQueryProgress(event: StreamingQueryListener.QueryProgressEvent): Unit = {
    streamingRepo.updateQuery(event.progress)
  }

  // Mark the query inactive once it terminates.
  override def onQueryTerminated(event: StreamingQueryListener.QueryTerminatedEvent): Unit = {
    streamingRepo.setQueryStatus(event.id, false)
  }
}
|
package net.member;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import net.bbs.BbsDTO;
import DBPKG.DBOpen;
public class MemberDAO {
//로그인
public String loginProc(MemberDTO dto){
String mlevel=null;
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
sql.append(" SELECT mlevel ");
sql.append(" FROM member ");
sql.append(" WHERE id=? AND passwd=? ");
sql.append(" AND mlevel in ('A1', 'B1', 'C1', 'D1') ");
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1, dto.getId());
pstmt.setString(2, dto.getPasswd());
ResultSet rs=pstmt.executeQuery();
if(rs.next()){
mlevel=rs.getString("mlevel");
}else{
mlevel=null;
}
}catch(Exception e){
System.out.println("로그인 실패 : "+e);
}
return mlevel;
}//loginProc
//회원가입 아이디 중복확인
public int duplecateID(String id){
int cnt=0;
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
//가상의 테이블명을 안 정해주면 오류
sql.append(" SELECT COUNT(id) as cnt ");
sql.append(" FROM member ");
sql.append(" WHERE id=? ");
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1, id);
ResultSet rs=pstmt.executeQuery();
if(rs.next()){
cnt=rs.getInt("cnt"); //여기서 사용해야 하니까
}
}catch(Exception e){
System.out.println("ID 중복확인 실패 : "+e);
}
return cnt;
}//duplecateID
//회원가입 이메일 중복확인
public int duplecateEmail(String email){
int cnt=0;
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
//가상의 테이블명을 안 정해주면 오류
sql.append(" SELECT COUNT(email) as cnt ");
sql.append(" FROM member ");
sql.append(" WHERE email=? ");
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1, email);
ResultSet rs=pstmt.executeQuery();
if(rs.next()){
cnt=rs.getInt("cnt"); //여기서 사용해야 하니까
}
}catch(Exception e){
System.out.println("Email 중복확인 실패 : "+e);
}
return cnt;
}//duplecateEmail
//회원가입 등록
public int insert(MemberDTO dto){
int cnt=0;
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
sql.append(" INSERT INTO member(id, passwd, mname, tel, email, zipcode, address1, address2, job, mlevel, mdate) ");
sql.append(" VALUES(?,?,?,?,?,?,?,?,?,'D1',SYSDATE) ");
//4)SQL문 변환
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1, dto.getId());
pstmt.setString(2, dto.getPasswd());
pstmt.setString(3, dto.getMname());
pstmt.setString(4, dto.getTel());
pstmt.setString(5, dto.getEmail());
pstmt.setString(6, dto.getZipcode());
pstmt.setString(7, dto.getAddress1());
pstmt.setString(8, dto.getAddress2());
pstmt.setString(9, dto.getJob());
//5)SQL문 실행
cnt=pstmt.executeUpdate();
}catch(Exception e){
System.out.println("행추가 실패"+e);
}//try
return cnt;
}//insert
//회원정보 수정 읽어오는 목록
public MemberDTO read(MemberDTO dto){
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
sql.append(" SELECT id, passwd, mname, email, tel, zipcode, address1, address2, job ");
sql.append(" FROM member ");
sql.append(" WHERE id=? AND passwd=? ");
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1, dto.getId());
pstmt.setString(2, dto.getPasswd());
ResultSet rs=pstmt.executeQuery();
if(rs.next()){
dto=new MemberDTO();
dto.setId(rs.getString("id"));
dto.setPasswd(<PASSWORD>"));
dto.setMname(rs.getString("mname"));
dto.setEmail(rs.getString("email"));
dto.setTel(rs.getString("tel"));
dto.setZipcode(rs.getString("zipcode"));
dto.setAddress1(rs.getString("address1"));
dto.setAddress2(rs.getString("address2"));
dto.setJob(rs.getString("job"));
}else{
dto=null;
}//if
}catch(Exception e){
System.out.println("회원정보 불러오기 실패 : "+e);
}//try
return dto;
}//read
//회원정보 수정
public int update(MemberDTO dto){
int cnt=0;
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
sql.append(" UPDATE member ");
sql.append(" SET passwd=?, mname=?, tel=?, email=?, zipcode=?, address1=?, address2=?, job=? ");
sql.append(" WHERE id=? ");
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1,dto.getPasswd());
pstmt.setString(2,dto.getMname());
pstmt.setString(3,dto.getTel());
pstmt.setString(4,dto.getEmail());
pstmt.setString(5,dto.getZipcode());
pstmt.setString(6,dto.getAddress1());
pstmt.setString(7,dto.getAddress2());
pstmt.setString(8,dto.getJob());
pstmt.setString(9,dto.getId());
cnt=pstmt.executeUpdate();
}catch(Exception e){
System.out.println("회원정보 수정 실패 : "+e);
}//try
return cnt;
}//update
//회원탈퇴지만 mlevel을 F1으로 조정하기
public int delUpdate(MemberDTO dto){
int cnt=0;
try{
Connection con=DBOpen.getConnection();
StringBuilder sql=new StringBuilder();
sql.append(" UPDATE member ");
sql.append(" SET mlevel='F1' ");
sql.append(" WHERE id=? AND passwd=? ");
PreparedStatement pstmt=con.prepareStatement(sql.toString());
pstmt.setString(1, dto.getId());
pstmt.setString(2, dto.getPasswd());
cnt=pstmt.executeUpdate();
}catch(Exception e){
System.out.println("회원탈퇴 실패 : "+e);
}//try
return cnt;
}//delUpdate
}//
|
package elasta.orm;
import elasta.core.promise.impl.Promises;
/**
* Created by sohan on 3/17/2017.
*/
public class Main {
    // Smoke test: an empty promise filtered by an always-true predicate should
    // resolve, printing "passed".
    public static void main(String[] asfd) {
        Promises.empty().filter(o -> true).then(o -> System.out.println("passed"));
    }
}
|
<reponame>tetutaro/ls_pyenv
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from typing import List, Dict
import os
import sys
import subprocess
from collections import defaultdict
from argparse import ArgumentParser
def search_python_version_file(directory: str) -> List[str]:
    '''search .python-version file under the indicated directory

    Walks the tree with os.walk instead of shelling out to ``find``: the
    original built the command with ``f'find {directory} ...'.split()``,
    which broke for directories containing whitespace, and needed an
    external binary.

    Raises ValueError if the directory is missing/not a directory, or if
    no .python-version file is found under it.
    '''
    if not os.path.exists(directory):
        raise ValueError(f'{directory} is not exists')
    if not os.path.isdir(directory):
        raise ValueError(f'{directory} is not directory')
    vfiles = []
    for root, _dirs, files in os.walk(directory):
        if '.python-version' in files:
            path = os.path.join(root, '.python-version')
            # mirror the original's guard: keep regular files only
            if os.path.isfile(path):
                vfiles.append(path)
    if len(vfiles) == 0:
        raise ValueError(f'no Python project under {directory}')
    return vfiles
def read_python_version_file(vfiles: List[str]) -> Dict[str, str]:
    '''read .python-version file and get the name of Python virtualenv

    Maps each project directory (the file's parent) to the virtualenv
    named on the file's first line, with surrounding whitespace removed.
    '''
    versions: Dict[str, str] = {}
    for path in vfiles:
        with open(path, 'rt') as handle:
            env_name = handle.readline().strip()
        versions[os.path.dirname(path)] = env_name
    return versions
def print_python_versions(vdict: Dict[str, str]) -> None:
    """Print a summary of virtualenv usage grouped by virtualenv name.

    vdict maps project directory -> virtualenv name. The longest common
    path prefix is stripped so project paths print relative to it.
    """
    common_dir = None
    vcount = defaultdict(list)
    # extract common_dir: component-wise longest common prefix of all dirs
    for vdir in vdict.keys():
        if common_dir is None:
            common_dir = vdir.split(os.sep)
            continue
        vpath = vdir.split(os.sep)
        for i, cp in enumerate(common_dir):
            if len(vpath) > i and common_dir[i] == vpath[i]:
                continue
            else:
                # mismatch (or vpath exhausted): truncate the shared prefix here
                common_dir = common_dir[:i]
                break
    # print out
    print(f'Search under the directory: {os.sep.join(common_dir)}')
    for vdir, vname in sorted(vdict.items()):
        # path relative to the common prefix; '.' for the prefix itself
        vtdir = os.sep.join(vdir.split(os.sep)[len(common_dir):])
        if vtdir == '':
            vtdir = '.'
        vcount[vname].append(vtdir)
    for vname, vtdirs in sorted(vcount.items()):
        print(f'{vname} ({len(vtdirs)}):')
        for vtd in sorted(vtdirs):
            print(f'    {vtd}')
    return
def main() -> None:
    """CLI entry point: find .python-version files and summarize env usage."""
    parser = ArgumentParser()
    parser.add_argument(
        '-d', '--directory', default='.', type=str,
        help=(
            'the directory you search .python-version files recursively.'
            ' default: "." (current working directory)'
        )
    )
    args = parser.parse_args()
    try:
        # vars(args) contains exactly the 'directory' keyword argument
        vfiles = search_python_version_file(**vars(args))
    except Exception as e:
        # report and exit non-zero on missing directory / no projects found
        print(e)
        sys.exit(1)
    vdict = read_python_version_file(vfiles=vfiles)
    print_python_versions(vdict=vdict)
    return
if __name__ == '__main__':
main()
|
#include <bits/stdc++.h>
using namespace std;
// Competitive-programming scaffold: reads t test cases, each a single string.
// NOTE(review): the per-case body is empty — each string is read and
// discarded; presumably a template awaiting the actual solution.
int main()
{
    // untie C++ streams from C stdio for fast input
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);
    int t ;
    cin >> t;
    while(t--){
        string s;
        cin >> s;
    }
    return 0;
}
<reponame>crainiarc/yotacast<gh_stars>1-10
package com.yotadevices.sdk.exception;
import android.util.AndroidRuntimeException;
/**
 * Thrown when a subclass of BSActivity overrides a lifecycle method without
 * calling through to the superclass implementation.
 * (NOTE(review): original comment said "BSActiviy" — assumed typo for BSActivity.)
 */
public final class SuperNotCalledException extends AndroidRuntimeException {
    public SuperNotCalledException(String msg) {
        super(msg);
    }
}
|
<filename>src/libs/axios.js
import axios from 'axios'
// import store from '@/store'
import {getToken, setToken,removeToken} from '@/libs/util'
import router from '../router'
import config from '../router'
import {Message} from 'iview'
let message = null
/**
 * Thin axios wrapper: adds the bearer token to every request, tracks
 * in-flight requests per URL, and centralizes auth/network error handling.
 */
class HttpRequest {
    /**
     * @param {string} baseUrl - API origin prepended to every request URL.
     * BUG fix: the original default (`baseUrl = baseURL`) referenced an
     * undefined global and threw a ReferenceError whenever the constructor
     * was called without an argument; an empty string is a safe default.
     */
    constructor(baseUrl = '') {
        this.baseUrl = baseUrl
        this.queue = {}
    }
    // Base config applied to every request.
    getInsideConfig() {
        const config = {
            baseURL: this.baseUrl,
            headers: {
                'Content-Type': 'application/x-www-form-urlencoded',
                'Accept': 'application/json'
            }
        }
        return config
    }
    // Remove a finished request from the in-flight queue.
    destroy(url) {
        delete this.queue[url]
        if (!Object.keys(this.queue).length) {
            // Spin.hide()
        }
    }
    interceptors(instance, url) {
        // Request interceptor: attach the auth token and track the request.
        instance.interceptors.request.use(config => {
            config.headers.Authorization = 'Bearer ' + getToken()
            if (!Object.keys(this.queue).length) {
                // Spin.show() // intentionally disabled: global spinner is unfriendly
            }
            this.queue[url] = true
            return config
        }, error => {
            return Promise.reject(error)
        })
        // Response interceptor.
        instance.interceptors.response.use(res => {
            this.destroy(url)
            const {data, status} = res
            return {data, status}
        }, error => {
            this.destroy(url)
            // Network failure (no HTTP response at all): clear the token,
            // show a one-shot message, and send the user home.
            // NOTE(review): `config.homeName` reads the router import aliased
            // as `config` at the top of this file — looks like it should be a
            // real config module; confirm.
            if (String(error) === 'Error: Network Error') {
                setToken('')
                if (message === null) {
                    message = Message.info({
                        content: '后端服务未启动',
                        duration: 3,
                        closable: true,
                        onClose: () => {
                            message = null
                        },
                    })
                }
                router.push({
                    name: config.homeName
                }).catch(() => {
                })
            }
            let errorInfo = error.response
            // BUG fix: guard *before* dereferencing — the original read
            // errorInfo.data.code first and crashed with a TypeError whenever
            // the request never received a response (errorInfo undefined).
            if (!errorInfo) {
                const {request: {statusText, status}, config} = JSON.parse(JSON.stringify(error))
                errorInfo = {
                    statusText,
                    status,
                    request: {responseURL: config.url}
                }
                return Promise.reject(errorInfo)
            }
            // 401: token invalid/expired — clear it and go home.
            if (errorInfo.data.code == 401) {
                setToken('')
                router.push({
                    name: config.homeName
                }).catch(() => {
                })
            }
            // 405 on /users/info: account has no assigned permissions yet.
            if (errorInfo.data.code == 405 && url == "/users/info") {
                removeToken()
                Message.info({
                    content: '请联系管理员分配用户权限',
                    duration: 3,
                    closable: true
                })
                setTimeout(() => {
                    location.reload()
                }, 1000);
            }
            return Promise.reject(errorInfo)
        })
    }
    // Create a fresh axios instance per request with interceptors attached.
    request(options) {
        const instance = axios.create()
        options = Object.assign(this.getInsideConfig(), options)
        this.interceptors(instance, options.url)
        return instance(options)
    }
}
export default HttpRequest
|
# -*- coding: utf-8 -*-
=begin rdoc
どんなオブジェクトでも、GLib::Objectのように、signalを発生させたり設定したりできるようにするモジュール。
=end
module PseudoSignalHandler
# シグナル _signal_ に、ハンドラを登録する。シグナルIDを返す。
def signal_connect(signal, proc=Proc.new)
__signals[signal.to_sym] << proc
proc.__id__ end
# シグナルID _sid_ の登録を解除する
def signal_handler_disconnect(sid)
__signals.each{ |pair|
break if pair.last.reject!{ |handler| handler.__id__ == sid } }
self end
# シグナル _signal_ を発生させる。
# シグナルには、第一引数に _self_ 、第二引数以降に _args_ が渡される。
def signal_emit(signal, *args)
__signals[signal.to_sym].each{ |handler|
Delayer.new{
if not destroyed?
handler.call(*[self, *args][0, (handler.arity <= -1 ? (args.size + 1) : handler.arity)]) end } }
self end
private
def __signals
@__signals ||= Hash.new{ |h, k| h[k] = [] } end
end
|
<gh_stars>1-10
#####
# BA Amadou 16 187 314
# YING Xu 18 205 032
# ABOU Hamza 17 057 836
###
import os, sys
sys.path.append(os.path.dirname(os.path.join(os.getcwd())))
from sklearn.linear_model import LogisticRegression
from classifiers.abstract_classifier import AbstractClassifier
class LogisticRegressionClassifier(AbstractClassifier):
    """Logistic-regression classifier wrapping scikit-learn's LogisticRegression.

    Note: the parameter name 'approch' (sic) matches the spelling used by
    AbstractClassifier across this project and is kept for compatibility.
    """

    def __init__(self, penalty='l1', C=1, approch='0'):
        # liblinear is chosen because it supports the l1 penalty;
        # C is the inverse regularization strength.
        super().__init__(LogisticRegression(penalty=penalty, solver="liblinear", C=C), approch)
#pragma once
#include <SFML/Graphics.hpp>
#include <iostream>
#include "Spring.h"
#include "Head.h"
// Component-wise product of two 2D vectors (definitions live in the .cpp).
sf::Vector2f operator*(sf::Vector2f v, sf::Vector2f v1);
// Coordinate conversion helper — presumably between simulation and screen
// space; confirm against the implementation.
sf::Vector2f conv(sf::Vector2f unconv);
// Debug print of vector v labelled with s.
void vCout(sf::Vector2f v, std::string s);
// Returns the Head under the mouse cursor (or interaction target) in the
// given window, if any — TODO confirm null semantics in the implementation.
Head* mouseHeadInteract(sf::RenderWindow& window, std::vector<Head> &h);
// Euclidean length of v.
float pythag(sf::Vector2f v);
// Advance the head/spring simulation by one step.
void headStep(std::vector<Head>& h, std::vector<Spring> &s);
// Spring/rectangle resolution step — name suggests rect restoring force;
// confirm in the implementation.
void springRectRF(std::vector<Head>& h, std::vector<Spring>& s);
|
# All eight local GPUs, exposed to the job via CUDA_VISIBLE_DEVICES below.
CUDA_ID=0,1,2,3,4,5,6,7

# Distributed linear-classification (lincls) evaluation of a pretrained
# ResNet-50. NOTE(review): SAVE_DIR, PRETRAINED_MODEL and DATA_DIR are
# placeholders to be substituted before running.
CUDA_DEVICE_ORDER=PCI_BUS_ID CUDA_VISIBLE_DEVICES=${CUDA_ID} \
python main_lincls.py \
  -a resnet50 \
  --lr 30.0 \
  --workers 16 \
  --batch-size 256 \
  --save-dir SAVE_DIR \
  --pretrained PRETRAINED_MODEL \
  --dist-url 'tcp://localhost:10001' --multiprocessing-distributed --world-size 1 --rank 0 \
  DATA_DIR
#!/bin/bash

# This script fetches Envoy source code to $SOURCE_DIR
#
# Requires:
#  - $SOURCE_DIR, a directory where sources will be placed
#
# Optional:
#  - $ENVOY_TAG, git tag to reference specific revision
#  - $ENVOY_COMMIT_HASH, hash of the git commit. If specified, then $ENVOY_TAG will be ignored
#
# at least one of $ENVOY_TAG or $ENVOY_COMMIT_HASH should be specified

set -o errexit
set -o pipefail
set -o nounset

source "$(dirname -- "${BASH_SOURCE[0]}")/../common.sh"

ENVOY_TAG=${ENVOY_TAG:-}
ENVOY_COMMIT_HASH=${ENVOY_COMMIT_HASH:-}

[[ -z "${ENVOY_TAG}" ]] && [[ -z "${ENVOY_COMMIT_HASH}" ]] && msg_err "Error: either ENVOY_TAG or ENVOY_COMMIT_HASH should be specified"

# clone Envoy repo if not exists
if [[ ! -d "${SOURCE_DIR}" ]]; then
  mkdir -p "${SOURCE_DIR}"
  (
    cd "${SOURCE_DIR}"
    git init .
    git remote add origin https://github.com/envoyproxy/envoy.git
  )
else
  echo "Envoy source directory already exists, just fetching"
  # FIX: ${SOURCE_DIR} is now quoted — the unquoted expansion word-split
  # (and glob-expanded) on paths containing spaces.
  pushd "${SOURCE_DIR}" && git fetch --all && popd
fi

pushd "${SOURCE_DIR}"
# Shallow-fetch exactly the requested revision; a commit hash wins over a tag.
git fetch origin --depth=1 "${ENVOY_COMMIT_HASH:-${ENVOY_TAG}}"
git reset --hard FETCH_HEAD
echo "ENVOY_TAG=${ENVOY_TAG}"
echo "ENVOY_COMMIT_HASH=${ENVOY_COMMIT_HASH}"
popd
|
<gh_stars>10-100
#pragma once
// This file was created automatically, do not modify the contents of this file.
PRAGMA_DISABLE_DEPRECATION_WARNINGS
#include "CoreMinimal.h"
#include "ManageEventSender.h"
#include "Generate/Manage/ManageRectLightComponent.h"
#include "Runtime/Engine/Classes/Components/RectLightComponent.h"
// Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\Engine\Classes\Components\RectLightComponent.h:20
// C ABI surface for marshalling URectLightComponent to managed (.NET) code.
// Auto-generated — do not hand-edit behavior. The "_Supper__" spelling is
// produced by the generator (sic, for "Super") and is part of the exported
// ABI; it must not be corrected here.
extern "C"
{
    // ---- Property accessors ----
    DOTNET_EXPORT auto E_PROP_URectLightComponent_BarnDoorAngle_GET(URectLightComponent* Ptr) { return Ptr->BarnDoorAngle; }
    DOTNET_EXPORT void E_PROP_URectLightComponent_BarnDoorAngle_SET(URectLightComponent* Ptr, float Value) { Ptr->BarnDoorAngle = Value; }
    DOTNET_EXPORT auto E_PROP_URectLightComponent_BarnDoorLength_GET(URectLightComponent* Ptr) { return Ptr->BarnDoorLength; }
    DOTNET_EXPORT void E_PROP_URectLightComponent_BarnDoorLength_SET(URectLightComponent* Ptr, float Value) { Ptr->BarnDoorLength = Value; }
    DOTNET_EXPORT auto E_PROP_URectLightComponent_SourceHeight_GET(URectLightComponent* Ptr) { return Ptr->SourceHeight; }
    DOTNET_EXPORT void E_PROP_URectLightComponent_SourceHeight_SET(URectLightComponent* Ptr, float Value) { Ptr->SourceHeight = Value; }
    DOTNET_EXPORT auto E_PROP_URectLightComponent_SourceWidth_GET(URectLightComponent* Ptr) { return Ptr->SourceWidth; }
    DOTNET_EXPORT void E_PROP_URectLightComponent_SourceWidth_SET(URectLightComponent* Ptr, float Value) { Ptr->SourceWidth = Value; }

    // ---- Construction ----
    DOTNET_EXPORT INT_PTR E_NewObject_URectLightComponent(UObject* Parent, char* Name)
    {
        return (INT_PTR)NewObject<URectLightComponent>(Parent, FName(UTF8_TO_TCHAR(Name)));
    }

    // ---- Direct method wrappers ----
    DOTNET_EXPORT auto E_URectLightComponent_SetBarnDoorAngle(URectLightComponent* Self, float NewValue)
    {
        auto _p0 = NewValue;
        Self->SetBarnDoorAngle(_p0);
    }
    DOTNET_EXPORT auto E_URectLightComponent_SetBarnDoorLength(URectLightComponent* Self, float NewValue)
    {
        auto _p0 = NewValue;
        Self->SetBarnDoorLength(_p0);
    }
    DOTNET_EXPORT auto E_URectLightComponent_SetSourceHeight(URectLightComponent* Self, float NewValue)
    {
        auto _p0 = NewValue;
        Self->SetSourceHeight(_p0);
    }
    DOTNET_EXPORT auto E_URectLightComponent_SetSourceWidth(URectLightComponent* Self, float bNewValue)
    {
        auto _p0 = bNewValue;
        Self->SetSourceWidth(_p0);
    }

    // ---- Base-class ("super") trampolines, dispatched through the
    // ---- UManageRectLightComponent proxy ----
    DOTNET_EXPORT auto E__Supper__URectLightComponent_UpdateLightGUIDs(ULightComponentBase* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__UpdateLightGUIDs();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_DetachFromParent(USceneComponent* Self, bool bMaintainWorldPosition, bool bCallModify)
    {
        auto _p0 = bMaintainWorldPosition;
        auto _p1 = bCallModify;
        ((UManageRectLightComponent*)Self)->_Supper__DetachFromParent(_p0, _p1);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnAttachmentChanged(USceneComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnAttachmentChanged();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnHiddenInGameChanged(USceneComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnHiddenInGameChanged();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnVisibilityChanged(USceneComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnVisibilityChanged();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PropagateLightingScenarioChange(USceneComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PropagateLightingScenarioChange();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_UpdateBounds(USceneComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__UpdateBounds();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_UpdatePhysicsVolume(USceneComponent* Self, bool bTriggerNotifiers)
    {
        auto _p0 = bTriggerNotifiers;
        ((UManageRectLightComponent*)Self)->_Supper__UpdatePhysicsVolume(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_Activate(UActorComponent* Self, bool bReset)
    {
        auto _p0 = bReset;
        ((UManageRectLightComponent*)Self)->_Supper__Activate(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_BeginPlay(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__BeginPlay();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_CreateRenderState_Concurrent(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__CreateRenderState_Concurrent();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_Deactivate(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__Deactivate();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_DestroyComponent(UActorComponent* Self, bool bPromoteChildren)
    {
        auto _p0 = bPromoteChildren;
        ((UManageRectLightComponent*)Self)->_Supper__DestroyComponent(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_DestroyRenderState_Concurrent(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__DestroyRenderState_Concurrent();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_InitializeComponent(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__InitializeComponent();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_InvalidateLightingCacheDetailed(UActorComponent* Self, bool bInvalidateBuildEnqueuedLighting, bool bTranslationOnly)
    {
        auto _p0 = bInvalidateBuildEnqueuedLighting;
        auto _p1 = bTranslationOnly;
        ((UManageRectLightComponent*)Self)->_Supper__InvalidateLightingCacheDetailed(_p0, _p1);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnActorEnableCollisionChanged(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnActorEnableCollisionChanged();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnComponentCreated(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnComponentCreated();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnComponentDestroyed(UActorComponent* Self, bool bDestroyingHierarchy)
    {
        auto _p0 = bDestroyingHierarchy;
        ((UManageRectLightComponent*)Self)->_Supper__OnComponentDestroyed(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnCreatePhysicsState(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnCreatePhysicsState();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnDestroyPhysicsState(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnDestroyPhysicsState();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnRegister(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnRegister();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnRep_IsActive(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnRep_IsActive();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnUnregister(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnUnregister();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_RegisterComponentTickFunctions(UActorComponent* Self, bool bRegister)
    {
        auto _p0 = bRegister;
        ((UManageRectLightComponent*)Self)->_Supper__RegisterComponentTickFunctions(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_SendRenderDynamicData_Concurrent(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__SendRenderDynamicData_Concurrent();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_SendRenderTransform_Concurrent(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__SendRenderTransform_Concurrent();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_SetActive(UActorComponent* Self, bool bNewActive, bool bReset)
    {
        auto _p0 = bNewActive;
        auto _p1 = bReset;
        ((UManageRectLightComponent*)Self)->_Supper__SetActive(_p0, _p1);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_SetAutoActivate(UActorComponent* Self, bool bNewAutoActivate)
    {
        auto _p0 = bNewAutoActivate;
        ((UManageRectLightComponent*)Self)->_Supper__SetAutoActivate(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_SetComponentTickEnabled(UActorComponent* Self, bool bEnabled)
    {
        auto _p0 = bEnabled;
        ((UManageRectLightComponent*)Self)->_Supper__SetComponentTickEnabled(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_SetComponentTickEnabledAsync(UActorComponent* Self, bool bEnabled)
    {
        auto _p0 = bEnabled;
        ((UManageRectLightComponent*)Self)->_Supper__SetComponentTickEnabledAsync(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_ToggleActive(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__ToggleActive();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_UninitializeComponent(UActorComponent* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__UninitializeComponent();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_BeginDestroy(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__BeginDestroy();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_FinishDestroy(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__FinishDestroy();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_MarkAsEditorOnlySubobject(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__MarkAsEditorOnlySubobject();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostCDOContruct(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PostCDOContruct();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostEditImport(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PostEditImport();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostInitProperties(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PostInitProperties();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostLoad(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PostLoad();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostNetReceive(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PostNetReceive();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostRepNotifies(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PostRepNotifies();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PostSaveRoot(UObject* Self, bool bCleanupIsRequired)
    {
        auto _p0 = bCleanupIsRequired;
        ((UManageRectLightComponent*)Self)->_Supper__PostSaveRoot(_p0);
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PreDestroyFromReplication(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PreDestroyFromReplication();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_PreNetReceive(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__PreNetReceive();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_ShutdownAfterError(UObject* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__ShutdownAfterError();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_CreateCluster(UObjectBaseUtility* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__CreateCluster();
    }
    DOTNET_EXPORT auto E__Supper__URectLightComponent_OnClusterMarkedAsPendingKill(UObjectBaseUtility* Self)
    {
        ((UManageRectLightComponent*)Self)->_Supper__OnClusterMarkedAsPendingKill();
    }
}
PRAGMA_ENABLE_DEPRECATION_WARNINGS
|
#!/bin/bash
set -e

# FIX: this script builds the ICD10PCS source hierarchy (every echo and SQL
# filter below says ICD10PCS); the original label 'ICD10CM - SOURCE' was a
# copy/paste inconsistency from a sibling script.
SQL_FOR='ICD10PCS - SOURCE'
TBL_CBC='cb_criteria'
TBL_PAS='prep_ancestor_staging'
TBL_PCA='prep_concept_ancestor'

export BQ_PROJECT=$1  # project
export BQ_DATASET=$2  # dataset
ID_PREFIX=$3

####### common block for all make-cb-criteria-dd-*.sh scripts ###########
source ./generate-cdr/cb-criteria-utils.sh
echo "Running in parallel and Multitable mode - " "$ID_PREFIX - $SQL_FOR"

# Reserved id range for this run: [ID_PREFIX, ID_PREFIX+1) billion.
# (Modernized from the obsolete $[ ] arithmetic syntax.)
CB_CRITERIA_START_ID=$((ID_PREFIX*10**9)) # 3 billion
CB_CRITERIA_END_ID=$(((ID_PREFIX+1)*10**9)) # 4 billion

echo "Creating temp table for $TBL_CBC"
TBL_CBC=$(createTmpTable $TBL_CBC)
TBL_PAS=$(createTmpTable $TBL_PAS)
TBL_PCA=$(createTmpTable $TBL_PCA)
####### end common block ###########
# Seed the hierarchy: insert the single ICD10PCS root concept (2500000022)
# with parent_id 0; its id/path are offset into this run's reserved id range.
echo "ICD10PCS - SOURCE - adding root"
bq --quiet --project_id="$BQ_PROJECT" query --batch --nouse_legacy_sql \
"INSERT INTO \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
    (
          id
        , parent_id
        , domain_id
        , is_standard
        , type
        , concept_id
        , code
        , name
        , is_group
        ,is_selectable
        , has_attribute
        , has_hierarchy
        , path
    )
SELECT
    ROW_NUMBER() OVER (ORDER BY concept_id)
        + (SELECT COALESCE(MAX(id),$CB_CRITERIA_START_ID) FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` where id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID) AS id
    , 0
    , domain_id
    , 0
    , vocabulary_id
    , concept_id
    , concept_code
    , concept_name
    , 1
    , 0
    , 0
    , 1
    , CAST(ROW_NUMBER() OVER (ORDER BY concept_id)
        + (SELECT COALESCE(MAX(id),$CB_CRITERIA_START_ID) FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` where id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID) as STRING)
FROM \`$BQ_PROJECT.$BQ_DATASET.prep_concept_merged\`
-- this is the root concept
WHERE concept_id = 2500000022"
# Attach direct children of the root via 'Subsumes' relationships; the path
# is extended from the parent's path.
echo "ICD10PCS - SOURCE - adding second level"
bq --quiet --project_id="$BQ_PROJECT" query --batch --nouse_legacy_sql \
"INSERT INTO \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
    (
          id
        , parent_id
        , domain_id
        , is_standard
        , type
        , concept_id
        , code
        , name
        , is_group
        ,is_selectable
        , has_attribute
        , has_hierarchy
        , path
    )
SELECT
    ROW_NUMBER() OVER (ORDER BY p.parent_id, c.concept_code) +
        (SELECT MAX(id) FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` where id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID) AS id
    , p.id AS parent_id
    , p.domain_id
    , p.is_standard
    , p.type
    , c.concept_id AS concept_id
    , c.concept_code AS code
    , c.concept_name AS name
    , 1
    , 0
    , 0
    , 1
    ,CONCAT(p.path, '.', CAST(ROW_NUMBER() OVER (ORDER BY p.parent_id,c.concept_code)
        + (SELECT MAX(id) FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` where id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID) as STRING))
FROM
    (
        SELECT *
        FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
        WHERE parent_id = 0
            and type = 'ICD10PCS'
            and id > $CB_CRITERIA_START_ID and id < $CB_CRITERIA_END_ID
    ) p
JOIN
    (
        SELECT concept_id_1,concept_id_2
        FROM \`$BQ_PROJECT.$BQ_DATASET.prep_concept_relationship_merged\`
        WHERE relationship_id = 'Subsumes'
    ) b on p.concept_id = b.concept_id_1
JOIN \`$BQ_PROJECT.$BQ_DATASET.prep_concept_merged\` c on b.concept_id_2 = c.concept_id"
# for each loop, add all items (children/parents) related to the items that were previously added
# only need to loop 6 times, but do 7 to be safe
# Each pass inserts children of nodes that are not yet parents of anything;
# leaf detection (is_group) comes from the anti-join aliased as 'l'.
for i in {1..7};
do
    echo "ICD10PCS - SOURCE - adding level $i"
    bq --quiet --project_id="$BQ_PROJECT" query --batch --nouse_legacy_sql \
    "INSERT INTO \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
        (
              id
            , parent_id
            , domain_id
            , is_standard
            , type
            , concept_id
            , code
            , name
            , rollup_count
            , item_count
            , is_group
            ,is_selectable
            , has_attribute
            , has_hierarchy
            , path
        )
    SELECT
        ROW_NUMBER() OVER (ORDER BY p.id, c.concept_code)
            + (SELECT MAX(id) FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` where id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID)
        , p.id
        , p.domain_id
        , p.is_standard
        , p.type
        , c.concept_id
        , c.concept_code
        , c.concept_name
        , 0
        , 0
        , CASE WHEN l.concept_code is null THEN 1 ELSE 0 END as is_group
        , 1
        , 0
        , 1
        , CONCAT(p.path, '.', CAST(ROW_NUMBER() OVER (ORDER BY p.id, c.concept_code)
            + (SELECT MAX(id) FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` where id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID) as STRING))
    FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` p
    JOIN \`$BQ_PROJECT.$BQ_DATASET.prep_icd10pcs_rel_src_in_data\` c on p.code = c.p_concept_code
    LEFT JOIN
        (
            SELECT DISTINCT a.concept_code
            FROM \`$BQ_PROJECT.$BQ_DATASET.prep_icd10pcs_rel_src_in_data\` a
            LEFT JOIN \`$BQ_PROJECT.$BQ_DATASET.prep_icd10pcs_rel_src_in_data\` b on a.concept_id = b.p_concept_id
            WHERE b.concept_id is null
        ) l on c.concept_code = l.concept_code
    WHERE p.type = 'ICD10PCS'
        and p.is_standard = 0
        and p.id not in
            (
                SELECT parent_id
                FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
                WHERE id > $CB_CRITERIA_START_ID and id < $CB_CRITERIA_END_ID
            )"
done
# Flatten up to 5 levels of descendants per selectable group node into the
# staging table (one column per level), for ancestor expansion below.
echo "ICD10PCS - SOURCE - add items into ancestor staging to use in next query"
bq --quiet --project_id="$BQ_PROJECT" query --batch --nouse_legacy_sql \
"INSERT INTO \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
    (
          ancestor_concept_id
        , domain_id
        , type
        , is_standard
        , concept_id_1
        , concept_id_2
        , concept_id_3
        , concept_id_4
        , concept_id_5
    )
SELECT DISTINCT a.concept_id as ancestor_concept_id
    , a.domain_id
    , a.type
    , a.is_standard
    , b.concept_id c1
    , c.concept_id c2
    , d.concept_id c3
    , e.concept_id c4
    , f.concept_id c5
FROM
    (SELECT id, parent_id, domain_id, type, is_standard, concept_id FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
        WHERE domain_id = 'PROCEDURE' and type = 'ICD10PCS' and is_group = 1 and is_selectable = 1 and is_standard = 0
        and id > $CB_CRITERIA_START_ID AND id < $CB_CRITERIA_END_ID ) a
    LEFT JOIN (SELECT id, parent_id, domain_id, type, is_standard, concept_id FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` WHERE type = 'ICD10PCS') b on a.id = b.parent_id
    LEFT JOIN (SELECT id, parent_id, domain_id, type, is_standard, concept_id FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` WHERE type = 'ICD10PCS') c on b.id = c.parent_id
    LEFT JOIN (SELECT id, parent_id, domain_id, type, is_standard, concept_id FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` WHERE type = 'ICD10PCS') d on c.id = d.parent_id
    LEFT JOIN (SELECT id, parent_id, domain_id, type, is_standard, concept_id FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` WHERE type = 'ICD10PCS') e on d.id = e.parent_id
    LEFT JOIN (SELECT id, parent_id, domain_id, type, is_standard, concept_id FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` WHERE type = 'ICD10PCS') f on e.id = f.parent_id"
# Unpivot the 5 staged level columns into (ancestor, descendant) pairs,
# plus a self-pair for every ancestor.
echo "ICD10PCS - SOURCE - insert into prep_concept_ancestor"
bq --quiet --project_id="$BQ_PROJECT" query --batch --nouse_legacy_sql \
"INSERT INTO \`$BQ_PROJECT.$BQ_DATASET.$TBL_PCA\`
    (
          ancestor_concept_id
        , descendant_concept_id
        , is_standard
    )
SELECT DISTINCT ancestor_concept_id, concept_id_5 as descendant_concept_id, is_standard
FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
WHERE concept_id_5 is not null
    and type = 'ICD10PCS'
    and is_standard = 0
UNION DISTINCT
SELECT DISTINCT ancestor_concept_id, concept_id_4 as descendant_concept_id, is_standard
FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
WHERE concept_id_4 is not null
    and type = 'ICD10PCS'
    and is_standard = 0
UNION DISTINCT
SELECT DISTINCT ancestor_concept_id, concept_id_3 as descendant_concept_id, is_standard
FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
WHERE concept_id_3 is not null
    and type = 'ICD10PCS'
    and is_standard = 0
UNION DISTINCT
SELECT DISTINCT ancestor_concept_id, concept_id_2 as descendant_concept_id, is_standard
FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
WHERE concept_id_2 is not null
    and type = 'ICD10PCS'
    and is_standard = 0
UNION DISTINCT
SELECT DISTINCT ancestor_concept_id, concept_id_1 as descendant_concept_id, is_standard
FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
WHERE concept_id_1 is not null
    and type = 'ICD10PCS'
    and is_standard = 0
UNION DISTINCT
-- this statement is to add the ancestor item to itself
SELECT DISTINCT ancestor_concept_id, ancestor_concept_id as descendant_concept_id, is_standard
FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PAS\`
WHERE type = 'ICD10PCS'
    and is_standard = 0"
# Per-concept distinct-person counts from the events table, written to both
# item_count and est_count for selectable ICD10PCS rows.
echo "ICD10PCS - SOURCE - generate item counts"
bq --quiet --project_id=$BQ_PROJECT query --batch --nouse_legacy_sql \
"UPDATE \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` x
SET x.item_count = y.cnt
    , x.est_count = y.cnt
FROM
    (
        SELECT concept_id, COUNT(distinct person_id) cnt
        FROM \`$BQ_PROJECT.$BQ_DATASET.cb_search_all_events\`
        WHERE is_standard = 0
        GROUP BY 1
    ) y
WHERE x.concept_id = y.concept_id
    and x.type = 'ICD10PCS'
    and x.is_standard = 0
    and x.is_selectable = 1"
# Roll descendant counts up to group nodes via the ancestor table; overwrites
# est_count on groups (rollup wins over item_count set above).
echo "ICD10PCS - SOURCE - generate rollup counts"
bq --quiet --project_id="$BQ_PROJECT" query --batch --nouse_legacy_sql \
"UPDATE \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\` x
SET x.rollup_count = y.cnt
    , x.est_count = y.cnt
FROM
    (
        SELECT ancestor_concept_id as concept_id
            , COUNT(distinct person_id) cnt
        FROM
            (
                SELECT ancestor_concept_id
                    , descendant_concept_id
                FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_PCA\`
                WHERE ancestor_concept_id in
                    (
                        SELECT DISTINCT concept_id
                        FROM \`$BQ_PROJECT.$BQ_DATASET.$TBL_CBC\`
                        WHERE type = 'ICD10PCS'
                            and is_standard = 0
                            and is_selectable = 1
                            and is_group = 1
                    )
                    and is_standard = 0
            ) a
        JOIN \`$BQ_PROJECT.$BQ_DATASET.cb_search_all_events\` b on a.descendant_concept_id = b.concept_id
        WHERE b.is_standard = 0
        GROUP BY 1
    ) y
WHERE x.concept_id = y.concept_id
    and x.type = 'ICD10PCS'
    and x.is_standard = 0
    and x.is_group = 1"
#wait for process to end before copying
wait
## copy temp tables back to main tables, and delete temp?
# The three copies are independent, so they run in parallel.
cpToMain "$TBL_CBC" &
cpToMain "$TBL_PAS" &
cpToMain "$TBL_PCA" &
wait
|
package parser;
import org.jooby.mvc.Body;
import org.jooby.mvc.Header;
import org.jooby.mvc.POST;
import org.jooby.mvc.Path;
import javax.inject.Named;
/**
 * MVC API.
 */
@Path("/mvc")
public class MvcRoutes {

    /**
     * MVC doIt.
     *
     * @param q Query string. Like: <code>q=foo</code>
     * @param offset Result offset, bound from the request parameters.
     * @param max Maximum number of results, bound from the "Max" parameter.
     * @param id Value of the "ID" request header.
     * @param body Request body, deserialized into a {@link Foo}.
     * @return String value. NOTE(review): the returned literal is "dot" —
     *         possibly a typo for "doIt"; confirm before relying on it.
     */
    @POST
    public String doIt(final String q, final int offset, @Named("Max") final int max,
        @Header("ID") final String id, @Body final Foo body) {
        return "dot";
    }
}
|
<reponame>visika/nomie
/**
* Goals... INCOMPLETE
*/
import GoalsDefault from './goals.svelte';
import GoalsSettings from './settings.svelte';
/**
 * Plugin descriptor for the Goals feature (marked INCOMPLETE upstream).
 * Registers the main Goals page and its settings page under id 'nomie-goals'.
 */
export default {
  emoji: '🥇',
  name: 'Goals',
  id: 'nomie-goals',
  pages: {
    settings: GoalsSettings,
    default: GoalsDefault,
  },
  // Bottom-tab entry intentionally left disabled while the feature is
  // incomplete:
  // tab: {
  //   icon: 'star-outline',
  //   label: 'Goals',
  // },
};
|
<gh_stars>1-10
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// String alias types for the enumerated values used in the Pod API below.
type (
	ConditionStatus  string
	PodConditionType string
	PodPhase         string
	RestartPolicy    string
)
// Pod is a collection of containers, used as either input (create, update) or as output (list, get).
// NOTE(review): this appears to be a trimmed copy of the upstream
// k8s.io/api core v1 Pod type — confirm against the vendored version.
type Pod struct {
	metav1.TypeMeta `json:",inline"`
	// Standard object's metadata.
	// More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
	// +optional
	metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Specification of the desired behavior of the pod.
	// More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
	// +optional
	Spec PodSpec `json:"spec,omitempty" protobuf:"bytes,2,opt,name=spec"`
	// Most recently observed status of the pod.
	// This data may not be up to date.
	// Populated by the system.
	// Read-only.
	// More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
	// +optional
	Status PodStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
// PodStatus represents information about the status of a pod. Status may trail the actual
// state of a system.
type PodStatus struct {
	// Current condition of the pod.
	// More info: http://kubernetes.io/docs/user-guide/pod-states#pod-phase
	// +optional
	Phase PodPhase `json:"phase,omitempty" protobuf:"bytes,1,opt,name=phase,casttype=PodPhase"`
	// Current service state of pod.
	// More info: http://kubernetes.io/docs/user-guide/pod-states#pod-conditions
	// +optional
	Conditions []PodCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,2,rep,name=conditions"`
	// A human readable message indicating details about why the pod is in this condition.
	// +optional
	Message string `json:"message,omitempty" protobuf:"bytes,3,opt,name=message"`
	// A brief CamelCase message indicating details about why the pod is in this state.
	// e.g. 'OutOfDisk'
	// +optional
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// IP address of the host to which the pod is assigned. Empty if not yet scheduled.
	// +optional
	HostIP string `json:"hostIP,omitempty" protobuf:"bytes,5,opt,name=hostIP"`
	// IP address allocated to the pod. Routable at least within the cluster.
	// Empty if not yet allocated.
	// +optional
	PodIP string `json:"podIP,omitempty" protobuf:"bytes,6,opt,name=podIP"`
	// RFC 3339 date and time at which the object was acknowledged by the Kubelet.
	// This is before the Kubelet pulled the container image(s) for the pod.
	// +optional
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,7,opt,name=startTime"`
}
// PodCondition contains details for the current condition of this pod.
type PodCondition struct {
	// Type is the type of the condition.
	// Currently only Ready.
	// More info: http://kubernetes.io/docs/user-guide/pod-states#pod-conditions
	Type PodConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=PodConditionType"`
	// Status is the status of the condition.
	// Can be True, False, Unknown.
	// More info: http://kubernetes.io/docs/user-guide/pod-states#pod-conditions
	Status ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=ConditionStatus"`
	// Last time we probed the condition.
	// +optional
	LastProbeTime metav1.Time `json:"lastProbeTime,omitempty" protobuf:"bytes,3,opt,name=lastProbeTime"`
	// Last time the condition transitioned from one status to another.
	// +optional
	LastTransitionTime metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// Unique, one-word, CamelCase reason for the condition's last transition.
	// +optional
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// Human-readable message indicating details about last transition.
	// +optional
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}
// PodSpec is a description of a pod
type PodSpec struct {
	// Restart policy for all containers within the pod.
	// One of Always, OnFailure, Never.
	// Default to Always.
	// More info: http://kubernetes.io/docs/user-guide/pod-states#restartpolicy
	// +optional
	RestartPolicy RestartPolicy `json:"restartPolicy,omitempty" protobuf:"bytes,3,opt,name=restartPolicy,casttype=RestartPolicy"`
	// Optional duration in seconds the pod needs to terminate gracefully. May be decreased in delete request.
	// Value must be non-negative integer. The value zero indicates delete immediately.
	// If this value is nil, the default grace period will be used instead.
	// The grace period is the duration in seconds after the processes running in the pod are sent
	// a termination signal and the time when the processes are forcibly halted with a kill signal.
	// Set this value longer than the expected cleanup time for your process.
	// Defaults to 30 seconds.
	// +optional
	TerminationGracePeriodSeconds *int64 `json:"terminationGracePeriodSeconds,omitempty" protobuf:"varint,4,opt,name=terminationGracePeriodSeconds"`
	// Optional duration in seconds the pod may be active on the node relative to
	// StartTime before the system will actively try to mark it failed and kill associated containers.
	// Value must be a positive integer.
	// +optional
	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty" protobuf:"varint,5,opt,name=activeDeadlineSeconds"`
	// NodeSelector is a selector which must be true for the pod to fit on a node.
	// Selector which must match a node's labels for the pod to be scheduled on that node.
	// More info: http://kubernetes.io/docs/user-guide/node-selection/README
	// +optional
	NodeSelector map[string]string `json:"nodeSelector,omitempty" protobuf:"bytes,7,rep,name=nodeSelector"`
	// ServiceAccountName is the name of the ServiceAccount to use to run this pod.
	// More info: http://releases.k8s.io/HEAD/docs/design/service_accounts.md
	// +optional
	ServiceAccountName string `json:"serviceAccountName,omitempty" protobuf:"bytes,8,opt,name=serviceAccountName"`
	// DeprecatedServiceAccount is a deprecated alias for ServiceAccountName.
	// Deprecated: Use serviceAccountName instead.
	// +k8s:conversion-gen=false
	// +optional
	DeprecatedServiceAccount string `json:"serviceAccount,omitempty" protobuf:"bytes,9,opt,name=serviceAccount"`
	// NodeName is a request to schedule this pod onto a specific node. If it is non-empty,
	// the scheduler simply schedules this pod onto that node, assuming that it fits resource
	// requirements.
	// +optional
	NodeName string `json:"nodeName,omitempty" protobuf:"bytes,10,opt,name=nodeName"`
	// Host networking requested for this pod. Use the host's network namespace.
	// If this option is set, the ports that will be used must be specified.
	// Default to false.
	// +k8s:conversion-gen=false
	// +optional
	HostNetwork bool `json:"hostNetwork,omitempty" protobuf:"varint,11,opt,name=hostNetwork"`
	// Use the host's pid namespace.
	// Optional: Default to false.
	// +k8s:conversion-gen=false
	// +optional
	HostPID bool `json:"hostPID,omitempty" protobuf:"varint,12,opt,name=hostPID"`
	// Use the host's ipc namespace.
	// Optional: Default to false.
	// +k8s:conversion-gen=false
	// +optional
	HostIPC bool `json:"hostIPC,omitempty" protobuf:"varint,13,opt,name=hostIPC"`
	// Specifies the hostname of the Pod
	// If not specified, the pod's hostname will be set to a system-defined value.
	// +optional
	Hostname string `json:"hostname,omitempty" protobuf:"bytes,16,opt,name=hostname"`
	// If specified, the fully qualified Pod hostname will be "<hostname>.<subdomain>.<pod namespace>.svc.<cluster domain>".
	// If not specified, the pod will not have a domainname at all.
	// +optional
	Subdomain string `json:"subdomain,omitempty" protobuf:"bytes,17,opt,name=subdomain"`
	// If specified, the pod will be dispatched by specified scheduler.
	// If not specified, the pod will be dispatched by default scheduler.
	// +optional
	SchedulerName string `json:"schedulername,omitempty" protobuf:"bytes,19,opt,name=schedulername"`
}
// PodList is a list of Pods.
// It is the wire format returned by list/watch operations on pods.
type PodList struct {
	metav1.TypeMeta `json:",inline"`
	// Standard list metadata.
	// More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
	// +optional
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// List of pods.
	// More info: http://kubernetes.io/docs/user-guide/pods
	Items []Pod `json:"items" protobuf:"bytes,2,rep,name=items"`
}
|
#!/bin/bash -e
# log_level: write a timestamped, severity-tagged message to stdout.
# Usage: log_level [-e|-w|-i] message...
#   -e error, -w warning, -i info; any other first argument logs at debug.
# Fix: "${@:2}" is quoted so the message is no longer subject to word
# splitting and glob expansion (a message containing '*' previously
# expanded against files in the current directory).
log_level()
{
    case "$1" in
        -e) echo "$(date) [Err]  " "${@:2}"
        ;;
        -w) echo "$(date) [Warn] " "${@:2}"
        ;;
        -i) echo "$(date) [Info] " "${@:2}"
        ;;
        *)  echo "$(date) [Debug] " "${@:2}"
        ;;
    esac
}
# printUsage: print command-line help for this script and exit with a
# non-zero status so callers treat bad invocations as failures.
printUsage()
{
    echo " Usage:"
    echo " $FILENAME --identity-file id_rsa --master 192.168.102.34 --user azureuser"
    echo ""
    echo " -i, --identity-file RSA Private Key file to connect kubernetes master VM, it starts with -----BEGIN RSA PRIVATE KEY-----"
    echo " -m, --master Public ip of Kubernetes cluster master VM. Normally VM name starts with k8s-master- "
    echo " -u, --user User Name of Kubernetes cluster master VM "
    echo " -o, --output-file Summary file providing result status of the deployment."
    exit 1
}
# final_changes: safety net intended for an EXIT trap. If the script died
# before writing a summary file, record an overall "failed" result so the
# caller never sees a missing result file.
# Fix: $OUTPUT_SUMMARYFILE is now quoted; an unquoted path containing
# spaces would previously break both the -f test and the redirection.
function final_changes {
    if [ ! -f "$OUTPUT_SUMMARYFILE" ]; then
        printf '{"result":"%s"}\n' "failed" > "$OUTPUT_SUMMARYFILE"
    fi
}
FILENAME=$0

# Parse command-line options. Each option consumes its value, so two
# positions are shifted per iteration when both are present.
while [[ "$#" -gt 0 ]]
do
    case $1 in
        -i|--identity-file)
            IDENTITY_FILE="$2"
        ;;
        -m|--master)
            MASTER_IP="$2"
        ;;
        -u|--user)
            USER_NAME="$2"
        ;;
        -o|--output-file)
            OUTPUT_SUMMARYFILE="$2"
        ;;
        *)
            echo ""
            echo "Incorrect parameter $1"
            echo ""
            printUsage
        ;;
    esac

    # Guard against a trailing option with no value: only shift by 2 when
    # at least two arguments remain.
    if [ "$#" -ge 2 ]
    then
        shift 2
    else
        shift
    fi
done

FILE_NAME=$0
# All logs and downloaded artifacts are placed next to the summary file.
OUTPUT_FOLDER="$(dirname $OUTPUT_SUMMARYFILE)"
LOG_FILENAME="$OUTPUT_FOLDER/deploy.log"
touch $LOG_FILENAME

# The entire deployment runs in this group so stdout+stderr are mirrored
# into the log file by the trailing `tee`.
{
    # Github details.
    COMMON_SCRIPT_FILENAME="common.sh"
    GIT_REPROSITORY="${GIT_REPROSITORY:-msazurestackworkloads/kubetools}"
    GIT_BRANCH="${GIT_BRANCH:-master}"
    INSTALL_PREREQUISITE="install_prerequisite.sh"
    SCRIPT_DIRECTORY="$(dirname $FILE_NAME)"
    TEST_DIRECTORY="/home/$USER_NAME/sonobuoy"

    log_level -i "------------------------------------------------------------------------"
    log_level -i "Input Parameters"
    log_level -i "------------------------------------------------------------------------"
    log_level -i "IDENTITY_FILE       : $IDENTITY_FILE"
    log_level -i "GIT_BRANCH          : $GIT_BRANCH"
    log_level -i "GIT_REPROSITORY     : $GIT_REPROSITORY"
    log_level -i "MASTER_IP           : $MASTER_IP"
    log_level -i "OUTPUT_SUMMARYFILE  : $OUTPUT_SUMMARYFILE"
    log_level -i "USER_NAME           : $USER_NAME"
    log_level -i "------------------------------------------------------------------------"

    # Query the cluster's Kubernetes version over ssh and pick a matching
    # Sonobuoy release; unknown versions fall back to 0.14.0.
    log_level -i "Based on K8s define which version of SONOBUOY to be used."
    KUBERNETES_VERSION=$(ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP 'kubectl version -o json | jq -r .serverVersion.gitVersion | cut -c 2-')
    KUBERNETES_MAJOR_VERSION="${KUBERNETES_VERSION%.*}"
    case "$KUBERNETES_MAJOR_VERSION" in
        1.11) SONOBUOY_VERSION="0.13.0"
        ;;
        1.15) SONOBUOY_VERSION="0.15.0"
        ;;
        1.16) SONOBUOY_VERSION="0.16.0"
        ;;
        *) SONOBUOY_VERSION="0.14.0"
        ;;
    esac
    SONOBUOY_TAR_FILENAME="sonobuoy_"$SONOBUOY_VERSION"_linux_amd64.tar.gz"

    log_level -i "------------------------------------------------------------------------"
    log_level -i " Inner Variables"
    log_level -i "------------------------------------------------------------------------"
    log_level -i "COMMON_SCRIPT_FILENAME   : $COMMON_SCRIPT_FILENAME"
    log_level -i "INSTALL_PREREQUISITE     : $INSTALL_PREREQUISITE"
    log_level -i "KUBERNETES_MAJOR_VERSION : $KUBERNETES_MAJOR_VERSION"
    log_level -i "KUBERNETES_VERSION       : $KUBERNETES_VERSION"
    log_level -i "SCRIPT_DIRECTORY         : $SCRIPT_DIRECTORY"
    log_level -i "SONOBUOY_TAR_FILENAME    : $SONOBUOY_TAR_FILENAME"
    log_level -i "SONOBUOY_VERSION         : $SONOBUOY_VERSION"
    log_level -i "TEST_DIRECTORY           : $TEST_DIRECTORY"
    log_level -i "------------------------------------------------------------------------"

    # ----------------------------------------------------------------------------------------
    # INSTALL PREREQUISITE
    # Fetch the shared helper script from GitHub; it provides
    # download_file_locally used below.
    curl -o $SCRIPT_DIRECTORY/$COMMON_SCRIPT_FILENAME \
    https://raw.githubusercontent.com/$GIT_REPROSITORY/$GIT_BRANCH/applications/common/$COMMON_SCRIPT_FILENAME
    if [ ! -f $SCRIPT_DIRECTORY/$COMMON_SCRIPT_FILENAME ]; then
        log_level -e "File($COMMON_SCRIPT_FILENAME) failed to download."
        exit 1
    fi
    source $SCRIPT_DIRECTORY/$COMMON_SCRIPT_FILENAME

    download_file_locally $GIT_REPROSITORY \
    $GIT_BRANCH \
    "applications/common" \
    $SCRIPT_DIRECTORY \
    $INSTALL_PREREQUISITE
    if [[ $? != 0 ]]; then
        log_level -e "Download of file($INSTALL_PREREQUISITE) failed."
        printf '{"result":"%s","error":"%s"}\n' "failed" "Download of file($INSTALL_PREREQUISITE) was not successfull." > $OUTPUT_SUMMARYFILE
        exit 1
    fi

    # Stage the prerequisite installer on the master VM and run it there.
    log_level -i "Create test folder($TEST_DIRECTORY)"
    ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "mkdir -p $TEST_DIRECTORY"

    log_level -i "Copy file($INSTALL_PREREQUISITE) to VM."
    scp -i $IDENTITY_FILE \
    $SCRIPT_DIRECTORY/$INSTALL_PREREQUISITE \
    $USER_NAME@$MASTER_IP:$TEST_DIRECTORY/

    # INSTALL PREREQUISITE
    ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "sudo chmod 744 $TEST_DIRECTORY/$INSTALL_PREREQUISITE; "
    ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "cd $TEST_DIRECTORY; source $INSTALL_PREREQUISITE; apt_install_important_packages ;"

    # Sonobuoy needs Go on the VM; verify the prerequisite step installed it.
    goPath=$(ssh -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "go env | grep GOPATH || true")
    if [ -z "$goPath" ]; then
        log_level -e "GO is not installed."
        result="failed"
        printf '{"result":"%s","error":"%s"}\n' "$result" "Go is not installed." > $OUTPUT_SUMMARYFILE
        exit 1
    else
        log_level -i "Go installed with GOPATH($goPath)"
    fi

    # ----------------------------------------------------------------------------------------
    # Install Sonobuoy
    # Download the release tarball locally, then copy and unpack it on the VM.
    curl -L -o $OUTPUT_FOLDER/$SONOBUOY_TAR_FILENAME \
    https://github.com/heptio/sonobuoy/releases/download/v$SONOBUOY_VERSION/$SONOBUOY_TAR_FILENAME
    if [ ! -f $OUTPUT_FOLDER/$SONOBUOY_TAR_FILENAME ]; then
        log_level -e "File($SONOBUOY_TAR_FILENAME) failed to download."
        result="failed"
        printf '{"result":"%s","error":"%s"}\n' "$result" "File($SONOBUOY_TAR_FILENAME) failed to download." > $OUTPUT_SUMMARYFILE
        exit 1
    fi

    log_level -i "Copy file($SONOBUOY_TAR_FILENAME) to VM."
    scp -i $IDENTITY_FILE \
    $OUTPUT_FOLDER/$SONOBUOY_TAR_FILENAME \
    $USER_NAME@$MASTER_IP:$TEST_DIRECTORY/

    ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "cd $TEST_DIRECTORY; sudo tar -xvf $SONOBUOY_TAR_FILENAME"

    # ----------------------------------------------------------------------------------------
    # Launch Sonobuoy
    #ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "cd $TEST_DIRECTORY; ./sonobuoy run --mode quick;"
    ssh -t -i $IDENTITY_FILE $USER_NAME@$MASTER_IP "cd $TEST_DIRECTORY; ./sonobuoy run;"

    # NOTE(review): "pass" here only means the run was launched; results are
    # collected by a separate script.
    result="pass"
    printf '{"result":"%s"}\n' "$result" > $OUTPUT_SUMMARYFILE

    #Create result file, even if script ends with an error
    #trap final_changes EXIT
} 2>&1 | tee $LOG_FILENAME
<reponame>757670303037/stable-baselines<gh_stars>1000+
import subprocess
import gym
import numpy as np
import pytest
from stable_baselines import DDPG, DQN, SAC, TD3
from stable_baselines.common.running_mean_std import RunningMeanStd
from stable_baselines.common.vec_env import (DummyVecEnv, VecNormalize, VecFrameStack,
sync_envs_normalization, unwrap_vec_normalize)
from .test_common import _assert_eq
ENV_ID = 'Pendulum-v0'
def make_env():
    """Build a fresh instance of the test environment (Pendulum-v0)."""
    env = gym.make(ENV_ID)
    return env
def test_runningmeanstd():
    """Test RunningMeanStd object: incremental stats must match batch stats."""
    cases = [
        (np.random.randn(3), np.random.randn(4), np.random.randn(5)),
        (np.random.randn(3, 2), np.random.randn(4, 2), np.random.randn(5, 2)),
    ]
    for batches in cases:
        rms = RunningMeanStd(epsilon=0.0, shape=batches[0].shape[1:])
        stacked = np.concatenate(batches, axis=0)
        expected = [stacked.mean(axis=0), stacked.var(axis=0)]
        for batch in batches:
            rms.update(batch)
        actual = [rms.mean, rms.var]
        assert np.allclose(expected, actual)
def check_rms_equal(rmsa, rmsb):
    """Assert two RunningMeanStd instances carry identical statistics."""
    for attr in ("mean", "var", "count"):
        assert np.all(getattr(rmsa, attr) == getattr(rmsb, attr))
def check_vec_norm_equal(norma, normb):
    """Assert two VecNormalize wrappers are configured and warmed up identically."""
    assert norma.observation_space == normb.observation_space
    assert norma.action_space == normb.action_space
    assert norma.num_envs == normb.num_envs
    check_rms_equal(norma.obs_rms, normb.obs_rms)
    check_rms_equal(norma.ret_rms, normb.ret_rms)
    for attr in ("clip_obs", "clip_reward", "norm_obs", "norm_reward",
                 "gamma", "epsilon", "training"):
        assert getattr(norma, attr) == getattr(normb, attr)
    assert np.all(norma.ret == normb.ret)
def test_vec_env(tmpdir):
    """Test VecNormalize Object"""
    clip_obs = 0.5
    clip_reward = 5.0

    orig_venv = DummyVecEnv([make_env])
    norm_venv = VecNormalize(orig_venv, norm_obs=True, norm_reward=True, clip_obs=clip_obs, clip_reward=clip_reward)

    # Run one full episode and verify every normalized observation/reward
    # stays within the configured clipping bounds.
    _, done = norm_venv.reset(), [False]
    while not done[0]:
        actions = [norm_venv.action_space.sample()]
        obs, rew, done, _ = norm_venv.step(actions)
        assert np.max(np.abs(obs)) <= clip_obs
        assert np.max(np.abs(rew)) <= clip_reward

    # Round-trip the wrapper through save/load and confirm the deserialized
    # copy is indistinguishable from the original.
    path = str(tmpdir.join("vec_normalize"))
    norm_venv.save(path)
    deserialized = VecNormalize.load(path, venv=orig_venv)
    check_vec_norm_equal(norm_venv, deserialized)
def _make_warmstart_cartpole():
    """Warm-start VecNormalize by stepping through CartPole.

    Returns a VecNormalize-wrapped DummyVecEnv whose running statistics have
    been populated by 100 random-action steps, so normalization is non-trivial.
    """
    venv = DummyVecEnv([lambda: gym.make("CartPole-v1")])
    venv = VecNormalize(venv)
    venv.reset()
    venv.get_original_obs()

    for _ in range(100):
        actions = [venv.action_space.sample()]
        venv.step(actions)
    return venv
def test_get_original():
    """Check that get_original_obs/get_original_reward undo normalization."""
    venv = _make_warmstart_cartpole()
    for _ in range(3):
        actions = [venv.action_space.sample()]
        obs, rewards, _, _ = venv.step(actions)
        # Unwrap the single-env batch dimension.
        obs = obs[0]
        orig_obs = venv.get_original_obs()[0]
        rewards = rewards[0]
        orig_rewards = venv.get_original_reward()[0]

        # CartPole always yields reward 1 until termination.
        assert np.all(orig_rewards == 1)
        assert orig_obs.shape == obs.shape
        assert orig_rewards.dtype == rewards.dtype
        # Normalized values must differ from the originals...
        assert not np.array_equal(orig_obs, obs)
        assert not np.array_equal(orig_rewards, rewards)
        # ...but re-normalizing the originals must reproduce them exactly.
        np.testing.assert_allclose(venv.normalize_obs(orig_obs), obs)
        np.testing.assert_allclose(venv.normalize_reward(orig_rewards), rewards)
def test_normalize_external():
    """Normalize externally-supplied values with a warmed-up wrapper."""
    venv = _make_warmstart_cartpole()

    rewards = np.array([1, 1])
    norm_rewards = venv.normalize_reward(rewards)
    assert norm_rewards.shape == rewards.shape
    # Episode return is almost always >= 1 in CartPole. So reward should shrink.
    assert np.all(norm_rewards < 1)

    # Don't have any guarantees on obs normalization, except shape, really.
    obs = np.array([0, 0, 0, 0])
    norm_obs = venv.normalize_obs(obs)
    assert obs.shape == norm_obs.shape
@pytest.mark.parametrize("model_class", [DDPG, DQN, SAC, TD3])
def test_offpolicy_normalization(model_class):
    """Smoke-test off-policy algorithms training inside VecNormalize."""
    # DQN needs a discrete action space; the others use continuous Pendulum.
    if model_class == DQN:
        env = DummyVecEnv([lambda: gym.make('CartPole-v1')])
    else:
        env = DummyVecEnv([make_env])

    env = VecNormalize(env, norm_obs=True, norm_reward=True, clip_obs=10., clip_reward=10.)
    model = model_class('MlpPolicy', env, verbose=1)
    model.learn(total_timesteps=1000)
    # Check getter
    assert isinstance(model.get_vec_normalize_env(), VecNormalize)
def test_sync_vec_normalize():
    """Test unwrap_vec_normalize and sync_envs_normalization across wrappers."""
    env = DummyVecEnv([make_env])
    # A bare DummyVecEnv has no VecNormalize layer to find.
    assert unwrap_vec_normalize(env) is None

    env = VecNormalize(env, norm_obs=True, norm_reward=True, clip_obs=10., clip_reward=10.)
    assert isinstance(unwrap_vec_normalize(env), VecNormalize)

    # Unwrapping must also see through an outer VecFrameStack layer.
    env = VecFrameStack(env, 1)
    assert isinstance(unwrap_vec_normalize(env), VecNormalize)

    eval_env = DummyVecEnv([make_env])
    eval_env = VecNormalize(eval_env, training=False, norm_obs=True, norm_reward=True, clip_obs=10., clip_reward=10.)
    eval_env = VecFrameStack(eval_env, 1)

    env.reset()
    # Initialize running mean
    for _ in range(100):
        env.step([env.action_space.sample()])

    obs = env.reset()
    original_obs = env.get_original_obs()
    dummy_rewards = np.random.rand(10)
    # Normalization must be different
    assert not np.allclose(obs, eval_env.normalize_obs(original_obs))

    sync_envs_normalization(env, eval_env)

    # Now they must be synced
    assert np.allclose(obs, eval_env.normalize_obs(original_obs))
    assert np.allclose(env.normalize_reward(dummy_rewards), eval_env.normalize_reward(dummy_rewards))
def test_mpi_runningmeanstd():
    """Test RunningMeanStd object for MPI"""
    # Test will be run in CI before pytest is run
    pytest.skip()
    exit_code = subprocess.call(['mpirun', '--allow-run-as-root', '-np', '2',
                                 'python', '-m', 'stable_baselines.common.mpi_running_mean_std'])
    _assert_eq(exit_code, 0)
def test_mpi_moments():
    """
    test running mean std function
    """
    # Test will be run in CI before pytest is run
    pytest.skip()
    cmd = ['mpirun', '--allow-run-as-root', '-np', '3', 'python', '-c',
           'from stable_baselines.common.mpi_moments '
           'import _helper_runningmeanstd; _helper_runningmeanstd()']
    subprocess.check_call(cmd)
|
/*
* Copyright © 2020 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
// Public entry point for the framework's built-in modules: re-export the
// base classes plus each concrete module and its transaction assets under
// stable, prefixed aliases.
export * from './base_module';
export * from './base_asset';
export { KeysModule, RegisterAsset as KeysRegisterAsset } from './keys';
export { TokenModule, TransferAsset as TokenTransferAsset } from './token';
export { SequenceModule } from './sequence';
export {
	DPoSModule,
	RegisterTransactionAsset as DPoSRegisterAsset,
	VoteTransactionAsset as DPoSVoteAsset,
	UnlockTransactionAsset as DPoSUnlockAsset,
	PomTransactionAsset as DPoSPoMAsset,
} from './dpos';
|
// pages/success/success.js
// WeChat mini-program "success" page: shows an order-success screen with a
// return-home action and a (currently unconfigured) payment trigger.
Page({
  data:{},
  onLoad:function(options){
    // Page initialization; `options` carries the parameters passed on navigation.
  },
  onReady:function(){
    // Page finished rendering.
  },
  onShow:function(){
    // Page became visible.
  },
  onHide:function(){
    // Page was hidden.
  },
  onUnload:function(){
    // Page was closed.
  },
  returnClick:function(){
    // Navigate back to the home page.
    wx.navigateTo({
      url: '../home/home',
      success: function(res){
        // success
      },
      fail: function() {
        // fail
      },
      complete: function() {
        // complete
      }
    })
  },
  // Place order and pay.
  // NOTE(review): all payment fields are empty placeholders — this call will
  // fail until timeStamp/nonceStr/package/paySign are filled in server-side.
  doPay:function(){
    wx.requestPayment({
      "timeStamp": "",
      "nonceStr": "",
      "package": "",
      "signType": "MD5",
      "paySign": "",
      "success":function(res){
      },
      "fail":function(res){
      }
    })
  }
})
<gh_stars>1-10
/*
//@HEADER
// *****************************************************************************
//
// XtraPuLP: Xtreme-Scale Graph Partitioning using Label Propagation
// Copyright (2016) Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact <NAME> (<EMAIL>)
// <NAME> (<EMAIL>)
// <NAME> (<EMAIL>)
//
// *****************************************************************************
//@HEADER
*/
#ifndef _LCA_COMMS_H_
#define _LCA_COMMS_H_
#include <mpi.h>
#include <omp.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <assert.h>
#include "comms.h"
#include "bicc_dist.h"
#include "util.h"
extern int procid, nprocs;
extern bool verbose, debug, verify;
#define MAX_SEND_SIZE 2147483648
#define THREAD_QUEUE_SIZE 1024
struct lca_thread_data_t {
int32_t tid;
uint64_t* thread_queue;
uint64_t* thread_finish;
uint64_t thread_queue_size;
uint64_t thread_finish_size;
};
struct lca_queue_data_t {
uint64_t* queue;
uint64_t* queue_next;
uint64_t* finish;
uint64_t queue_size;
uint64_t next_size;
uint64_t finish_size;
};
// Allocate the process-wide LCA traversal queues sized by the number of
// local + ghost vertices.
// NOTE(review): the 100x over-allocation factor is not explained here —
// presumably headroom for multiple queue entries per vertex; confirm the
// worst-case bound before shrinking it.
inline void init_queue_lca(dist_graph_t* g, lca_queue_data_t* lcaq){
  if (debug) { printf("Task %d init_queue_lca() start\n", procid);}

  uint64_t queue_size = g->n_local + g->n_ghost;
  lcaq->queue = (uint64_t*)malloc(100*queue_size*sizeof(uint64_t));
  lcaq->queue_next = (uint64_t*)malloc(100*queue_size*sizeof(uint64_t));
  lcaq->finish = (uint64_t*)malloc(100*queue_size*sizeof(uint64_t));
  if (lcaq->queue == NULL || lcaq->queue_next == NULL || lcaq->finish == NULL)
    throw_err("init_queue_lca(), unable to allocate resources\n",procid);

  lcaq->queue_size = 0;
  lcaq->next_size = 0;
  lcaq->finish_size = 0;

  if(debug){printf("Task %d init_queue_lca() success\n", procid); }
}
// Release the three queue buffers allocated by init_queue_lca().
inline void clear_queue_lca(lca_queue_data_t* lcaq){
  if(debug){ printf("Task %d clear_queue_lca() start\n",procid); }

  free(lcaq->finish);
  free(lcaq->queue_next);
  free(lcaq->queue);

  if(debug) {printf("Task %d clear_queue_lca() success\n", procid); }
}
// Initialize one thread's private LCA staging buffers.
// Fix: the original assigned lcat->tid = omp_get_thread_num() twice
// (before the mallocs and again after the error check); the redundant
// second assignment is removed.
inline void init_thread_lca(lca_thread_data_t* lcat) {
  if (debug) { printf("Task %d init_thread_queue() start\n", procid);}

  lcat->tid = omp_get_thread_num();
  lcat->thread_queue = (uint64_t*)malloc(THREAD_QUEUE_SIZE*sizeof(uint64_t));
  lcat->thread_finish = (uint64_t*)malloc(THREAD_QUEUE_SIZE*sizeof(uint64_t));
  if (lcat->thread_queue == NULL || lcat->thread_finish == NULL)
    throw_err("init_thread_lca(), unable to allocate resources\n", procid, lcat->tid);

  lcat->thread_queue_size = 0;
  lcat->thread_finish_size = 0;

  if (debug) {printf("Task %d init_thread_queue() success\n", procid); }
}
// Release the per-thread staging buffers created by init_thread_lca().
inline void clear_thread_lca(lca_thread_data_t* lcat){
  free(lcat->thread_finish);
  free(lcat->thread_queue);
}
// Build the send-side displacement table (prefix sums of per-rank counts)
// and allocate a single flat send buffer large enough for all ranks.
inline void init_sendbuf_lca(mpi_data_t* comm){
  comm->sdispls_temp[0] = 0;
  comm->total_send = comm->sendcounts_temp[0];
  // Standard exclusive prefix sum over per-rank send counts.
  for (int32_t i = 1; i < nprocs; ++i){
    comm->sdispls_temp[i] = comm->sdispls_temp[i-1] + comm->sendcounts_temp[i-1];
    comm->total_send += comm->sendcounts_temp[i];
  }

  if (debug) printf("Task %d total_send %lu\n", procid, comm->total_send);

  comm->sendbuf_vert = (uint64_t*)malloc(comm->total_send*sizeof(uint64_t));
  if (comm->sendbuf_vert == NULL)
    throw_err("init_sendbuf_lca(), unable to allocate resources\n", procid);
}
// Free the receive buffer and zero both per-rank send-count arrays so the
// comm structure is ready for the next exchange round.
inline void clear_recvbuf_lca(mpi_data_t* comm){
  free(comm->recvbuf_vert);

  for (int32_t i = 0; i < nprocs; ++i)
  {
    comm->sendcounts[i] = 0;
    comm->sendcounts_temp[i] = 0;
  }
}
// --- Forward declarations -------------------------------------------------
// Per-thread enqueue helpers (add_*) buffer entries locally and flush to the
// shared queues (empty_*) when near capacity; update_/empty_lca_* stage data
// into per-rank MPI send buffers; exchange_lca performs the alltoallv.
inline void add_to_lca(lca_thread_data_t* lcat, lca_queue_data_t* lcaq,
  uint64_t vert1, uint64_t pred1, uint64_t level1,
  uint64_t vert2, uint64_t pred2, uint64_t level2);

inline void empty_lca_queue(lca_thread_data_t* lcat, lca_queue_data_t* lcaq);

inline void add_to_finish(lca_thread_data_t* lcat, lca_queue_data_t* lcaq,
  uint64_t vert1, uint64_t pred1, uint64_t level1);

inline void empty_finish_queue(lca_thread_data_t* lcat, lca_queue_data_t* lcaq);

inline void update_lca_send(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq, uint64_t index, int32_t send_rank);

inline void empty_lca_send(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq);

inline void update_lca_finish(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq, uint64_t index, int32_t send_rank);
//(dist_graph_t* g, thread_comm_t* tc, mpi_data_t* comm,
//  lca_queue_data_t* lcaq, uint64_t index, int32_t send_rank);

inline void empty_lca_finish(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq);

inline void exchange_lca(dist_graph_t* g, mpi_data_t* comm);
// Append one 6-word LCA work item (two vertex/pred/level triples) to this
// thread's private queue, flushing to the shared next-queue when another
// item might not fit.
inline void add_to_lca(lca_thread_data_t* lcat, lca_queue_data_t* lcaq,
  uint64_t vert1, uint64_t pred1, uint64_t level1,
  uint64_t vert2, uint64_t pred2, uint64_t level2)
{
  lcat->thread_queue[lcat->thread_queue_size++] = vert1;
  lcat->thread_queue[lcat->thread_queue_size++] = pred1;
  lcat->thread_queue[lcat->thread_queue_size++] = level1;
  lcat->thread_queue[lcat->thread_queue_size++] = vert2;
  lcat->thread_queue[lcat->thread_queue_size++] = pred2;
  lcat->thread_queue[lcat->thread_queue_size++] = level2;

  // Flush early so the next 6-word item is guaranteed to fit.
  if (lcat->thread_queue_size+6 >= THREAD_QUEUE_SIZE)
    empty_lca_queue(lcat, lcaq);
}
// Flush this thread's private queue into the shared next-queue. The atomic
// capture reserves a contiguous region: it returns the post-add value, so
// subtracting the thread's count yields this thread's exclusive start offset.
inline void empty_lca_queue(lca_thread_data_t* lcat, lca_queue_data_t* lcaq)
{
  uint64_t start_offset;

#pragma omp atomic capture
  start_offset = lcaq->next_size += lcat->thread_queue_size;

  start_offset -= lcat->thread_queue_size;
  for (uint64_t i = 0; i < lcat->thread_queue_size; ++i)
    lcaq->queue_next[start_offset + i] = lcat->thread_queue[i];
  lcat->thread_queue_size = 0;
}
// Append one 3-word finish record (vertex, pred, level) to this thread's
// private finish buffer, flushing to the shared finish queue when another
// record might not fit.
inline void add_to_finish(lca_thread_data_t* lcat, lca_queue_data_t* lcaq,
  uint64_t vert1, uint64_t pred1, uint64_t level1)
{
  lcat->thread_finish[lcat->thread_finish_size++] = vert1;
  lcat->thread_finish[lcat->thread_finish_size++] = pred1;
  lcat->thread_finish[lcat->thread_finish_size++] = level1;

  if (lcat->thread_finish_size+3 >= THREAD_QUEUE_SIZE)
    empty_finish_queue(lcat, lcaq);
}
// Flush this thread's private finish buffer into the shared finish queue,
// using the same atomic-capture reservation scheme as empty_lca_queue().
inline void empty_finish_queue(lca_thread_data_t* lcat, lca_queue_data_t* lcaq)
{
  uint64_t start_offset;

#pragma omp atomic capture
  start_offset = lcaq->finish_size += lcat->thread_finish_size;

  start_offset -= lcat->thread_finish_size;
  for (uint64_t i = 0; i < lcat->thread_finish_size; ++i)
    lcaq->finish[start_offset + i] = lcat->thread_finish[i];
  lcat->thread_finish_size = 0;
}
// Stage one 6-word queue entry (starting at `index` in queue_next) into this
// thread's per-rank send buffer, destined for `send_rank`.
// NOTE(review): sendcounts_thread[send_rank] is not incremented here (the
// increment is commented out), yet empty_lca_send() reads it to reserve
// space — confirm callers update the count elsewhere before flushing.
inline void update_lca_send(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq, uint64_t index, int32_t send_rank)
{
  // One rank tag per 6-word entry: entry k lives at sendbuf_vert_thread[6k],
  // its destination at sendbuf_rank_thread[k].
  tc->sendbuf_rank_thread[tc->thread_queue_size/6] = send_rank;
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+1];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+2];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+3];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+4];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+5];
  //++tc->thread_queue_size;
  //++tc->sendcounts_thread[send_rank];

  if (tc->thread_queue_size+6 >= THREAD_QUEUE_SIZE)
    empty_lca_send(tc, comm, lcaq);
}
// Flush this thread's staged 6-word entries into the global MPI send buffer.
// Each thread atomically reserves a per-rank region in sendbuf_vert using
// the shared displacement counters, then scatters its entries by rank.
inline void empty_lca_send(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq)
{
  // Reserve space per destination rank: atomic capture returns the advanced
  // displacement; subtracting this thread's count gives its start slot.
  for (int32_t i = 0; i < nprocs; ++i)
  {
#pragma omp atomic capture
    tc->thread_starts[i] = comm->sdispls_temp[i] += tc->sendcounts_thread[i];

    tc->thread_starts[i] -= tc->sendcounts_thread[i];
  }

  // Copy each 6-word entry into its destination rank's region.
  for (uint64_t i = 0; i < tc->thread_queue_size; i+=6)
  {
    int32_t cur_rank = tc->sendbuf_rank_thread[i/6];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]] =
      tc->sendbuf_vert_thread[i];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+1] =
      tc->sendbuf_vert_thread[i+1];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+2] =
      tc->sendbuf_vert_thread[i+2];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+3] =
      tc->sendbuf_vert_thread[i+3];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+4] =
      tc->sendbuf_vert_thread[i+4];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+5] =
      tc->sendbuf_vert_thread[i+5];
    tc->thread_starts[cur_rank] += 6;
  }

  // Reset per-thread state for the next batch.
  for (int32_t i = 0; i < nprocs; ++i)
  {
    tc->thread_starts[i] = 0;
    tc->sendcounts_thread[i] = 0;
  }
  tc->thread_queue_size = 0;
}
// Stage one 3-word finish record (starting at `index` in the finish queue)
// into this thread's per-rank send buffer, destined for `send_rank`.
// NOTE(review): the flush threshold uses +6 although only 3 words are added
// per call — conservative but harmless; confirm whether +3 was intended.
inline void update_lca_finish(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq, uint64_t index, int32_t send_rank)
{
  // for (int32_t i = 0; i < nprocs; ++i)
  //   tc->v_to_rank[i] = false;
  // uint64_t out_degree = out_degree(g, vert_index);
  // uint64_t* outs = out_vertices(g, vert_index);
  // for (uint64_t j = 0; j < out_degree; ++j)
  // {
  //   uint64_t out_index = outs[j];
  //   if (out_index >= g->n_local)
  //   {
  //     int32_t out_rank = g->ghost_tasks[out_index - g->n_local];
  //     if (!tc->v_to_rank[out_rank])
  //     {
  //       tc->v_to_rank[out_rank] = true;
  //       add_vid_data_to_send(tc, comm,
  //         g->local_unmap[vert_index], data, out_rank);
  //     }
  //   }
  // }

  // One rank tag per 3-word record (record k at sendbuf_vert_thread[3k]).
  tc->sendbuf_rank_thread[tc->thread_queue_size/3] = send_rank;
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->finish[index];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->finish[index+1];
  tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->finish[index+2];
  //++tc->thread_queue_size;
  //++tc->sendcounts_thread[send_rank];

  if (tc->thread_queue_size+6 >= THREAD_QUEUE_SIZE)
    empty_lca_finish(tc, comm, lcaq);
}
// inline void add_data_to_finish(thread_comm_t* tc, mpi_data_t* comm,
// lca_queue_data_t* lcaq, uint64_t index, int32_t send_rank)
// {
// tc->sendbuf_rank_thread[tc->thread_queue_size/3] = send_rank;
// tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index];
// tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+1];
// tc->sendbuf_vert_thread[tc->thread_queue_size++] = lcaq->queue_next[index+2];
// ++tc->thread_queue_size;
// ++tc->sendcounts_thread[send_rank];
// if (tc->thread_queue_size+3 >= THREAD_QUEUE_SIZE)
// empty_lca_finish(tc, comm, lcaq);
// }
// Flush this thread's staged 3-word finish records into the global MPI send
// buffer; same atomic reservation scheme as empty_lca_send(), with a 3-word
// stride instead of 6.
inline void empty_lca_finish(thread_comm_t* tc, mpi_data_t* comm,
  lca_queue_data_t* lcaq)
{
  // Reserve per-rank regions via atomic capture on the shared displacements.
  for (int32_t i = 0; i < nprocs; ++i)
  {
#pragma omp atomic capture
    tc->thread_starts[i] = comm->sdispls_temp[i] += tc->sendcounts_thread[i];

    tc->thread_starts[i] -= tc->sendcounts_thread[i];
  }

  // Scatter each 3-word record into its destination rank's region.
  for (uint64_t i = 0; i < tc->thread_queue_size; i+=3)
  {
    int32_t cur_rank = tc->sendbuf_rank_thread[i/3];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]] =
      tc->sendbuf_vert_thread[i];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+1] =
      tc->sendbuf_vert_thread[i+1];
    comm->sendbuf_vert[tc->thread_starts[cur_rank]+2] =
      tc->sendbuf_vert_thread[i+2];
    tc->thread_starts[cur_rank] += 3;
  }

  // Reset per-thread state for the next batch.
  for (int32_t i = 0; i < nprocs; ++i)
  {
    tc->thread_starts[i] = 0;
    tc->sendcounts_thread[i] = 0;
  }
  tc->thread_queue_size = 0;
}
// Exchange staged send data between all ranks. Because a single alltoallv
// payload may exceed MPI's int-count limits, the transfer is split into
// num_comms rounds sized so each round stays under MAX_SEND_SIZE globally.
inline void exchange_lca(dist_graph_t* g, mpi_data_t* comm)
{
  for (int32_t i = 0; i < nprocs; ++i)
    comm->recvcounts_temp[i] = 0;

  // sdispls_temp was advanced while filling the buffer; rewind it back to
  // each rank's region start.
  for (int32_t i = 0; i < nprocs; ++i)
    comm->sdispls_temp[i] -= comm->sendcounts_temp[i];

  MPI_Alltoall(comm->sendcounts_temp, 1, MPI_UINT64_T,
               comm->recvcounts_temp, 1, MPI_UINT64_T, MPI_COMM_WORLD);

  comm->total_recv = 0;
  for (int i = 0; i < nprocs; ++i)
    comm->total_recv += comm->recvcounts_temp[i];

  comm->recvbuf_vert = (uint64_t*)malloc(comm->total_recv*sizeof(uint64_t));
  if (comm->recvbuf_vert == NULL)
    throw_err("exchange_lca() unable to allocate recv buffers", procid);

  // Agree globally on the number of rounds from the total payload size.
  uint64_t task_queue_size = comm->total_send;
  uint64_t current_global_size = 0;
  MPI_Allreduce(&task_queue_size, &current_global_size, 1,
                MPI_UINT64_T, MPI_SUM, MPI_COMM_WORLD);

  uint64_t num_comms = current_global_size / (uint64_t)MAX_SEND_SIZE + 1;
  uint64_t sum_recv = 0;
  uint64_t sum_send = 0;
  for (uint64_t c = 0; c < num_comms; ++c)
  {
    // Round c sends the [c/num_comms, (c+1)/num_comms) slice of each rank's
    // data; the final round absorbs any rounding remainder.
    for (int32_t i = 0; i < nprocs; ++i)
    {
      uint64_t send_begin = (comm->sendcounts_temp[i] * c) / num_comms;
      uint64_t send_end = (comm->sendcounts_temp[i] * (c + 1)) / num_comms;
      if (c == (num_comms-1))
        send_end = comm->sendcounts_temp[i];
      comm->sendcounts[i] = (int32_t)(send_end - send_begin);
      assert(comm->sendcounts[i] >= 0);
    }

    MPI_Alltoall(comm->sendcounts, 1, MPI_INT32_T,
                 comm->recvcounts, 1, MPI_INT32_T, MPI_COMM_WORLD);

    // Prefix sums for this round's send/recv displacements.
    comm->sdispls[0] = 0;
    comm->sdispls_cpy[0] = 0;
    comm->rdispls[0] = 0;
    for (int32_t i = 1; i < nprocs; ++i)
    {
      comm->sdispls[i] = comm->sdispls[i-1] + comm->sendcounts[i-1];
      comm->rdispls[i] = comm->rdispls[i-1] + comm->recvcounts[i-1];
      comm->sdispls_cpy[i] = comm->sdispls[i];
    }

    int32_t cur_send = comm->sdispls[nprocs-1] + comm->sendcounts[nprocs-1];
    int32_t cur_recv = comm->rdispls[nprocs-1] + comm->recvcounts[nprocs-1];
    uint64_t* buf_v = (uint64_t*)malloc((uint64_t)(cur_send)*sizeof(uint64_t));
    if (buf_v == NULL)
      throw_err("exchange_verts(), unable to allocate comm buffers", procid);

    // Pack this round's slice into a compact per-round buffer.
    for (int32_t i = 0; i < nprocs; ++i)
    {
      uint64_t send_begin = (comm->sendcounts_temp[i] * c) / num_comms;
      uint64_t send_end = (comm->sendcounts_temp[i] * (c + 1)) / num_comms;
      if (c == (num_comms-1))
        send_end = comm->sendcounts_temp[i];

      for (uint64_t j = send_begin; j < send_end; ++j)
      {
        uint64_t data = comm->sendbuf_vert[comm->sdispls_temp[i]+j];
        buf_v[comm->sdispls_cpy[i]++] = data;
      }
    }

    // Receive directly into the aggregate buffer at the running offset.
    MPI_Alltoallv(buf_v, comm->sendcounts,
                  comm->sdispls, MPI_UINT64_T,
                  comm->recvbuf_vert+sum_recv, comm->recvcounts,
                  comm->rdispls, MPI_UINT64_T, MPI_COMM_WORLD);
    free(buf_v);
    sum_recv += cur_recv;
    sum_send += cur_send;
  }

  free(comm->sendbuf_vert);
  assert(sum_recv == comm->total_recv);
  assert(sum_send == comm->total_send);
}
#endif
|
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { SaleByCustomerComponent } from './sale-by-customer.component';
// Default generated Angular spec: verifies the component can be compiled
// and instantiated by TestBed.
describe('SaleByCustomerComponent', () => {
  let component: SaleByCustomerComponent;
  let fixture: ComponentFixture<SaleByCustomerComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ SaleByCustomerComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(SaleByCustomerComponent);
    component = fixture.componentInstance;
    // Trigger initial data binding / ngOnInit.
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
/*
Given a character, determine whether it represents an odd digit, an even
digit, or not a digit at all.
Example
For symbol = '5', the output should be
characterParity(symbol) = "odd";
For symbol = '8', the output should be
characterParity(symbol) = "even";
For symbol = 'q', the output should be
characterParity(symbol) = "not a digit".
Input/Output
[execution time limit] 4 seconds (js)
[input] char symbol
[output] string
*/
/**
 * Classify a single character as an odd digit, an even digit, or not a digit.
 *
 * Fix: the original relied on isNaN(), which misclassifies ' ' and '' as
 * digits because Number(' ') === 0; an explicit single-digit check is used.
 *
 * @param {string} symbol - A one-character string.
 * @returns {string} "odd", "even", or "not a digit".
 */
function characterParity(symbol) {
  if (!/^[0-9]$/.test(symbol)) {
    return "not a digit";
  }
  return Number(symbol) % 2 === 0 ? "even" : "odd";
}
const q1 = "5"; // "odd"
const q2 = "8"; // "even"
const q3 = "q"; // "not a digit"
const q4 = "1"; // "odd"
const q5 = "2"; // "even"
const q6 = "7"; // "odd"
const q7 = "9"; // "odd"
console.log(characterParity(q1));
console.log(characterParity(q2));
console.log(characterParity(q3));
console.log(characterParity(q4));
console.log(characterParity(q5));
console.log(characterParity(q6));
console.log(characterParity(q7));
|
#!/bin/bash
# Usage: the first argument selects the build type:
# - release, to build release only
# - debug, to build debug only
# - continuous, to build release and debug
# - presubmit, for presubmit builds
#
# The default is release
echo "This script is intended to run in a CI environment and may modify your current environment."
echo "Please refer to BUILDING.md for more information."
read -r -p "Do you wish to proceed (y/n)? " choice
case "${choice}" in
y|Y)
echo "Build will proceed..."
;;
n|N)
exit 0
;;
*)
# NOTE(review): any unrecognized answer exits with status 0 (success), the
# same as an explicit "n" -- confirm a non-zero status is not expected here.
exit 0
;;
esac
# Fail fast and echo commands only after the interactive prompt.
set -e
set -x
source `dirname $0`/../common/ci-common.sh
source `dirname $0`/ci-common.sh
source `dirname $0`/../common/build-common.sh
pushd `dirname $0`/../.. > /dev/null
# Flags below are presumably set by the sourced ci/build-common scripts; they
# are deliberately unquoted so empty variables disappear from the command line.
./build.sh -p webgl -c $RUN_TESTS $GENERATE_ARCHIVES $BUILD_DEBUG $BUILD_RELEASE
|
<filename>tools/snippet-testing/language_handler/java_6.rb
require_relative 'java'
require_relative '../model/dependency'
module LanguageHandler
  # Handler that runs documentation snippets against version 6.x of the
  # twilio-java helper library. Compile/run plumbing is inherited from Java.
  class Java6 < Java
    LANG_CNAME = '6.x.java'.freeze
    TEST_CLASS_NAME = 'Example6'.freeze
    TWILIO_LIBRARY_VERSION = '6'.freeze

    private

    # Compile and execute a single snippet file.
    def execute(file)
      puts "Running Java 6.x on #{file}"
      execute_command(file)
    end

    # Jars placed on the classpath for the 6.x library.
    def get_dependencies
      Model::Dependency.new.get_java_6_jars
    end

    # Class name each snippet is expected to declare.
    def test_class_name
      TEST_CLASS_NAME
    end
  end
end
|
"""A module for demonstrating exceptions."""
|
#include <filesys/inode.h>
#include <device/ata.h>
#include <device/block.h>
#include <bitmap.h>
#include <device/console.h>
#include <string.h>
//Block 12~15 valid inode number = 256
//inode size = 64byte, block size = 4096byte, 64 inode/block
char tmpblock[SSU_BLOCK_SIZE];
extern struct process *cur_process;
/* Initialize the in-memory inode table: load all inode blocks from disk and,
 * on a fresh filesystem (root inode bit not yet set), create the root
 * directory and persist the table and bitmap. */
void init_inode_table(struct ssu_fs *fs)
{
	struct bitmap *imap = fs->fs_inodemap;
	int i;
	for(i=0; i<NUM_INODEBLOCK; i++)
		fs_readblock(fs, SSU_INODE_BLOCK + i, ((char*)inode_tbl) + (i * SSU_BLOCK_SIZE));
	if(!bitmap_test(imap, INO_ROOTDIR))
	{
		memset(inode_tbl, 0, sizeof(struct inode) * NUM_INODE);
		// inodes 0 and 1 are reserved (invalid / root)
		bitmap_set(imap, 0, true);
		bitmap_set(imap, 1, true);
		inode_tbl[0].sn_ino = 0;
		inode_tbl[0].sn_size = 0;
		inode_tbl[0].sn_type = SSU_TYPE_DIR;
		inode_tbl[0].sn_nlink = 0;
		inode_tbl[0].sn_refcount = 0;
		inode_tbl[0].sn_fs = fs;
		// Root directory set: create "." under the table's first slot and
		// persist the root inode and the inode bitmap.
		make_dir(inode_tbl, ".");
		sync_inode(fs, inode_tbl + INO_ROOTDIR);
		sync_bitmapblock(fs);
	}
	cur_process->rootdir = inode_tbl + INO_ROOTDIR;
	cur_process->cwd = cur_process->rootdir;
}
/* Allocate the first free inode of the given type, mark it in the inode
 * bitmap and persist both the inode and the bitmap.
 * Returns NULL when the inode table is full. */
struct inode* inode_create(struct ssu_fs *fs, uint16_t type)
{
	struct inode *new_inode;
	struct bitmap *imap = fs->fs_inodemap;
	int idx;
	for(idx = 0; idx <NUM_INODE; idx++)
		if(!bitmap_test(imap, idx))
			break;
	if(idx >= NUM_INODE) // no free slot: the whole inode table is in use
		return NULL;
	new_inode = inode_tbl + idx;
	new_inode->sn_ino = idx;
	new_inode->sn_size = 0;
	new_inode->sn_type = type;
	new_inode->sn_nlink = 0;
	new_inode->sn_refcount = 0;
	new_inode->sn_fs = fs;
	bitmap_set(imap, idx, true);
	sync_inode(fs, new_inode);
	sync_bitmapblock(fs);
	return new_inode;
}
/* Write `len` bytes from `buf` to the inode at byte `offset` on disk.
 * Allocates a new direct block when the target block is untouched, grows
 * sn_size when the write extends the file, and returns 0 on success or -1
 * when `offset` is past the end of the file.
 * NOTE(review): a write spanning a block boundary (res_off + len >
 * SSU_BLOCK_SIZE) would overrun tmpblock -- callers appear to write one
 * direntry-sized record at a time; confirm. */
int inode_write(struct inode * in, uint32_t offset, char * buf, int len)
{
	int result=0;
	struct ssu_fs * fs = in->sn_fs;
	uint32_t blkoff = offset / SSU_BLOCK_SIZE; // index of the direct block
	uint32_t res_off = offset % SSU_BLOCK_SIZE; // byte offset within the block
	if(offset > in->sn_size) // cannot start a write past end-of-file
		return -1;
	memset(tmpblock, 0, SSU_BLOCK_SIZE); // scratch block, zeroed each call
	if(res_off != 0 || blkoff < in->sn_nlink) // block already holds data
	{
		/* Read-modify-write: load the existing block so the bytes outside
		 * [res_off, res_off+len) are preserved. */
		fs_readblock(fs, in->sn_directblock[blkoff], tmpblock);
	}
	else // target block not yet allocated
	{
		balloc(fs->fs_blkmap, &(in->sn_directblock[blkoff])); // grab a free block
		in->sn_nlink++; // one more direct block in use
		sync_bitmapblock(fs); // persist the block bitmap
	}
	memcpy(tmpblock + res_off, buf, len); // splice the new bytes in
	fs_writeblock(fs, in->sn_directblock[blkoff], tmpblock);
	if(in->sn_size < offset+len)
		in->sn_size = offset+len; // write extended the file: grow its size
	sync_inode(fs, in); // persist the updated inode
	return result;
}
/* Read `len` bytes at byte `offset` of the inode into `buf`.
 * Returns 0 on success, -1 when `offset` is past end-of-file.
 * NOTE(review): only the single block containing `offset` is loaded, so a
 * read crossing a block boundary returns zero-filled bytes for the tail --
 * confirm callers never read across blocks. */
int inode_read(struct inode * in, uint32_t offset, char * buf, int len)
{
	int result=0;
	struct ssu_fs * fs = in->sn_fs;
	uint32_t blkoff = offset / SSU_BLOCK_SIZE; // direct block holding offset
	uint32_t res_off = offset % SSU_BLOCK_SIZE; // byte offset inside the block
	if(offset > in->sn_size)
		return -1;
	memset(tmpblock, 0, SSU_BLOCK_SIZE);
	fs_readblock(fs, in->sn_directblock[blkoff], tmpblock);
	memcpy(buf, tmpblock + res_off, len);
	return result;
}
/* Open (or create) a file named `pathname` in the current working directory.
 * `flags` is currently unused. Returns the existing inode when a directory
 * entry with the same name is found; otherwise creates a new file inode,
 * links it into the current directory and returns it. Returns NULL when no
 * free inode is available. */
struct inode *inode_open(const char *pathname, int flags)
{
	struct inode *inode_cur;
	struct inode *new_in;
	struct direntry buf_dir;
	char fname_buf[FILENAME_LEN];
	int ndir;
	int i;
	inode_cur = cur_process->cwd; // start from the current working directory
	/* BUG FIX: the copy was unbounded and overflowed fname_buf whenever
	 * pathname was FILENAME_LEN bytes or longer; bound it and truncate. */
	for(i=0; pathname[i] != '\0' && i < FILENAME_LEN - 1; i++)
		fname_buf[i] = pathname[i];
	fname_buf[i] = '\0';
	ndir = num_direntry(inode_cur); // number of entries in the directory
	for(i=0; i<ndir; i++)
	{
		inode_read(inode_cur, i*sizeof(struct direntry), (char *)&buf_dir, sizeof(struct direntry));
		if(strncmp(buf_dir.de_name, fname_buf,FILENAME_LEN) == 0) return &(inode_tbl[buf_dir.de_ino]); // name exists: return it, no creation
	}
	new_in = inode_create(cur_process->cwd->sn_fs,SSU_TYPE_FILE);
	/* BUG FIX: inode_create returns NULL when the inode table is full; the
	 * original dereferenced it unconditionally. */
	if(new_in == NULL)
		return NULL;
	buf_dir.de_ino = new_in->sn_ino;
	strlcpy(buf_dir.de_name, fname_buf, FILENAME_LEN);
	inode_write(inode_cur, num_direntry(inode_cur)*sizeof(buf_dir), (char *)&buf_dir, sizeof(buf_dir)); // append entry to parent
	return new_in;
}
/* Persist the inode-table block that contains `inode` back to disk.
 * Note the whole containing block (INODE_PER_BLOCK inodes) is written, not
 * just the single inode. Returns fs_writeblock's result. */
static int sync_inode(struct ssu_fs *fs, struct inode* inode)
{
	int result = 0;
	int offset = inode->sn_ino / INODE_PER_BLOCK; // block index within the inode area
	result = fs_writeblock(fs, SSU_INODE_BLOCK + offset, (char*)(inode_tbl + offset*INODE_PER_BLOCK));
	return result;
}
/* Directory-related functions below. */
/* Create a directory named `name` inside `cwd`. Rejects duplicate names,
 * appends the new entry to the parent (except when bootstrapping the root,
 * cwd->sn_ino < INO_ROOTDIR), and writes ".." then "." into the new
 * directory. Returns 0 on success, -1 on error. */
int make_dir(struct inode *cwd, char *name)
{
	struct direntry newde, cde, pde, tmp;
	struct inode * newin;
	struct ssu_fs *fs = cwd->sn_fs;
	int ndir, i;
	int len = strnlen(name, FILENAME_LEN);
	// NOTE(review): strnlen caps len at FILENAME_LEN, so `len > FILENAME_LEN`
	// can never be true; an exactly-FILENAME_LEN-char name slips through and
	// is truncated by strlcpy below -- confirm intended limit.
	if(len > FILENAME_LEN || len <= 0)
	{
		printk("Unvalid filename length.\n");
		return -1;
	}
	if(cwd->sn_ino >= INO_ROOTDIR)
	{
		// Reject a name that already exists in the parent directory.
		ndir = num_direntry(cwd);
		for(i=0; i<ndir; i++)
		{
			inode_read(cwd, i*sizeof(struct direntry), (char*)&tmp, sizeof(struct direntry));
			if( strncmp(name, tmp.de_name, len) == 0 && tmp.de_name[len] == 0)
			{
				printk("Already exist filename.\n");
				return -1;
			}
		}
	}
	// NOTE(review): inode_create may return NULL (table full); the result is
	// dereferenced unchecked here.
	newin = inode_create(fs, SSU_TYPE_DIR);
	newde.de_ino = newin->sn_ino;
	strlcpy(newde.de_name, name, len+1);
	if(cwd->sn_ino >= INO_ROOTDIR)
		inode_write(cwd, cwd->sn_size, (char*)&newde, sizeof(struct direntry));
	else
		cwd = newin; // bootstrapping the root: it is its own parent
	cde.de_ino = newde.de_ino;
	strlcpy(cde.de_name, ".", 2);
	pde.de_ino = cwd->sn_ino;
	strlcpy(pde.de_name, "..", 3);
	// Entry 0 of every directory is "..", entry 1 is ".".
	inode_write(newin, newin->sn_size, (char*)&pde, sizeof(struct direntry));
	inode_write(newin, newin->sn_size, (char*)&cde, sizeof(struct direntry));
	return 0;
}
/* Number of directory entries stored in `in`.
 * Returns -1 when `in` is not a directory or its size is not an exact
 * multiple of the entry size. */
static int num_direntry(struct inode *in)
{
	const size_t entry_size = sizeof(struct direntry);
	if(in->sn_type != SSU_TYPE_DIR)
		return -1;
	if(in->sn_size % entry_size != 0)
		return -1;
	return in->sn_size / entry_size;
}
/* List the entries of directory `cwd` on the console: name, size, type
 * ('d' for directory, 'n' otherwise), block count (sn_nlink) and inode
 * number. */
void list_segment(struct inode *cwd)
{
	int i;
	int ndir = num_direntry(cwd);
	struct inode *in;
	struct direntry de;
	printk("name | size | type | blocks | ino\n");
	for(i=0; i<ndir; i++)
	{
		// Resolve each entry's inode to print its metadata.
		inode_read(cwd, i*sizeof(struct direntry), (char*)&de, sizeof(struct direntry));
		in = &inode_tbl[de.de_ino];
		printk("%s | %d | %c | %d | %d\n", de.de_name, in->sn_size,
			(in->sn_type == SSU_TYPE_DIR) ? 'd' : 'n', in->sn_nlink, in->sn_ino);
	}
}
/* Change the current working directory to the entry named `path` inside
 * `cwd`. A null path resets cwd to the root directory. Returns 0 on
 * success, -1 on error (not found, not a directory, or null path). */
int change_dir(struct inode *cwd, char *path)
{
	int i;
	int ndir;
	int len;
	struct inode *in;
	struct direntry de;
	/* BUG FIX: the null check must run before strnlen(path, ...) and the
	 * other initializers; the original called strnlen(NULL, ...) first,
	 * dereferencing a null pointer. */
	if(path == 0)
	{
		cur_process->cwd = cur_process->rootdir;
		return -1;
	}
	ndir = num_direntry(cwd);
	len = strnlen(path, FILENAME_LEN);
	for(i=0; i<ndir; i++)
	{
		inode_read(cwd, i*sizeof(struct direntry), (char*)&de, sizeof(struct direntry));
		if( strncmp(path, de.de_name, len) == 0 && de.de_name[len] == 0)
		{
			in = &inode_tbl[de.de_ino];
			if(in->sn_type != SSU_TYPE_DIR)
			{
				printk("Not a Directory.\n");
				return -1;
			}
			cur_process->cwd = in;
			return 0;
		}
	}
	printk("Not found directory\n");
	return -1;
}
/* Find the direntry in the parent directory that points back to `cwd`,
 * returning it through `de`. Entry 0 of a directory is ".." (see make_dir's
 * write order), so it is used to reach the parent; entries 0 and 1 ("..",
 * ".") are then skipped during the scan. Returns 0 when found, -1 otherwise
 * (e.g. for the root, which is its own parent). */
int get_curde(struct inode *cwd, struct direntry * de)
{
	struct inode *pwd;
	int i, ndir;
	//get parent dir via the ".." entry at offset 0
	inode_read(cwd, 0, (char*)de, sizeof(struct direntry));
	pwd = &inode_tbl[de->de_ino];
	ndir = num_direntry(pwd);
	for(i=2; i<ndir; i++)
	{
		inode_read(pwd, i*sizeof(struct direntry), (char*)de, sizeof(struct direntry));
		if(de->de_ino == cwd->sn_ino)
			return 0;
	}
	return -1;
}
|
<filename>lib/generators/sunrise/templates/models/mongoid/structure_type.rb
# frozen_string_literal: true
class StructureType < Sunrise::Models::StructureType
  # Enumerates the page-structure kinds known to the CMS; each enum member
  # wraps a StructureType instance keyed by its slug.
  define_enum do |builder|
    builder.member :page, object: new('page')
    builder.member :posts, object: new('posts')
    builder.member :main, object: new('main')
    builder.member :redirect, object: new('redirect')
    builder.member :group, object: new('group')
  end
end
|
<filename>src/context/Notification.js<gh_stars>1-10
import React from "react";
import "./Notification.css";
/**
 * Presentational notification banner: renders its children inside a styled
 * container with an "X" affordance floated to the right.
 *
 * NOTE(review): the X handler only logs to the console -- no dismiss
 * callback is wired up. Confirm whether an onClose prop is intended.
 */
const Notification = props => (
  <div className="notification">
    {props.children}
    <span
      style={{ float: "right", cursor: "pointer" }}
      onClick={() => {
        console.log("X clicked");
      }}
    >
      X
    </span>
  </div>
);
export default Notification;
|
<reponame>Banuba/beauty-android-java<gh_stars>0
'use strict';
// Path (within the effect bundle) of the vertex shader used by the
// eye-whitening effect; re-exported as the module's default export.
const whiteningVertexShader = "modules/eyes/whitening.vert";
exports.default = whiteningVertexShader;
|
from runners.experiment_utils import load_experiment, save_experiment
from src import dataset, test, model
from src.utils import loaders, seed
import logging
from runners.utils import load_yaml
from . import cmd, document_parser
from argparse import ArgumentParser
import os
def evaluate(path_to_yml_file, eval_keys=['test']):
    """
    Evaluates an experiment across all of the data for each key in eval_keys. The key
    must correspond to a dataset included in the experiment configuration. This uses
    :py:class:`src.test.EvaluationRunner` to evaluate the performance of the model on
    each dataset.

    Args:
        path_to_yml_file (str): Path to the yml file that defines the experiment. The
            corresponding test configuration for the experiment will be used to evaluate
            the experiment across all of the audio files in the test dataset.
        eval_keys (list): All of the keys to be used to evaluate the experiment. Will
            run the evaluation on each eval_key in sequence. Defaults to ['test'].
    """
    config, exp, path_to_yml_file = load_experiment(path_to_yml_file)

    if 'seed' in config['info']:
        seed(config['info']['seed'])

    if 'test' not in config['datasets']:
        # BUG FIX: previously this only logged and fell through, so execution
        # continued with a broken configuration; abort instead.
        logging.error('Test dataset must be specified!')
        return

    # Instantiate (tester class, kwargs) pairs from the test configuration.
    test_classes = config['test_config']['testers']
    testers = []
    for key in test_classes:
        TestClass = getattr(test, key)
        args = test_classes[key]
        testers.append((TestClass, args))

    # Load one dataset per requested eval key; missing keys map to None.
    _datasets = {}
    for key in eval_keys:
        if key in config['datasets']:
            _datasets[key] = loaders.load_dataset(
                config['datasets'][key]['class'],
                config['datasets'][key]['folder'],
                config['dataset_config']
            )
        else:
            _datasets[key] = None

    for key in eval_keys:
        if _datasets[key] is None:
            # Robustness: skip keys with no dataset rather than handing None
            # to EvaluationRunner.
            logging.warning('No dataset configured for eval key %s; skipping.', key)
            continue
        _tester = test.EvaluationRunner(
            testers,
            config['algorithm_config'],
            _datasets[key],
            config['info']['output_folder'],
            max_workers=config['test_config']['num_workers'],
            use_blocking_executor=config['test_config']['use_blocking_executor'],
        )
        _tester.run()
@document_parser('evaluate', 'scripts.evaluate.evaluate')
def build_parser():
    """
    Builds the parser for the evaluate script. See the arguments to
    :py:func:`scripts.evaluate.evaluate`.

    Returns:
        :class:`argparse.ArgumentParser`: The parser for this script.
    """
    parser = ArgumentParser()
    # -p/--path_to_yml_file: the experiment definition (required).
    parser.add_argument(
        "-p",
        "--path_to_yml_file",
        type=str,
        required=True,
        help="""Path to the configuration for the experiment that is getting evaluated. The
        corresponding test configuration for the experiment will be used to evaluate
        the experiment across all of the audio files in the test dataset.
        """
    )
    # -e/--eval_keys: one or more dataset keys to evaluate, in order.
    parser.add_argument(
        "-e",
        "--eval_keys",
        nargs='+',
        type=str,
        default=['test'],
        help="""All of the keys to be used to evaluate the experiment. Will
        run the evaluation on each eval_key in sequence. Defaults to ['test'].
        """
    )
    return parser

if __name__ == '__main__':
    # Entry point: parse CLI args and dispatch to evaluate().
    cmd(evaluate, build_parser)
<reponame>lananh265/social-network
"use strict";

// Icon data for U+1F526 (flashlight): an SVG viewBox plus a single-path
// child tree, in the shape consumed by the project's icon renderer.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.u1F526 = void 0;
var u1F526 = {
  "viewBox": "0 0 2600 2760.837",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M683.5 1453q-195.5-78-326-199.5T227 950q2-172 109-338.5t294.5-276T1014 226q165 0 285 81t190.5 278 70.5 393q0 43-3.5 84.5t-7.5 61.5-7.5 35.5-36.5 75-84.5 111.5-150.5 108-155 67l-57 10q-179 0-374.5-78zM423 1187q105 94 265 94 162 0 324.5-90.5t266-243T1382 650q0-149-103.5-240.5T1014 318q-168 0-329.5 91t-264 240T318 948q0 145 105 239zm1452 1164l-749-752q111-12 220.5-79t177-151.5T1606 1224l619 822q8 26 8 53 0 96-91.5 181.5T1947 2366q-41 0-72-15zm-875-1237q-77 37-153 37-93 0-151-54t-58-136q0-122 118-233t256-111q93 0 152 53.5t59 138.5q0 41-15 83.5t-44 84.5q-2-52-41-90.5t-92-38.5q-54 0-94.5 39.5T896 983q0 48 29.5 85t74.5 46zm1343 1089q30 38 30 88 0 87-80 165.5t-172 78.5q-32 0-59.5-12t-49.5-33l-75-76q112 0 212-75t132-219z"
    },
    "children": []
  }]
};
exports.u1F526 = u1F526;
#!/bin/bash
# Run OWASP dependency-check against the CocoaPods lockfile passed in via the
# podfile_lock_file environment variable, labelling the report with the
# triggering Bitrise workflow title.
set -ex
echo "podfile provided ${podfile_lock_file}"
# BUG FIX: quote the expansions so titles/paths containing spaces survive
# word splitting and globbing.
dependency-check --project "$BITRISE_TRIGGERED_WORKFLOW_TITLE" --scan "$podfile_lock_file"
|
/*
* Copyright 2017 ~ 2025 the original author or authors. <<EMAIL>, <EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wl4g.devops.dguid.util;
import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
/**
* {@link WorkerIdUtils}
*
* @author Wangl.sir <<EMAIL>, <EMAIL>>
* @version v1.0 2019年2月10日
* @since
*/
public class WorkerIdUtils {
/**
* workerID文件 分隔符
*/
public static final String WORKER_SPLIT = "_";
/**
* @方法名称 getPidName
* @功能描述
*
* <pre>
* 获取workId文件名
* </pre>
*
* @param pidPort
* 使用端口(同机多uid应用时区分端口)
* @param socket
* @return
*/
public static String getPidName(Integer pidPort, ServerSocket socket) {
String pidName = NetUtils.getLocalInetAddress().getHostAddress();
if (-1 != pidPort) {
// 占用端口
pidPort = null != pidPort && pidPort > 0 ? pidPort : NetUtils.getAvailablePort();
try {
socket = new ServerSocket(pidPort);
} catch (IOException e) {
throw new RuntimeException("接口占用失败!");
}
}
return pidName + WorkerIdUtils.WORKER_SPLIT + pidPort;
}
/**
* @方法名称 getPid
* @功能描述
*
* <pre>
* 查找pid文件,根据前缀获取workid
* </pre>
*
* @param pidHome
* workerID文件存储路径
* @param prefix
* workerID文件前缀
* @return workerID值
*/
public static Long getPid(String pidHome, String prefix) {
String pid = null;
File home = new File(pidHome);
if (home.exists() && home.isDirectory()) {
File[] files = home.listFiles();
for (File file : files) {
if (file.getName().startsWith(prefix)) {
pid = file.getName();
break;
}
}
if (null != pid) {
return Long.valueOf(pid.substring(pid.lastIndexOf(WORKER_SPLIT) + 1));
}
} else {
home.mkdirs();
}
return null;
}
/**
* @方法名称 sleepMs
* @功能描述
*
* <pre>
* 回拨时间睡眠等待
* </pre>
*
* @param ms
* 平均心跳时间
* @param diff
* 回拨差时间
*/
public static void sleepMs(long ms, long diff) {
try {
Thread.sleep(ms);
} catch (InterruptedException e) {
}
diff += ms;
if (diff < 0) {
sleepMs(ms, diff);
}
}
/**
* @方法名称 writePidFile
* @功能描述
*
* <pre>
* 创建workerID文件(workerID文件已经存在,则不创建,返回一个false;如果没有,则返回true)
* </pre>
*
* @param name
*/
public static void writePidFile(String name) {
File pidFile = new File(name);
try {
pidFile.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
}
} |
#!/bin/bash
# ------------------------------------------------------------------------------------------------------------
# This script installs oh-my-zsh, creates a conda environment for neovim, and creates symlinks from the home
# directory to any desired dotfiles in ~/.dotfiles.
# ------------------------------------------------------------------------------------------------------------
# Variables
# ------------------------------------------------------------------------------------------------------------
DOTFILES_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
OLD_DOTFILES_DIR=~/.dotfiles_old
NVIM_DATA_DIR=~/.local/share
NVIM_CONFIG_DIR=~/.config/nvim
OH_MY_ZSH_DIR=~/.oh-my-zsh
OH_MY_ZSH_THEME_DIR=~/.oh-my-zsh/custom/themes
NVIM_FILES="nvim"
NVIM_CONFIG="init.vim"
ZSH_THEME="nord.zsh-theme"
NEOVIM_ENVIRONMENT="neovim.yml"
CONFIG_FILES="aliases bash_profile bashrc condarc flake8 gitconfig gitignore_global pydocstring screen_layout"
CONFIG_FILES="$CONFIG_FILES screenrc zshrc"
# Pull the latest changes.
# NOTE(review): pulls branch "IGI" specifically -- confirm this is the intended default branch.
# ------------------------------------------------------------------------------------------------------------
git pull origin IGI
# Install oh-my-zsh.
# ------------------------------------------------------------------------------------------------------------
if [ ! -d $OH_MY_ZSH_DIR ]; then
    echo "Installing oh-my-zsh in $OH_MY_ZSH_DIR ..."
    sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)" \
        "" --unattended
    echo "done"
fi
# Create OLD_DOTFILES_DIR directory.
# ------------------------------------------------------------------------------------------------------------
echo "Creating $OLD_DOTFILES_DIR for backup of any existing dotfiles ..."
mkdir -p $OLD_DOTFILES_DIR
echo "done"
# Change to the DOTFILES_DIR directory.
# ------------------------------------------------------------------------------------------------------------
echo "Changing to the $DOTFILES_DIR directory ..."
cd $DOTFILES_DIR
echo "done"
# Move any existing dotfiles to OLD_DOTFILES_DIR directory.
# (This also clears the way for the symlinks below: ln -s is used without -f.)
# ------------------------------------------------------------------------------------------------------------
echo "Moving any existing dotfiles to $OLD_DOTFILES_DIR directory ..."
for file in $CONFIG_FILES; do
    if [ -e ~/.$file ]; then
        mv ~/.$file $OLD_DOTFILES_DIR/
    fi
done
if [ -d $NVIM_DATA_DIR/$NVIM_FILES ]; then
    mv $NVIM_DATA_DIR/$NVIM_FILES $OLD_DOTFILES_DIR/
fi
if [ -f $NVIM_CONFIG_DIR/$NVIM_CONFIG ]; then
    mv $NVIM_CONFIG_DIR/$NVIM_CONFIG $OLD_DOTFILES_DIR/
fi
if [ -f $OH_MY_ZSH_THEME_DIR/$ZSH_THEME ]; then
    mv $OH_MY_ZSH_THEME_DIR/$ZSH_THEME $OLD_DOTFILES_DIR/
fi
if [ -f ~/.NERDTreeBookmarks ]; then
    mv ~/.NERDTreeBookmarks $OLD_DOTFILES_DIR/
fi
echo "done"
# Create symlinks to any files in the ~/.dotfiles directory.
# ------------------------------------------------------------------------------------------------------------
echo "Creating symlinks in $HOME directory ..."
for file in $CONFIG_FILES; do
    ln -s $DOTFILES_DIR/$file ~/.$file
done
ln -s $DOTFILES_DIR/$NVIM_FILES $NVIM_DATA_DIR
mkdir -p $NVIM_CONFIG_DIR
ln -s $DOTFILES_DIR/$NVIM_CONFIG $NVIM_CONFIG_DIR/
if [ -d "$OH_MY_ZSH_THEME_DIR" ]; then
    ln -s $DOTFILES_DIR/$ZSH_THEME $OH_MY_ZSH_THEME_DIR/$ZSH_THEME
fi
echo "done"
# Create NERDTreeBookmarks file.
# Bookmarks are added only for directories that exist on this machine.
# ------------------------------------------------------------------------------------------------------------
touch $HOME/.NERDTreeBookmarks
echo "HOME $HOME" >> $HOME/.NERDTreeBookmarks
if [ -d "$CALC" ]; then
    echo "Calc $CALC" >> $HOME/.NERDTreeBookmarks
fi
if [ -d "$HOME/Documents/Projects" ]; then
    echo "Projects $HOME/Documents/Projects" >> $HOME/.NERDTreeBookmarks
fi
if [ -d "$HOME/Documents/Teaching" ]; then
    echo "Teaching $HOME/Documents/Teaching" >> $HOME/.NERDTreeBookmarks
fi
# Source ~/.bash_profile
# NOTE(review): sourcing a bash profile from this (bash) script only affects this process;
# the exec zsh below starts a fresh shell.
# ------------------------------------------------------------------------------------------------------------
source $HOME/.bash_profile
# Create neovim conda environment.
# ------------------------------------------------------------------------------------------------------------
if type conda &> /dev/null; then
    echo "Creating conda environment for neovim ..."
    conda env update --file $NEOVIM_ENVIRONMENT
    echo "done"
else
    echo "Conda not installed. Neovim wont work properly. Install required packages manually"
fi
# Start zsh
# ------------------------------------------------------------------------------------------------------------
if type zsh &> /dev/null; then
    exec zsh
fi
|
<filename>src/example-components/MarketingHero/MarketingHero4/index.js
import React from 'react';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Grid, Container, Button } from '@material-ui/core';
import hero4 from '../../../assets/images/hero-bg/hero-4.jpg';
import illustration1 from '../../../assets/images/illustrations/pack2/credit_card.svg';
import MarketingHeaders4 from '../../MarketingHeaders/MarketingHeaders4';
/**
 * Marketing hero section (variant 4): composed background layers, a headline
 * with two call-to-action buttons, an illustration column, and a decorative
 * SVG wave along the top shape container. Purely presentational; both
 * buttons prevent default navigation.
 */
export default function LivePreviewExample() {
  return (
    <>
      <div className="hero-wrapper bg-composed-wrapper bg-white">
        <Container className="header-top-section py-2">
          <MarketingHeaders4 />
        </Container>
        <div className="hero-wrapper--content">
          <div
            className="bg-composed-wrapper--image opacity-6"
            style={{ backgroundImage: 'url(' + hero4 + ')' }}
          />
          <div className="bg-composed-wrapper--bg bg-white opacity-8" />
          <div className="bg-composed-wrapper--bg bg-amy-crisp opacity-6" />
          <div className="bg-composed-wrapper--content">
            <Container className="text-second py-4">
              <Container>
                <Grid container spacing={6}>
                  <Grid item lg={6} className="d-flex align-items-center">
                    <div>
                      <h2 className="display-3 font-weight-bold">
                        Bamburgh React Admin Dashboard with Material-UI PRO
                      </h2>
                      <p className="font-size-xl py-3 text-black-50">
                        Premium admin template powered by the most popular UI
                        components framework available for React: Material-UI.
                        Features hundreds of examples making web development
                        fast and easy. Start from one of the individual apps
                        included or from the general dashboard and build
                        beautiful scalable applications and presentation
                        websites.
                      </p>
                      <div className="pt-3">
                        <Button
                          href="#/"
                          onClick={(e) => e.preventDefault()}
                          size="large"
                          className="btn-pill shadow-second-sm btn-first">
                          <span className="btn-wrapper--label">Wallets</span>
                          <span className="btn-wrapper--icon">
                            <FontAwesomeIcon icon={['fas', 'arrow-right']} />
                          </span>
                        </Button>
                        <Button
                          href="#/"
                          onClick={(e) => e.preventDefault()}
                          size="large"
                          className="bg-white-50 text-second btn-pill ml-3"
                          disableRipple>
                          <span>Learn more</span>
                        </Button>
                      </div>
                    </div>
                  </Grid>
                  <Grid item lg={6} className="d-flex align-items-center">
                    <img
                      src={illustration1}
                      alt="..."
                      className="m-5 m-lg-0 w-100"
                    />
                  </Grid>
                </Grid>
              </Container>
            </Container>
            <div className="shape-container-top-2">
              <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1440 320">
                <path
                  fill="var(--light)"
                  fillOpacity="1"
                  d="M0,224L80,197.3C160,171,320,117,480,112C640,107,800,149,960,165.3C1120,181,1280,171,1360,165.3L1440,160L1440,320L1360,320C1280,320,1120,320,960,320C800,320,640,320,480,320C320,320,160,320,80,320L0,320Z"></path>
              </svg>
            </div>
          </div>
        </div>
      </div>
    </>
  );
}
|
from typing import List
def find_pair_sum(nums: List[int], target: int) -> List[int]:
    """Return the first pair of values in ``nums`` that sums to ``target``.

    The two values are returned ordered by the position of their occurrences
    in ``nums``. An empty list is returned when no such pair exists.
    """
    seen = {}  # value -> index of its most recent occurrence
    for idx, value in enumerate(nums):
        needed = target - value
        if needed in seen:
            if seen[needed] < idx:
                return [needed, value]
            return [value, needed]
        seen[value] = idx
    return []
<reponame>dogballs/battle-city<filename>src/core/input/InputBinding.ts
/**
 * Maps abstract game controls to physical input codes. Keeps two layers:
 * immutable defaults and user-defined custom overrides; lookups consult the
 * custom layer first. Only the custom layer is serialized.
 */
export class InputBinding {
  private default = new Map<number, number>();
  private custom = new Map<number, number>();

  /** Register the built-in code for a control. */
  public setDefault(control: number, code: number): void {
    this.default.set(control, code);
  }

  /** Register a user override for a control. */
  public setCustom(control: number, code: number): void {
    this.custom.set(control, code);
  }

  /** Resolve a control to its code; custom overrides win over defaults. */
  public get(control: number): number {
    if (this.custom.has(control)) {
      return this.custom.get(control);
    }
    return this.default.get(control);
  }

  /**
   * Reverse lookup: find the control bound to a code, checking custom
   * bindings first. Returns null when the code is unbound.
   * Note: `return` inside forEach only exits the callback; the first match
   * wins because of the `foundControl === null` guard.
   */
  public getControl(codeToFind: number): number {
    let foundControl = null;
    this.custom.forEach((code, control) => {
      if (foundControl === null && code === codeToFind) {
        foundControl = control;
        return;
      }
    });
    if (foundControl === null) {
      this.default.forEach((code, control) => {
        if (foundControl === null && code === codeToFind) {
          foundControl = control;
          return;
        }
      });
    }
    return foundControl;
  }

  /** Drop all user overrides, reverting every control to its default. */
  public resetAllToDefault(): void {
    this.custom.clear();
  }

  /** Serialize as JSON: an array of [control, code] pairs. */
  public toJSON(): string {
    const pairs = [];
    // Save only custom bindings
    this.custom.forEach((code, control) => {
      pairs.push([control, code]);
    });
    const json = JSON.stringify(pairs);
    return json;
  }

  /**
   * Restore custom bindings from toJSON() output. Malformed JSON or
   * non-array payloads are ignored; malformed pairs are skipped.
   */
  public fromJSON(json: string): void {
    let pairs = [];
    try {
      pairs = JSON.parse(json);
    } catch (err) {
      // Ignore parse error
    }
    if (!Array.isArray(pairs)) {
      return;
    }
    pairs.forEach((pair) => {
      if (!Array.isArray(pair)) {
        return;
      }
      const [control, code] = pair;
      this.custom.set(control, code);
    });
  }
}
|
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
# Copyright (C) 2020 <NAME>
# Use of this source code is governed by the MIT License
###############################################################################
from . import Indicator, smma
class truehigh(Indicator):
    '''
    Defined by J. Welles Wilder Jr. in 1978 in his book *"New Concepts in
    Technical Trading Systems"* for the ATR

    Records the "true high" which is the maximum of today's high and
    yesterday's close

    Formula:
      - truehigh = max(high, close_prev)

    See:
      - http://en.wikipedia.org/wiki/Average_true_range
    '''
    group = 'volatility'
    # NOTE(review): these aliases duplicate truerange's ('TR', 'TrueRange',
    # ...); this looks like a copy/paste -- confirm the intended alias names
    # before changing the registered names.
    alias = 'TR', 'TrueRange', 'trange', 'TRANGE'
    # BUG FIX: the calculation below uses self.i.high, but the inputs tuple
    # declared 'low', 'close'; declare the inputs actually consumed.
    inputs = 'high', 'close'
    outputs = 'truehi'
    params = (
        ('_period', 1, 'Period to consider'),
    )

    def __init__(self):
        # max(high, close(-_period)) via clip: raise close(-1) to at least high
        self.o.truehi = self.i.close(-self.p._period).clip(lower=self.i.high)
class truelow(Indicator):
    '''
    Defined by J. Welles Wilder Jr. in 1978 in his book *"New Concepts in
    Technical Trading Systems"* for the ATR

    Records the "true low" which is the minimum of today's low and
    yesterday's close

    Formula:
      - truelow = min(low, close_prev)

    See:
      - http://en.wikipedia.org/wiki/Average_true_range
    '''
    group = 'volatility'
    # NOTE(review): same aliases as truehigh/truerange -- likely copy/paste;
    # confirm the intended alias names for this indicator.
    alias = 'TR', 'TrueRange', 'trange', 'TRANGE'
    inputs = 'low', 'close'
    outputs = 'truelo'
    params = (
        ('_period', 1, 'Period to consider'),
    )

    def __init__(self):
        # min(low, close(-_period)) via clip: cap close(-1) at today's low
        self.o.truelo = self.i.close(-self.p._period).clip(upper=self.i.low)
class truerange(Indicator):
    '''
    Defined by J. Welles Wilder Jr. in 1978 in his book New Concepts in
    Technical Trading Systems.

    Formula:
      - max(high - low, abs(high - prev_close), abs(prev_close - low)

    which can be simplified to

      - truerange = max(high, prev_close) - min(low, prev_close)

    See:
      - http://en.wikipedia.org/wiki/Average_true_range

    The idea is to take the previous close into account to calculate the range
    if it yields a larger range than the daily range (High - Low)
    '''
    group = 'volatility'
    alias = 'TR', 'TrueRange', 'trange', 'TRANGE'
    inputs = 'high', 'low', 'close'
    outputs = 'tr'
    params = (
        ('_period', 1, 'Period for high/low vs close for truerange calc'),
    )

    def __init__(self):
        # Simplified formula: truehigh - truelow, both against close(-_period)
        close1 = self.i.close(-self.p._period)
        truehi = close1.clip(lower=self.i.high)  # max of close(-1) and hi
        truelo = close1.clip(upper=self.i.low)  # min of close(-1) and low
        self.o.tr = truehi - truelo
class atr(truerange):
    '''
    Defined by J. Welles Wilder, Jr. in 1978 in his book *"New Concepts in
    Technical Trading Systems"*.

    The idea is to take the close into account to calculate the range if it
    yields a larger range than the daily range (High - Low)

    Formula:
      - truerange = max(high, close(-1)) - min(low, close(-1))
      - atr = SmoothedMovingAverage(truerange, period)

    See:
      - http://en.wikipedia.org/wiki/Average_true_range
    '''
    group = 'volatility'
    alias = 'ATR', 'AverageTrueRange'
    outputs = 'atr'  # outputs_override in class def, autoalias tr => atr added
    params = (
        ('period', 14, 'Period to consider'),
        ('_ma', smma, 'Moving average to use'),
    )

    def __init__(self):
        # Smooth the inherited truerange output (self.o.tr) over `period`.
        self.o.atr = self.p._ma(self.o.tr, period=self.p.period)
class natr(atr):
    '''
    Offers a normalized (against the `close`) version of the `atr`, which can
    provide better values for comparison against different price ranges.

    Formula:
      - natr = 100.0 * atr / close

    See:
      - http://en.wikipedia.org/wiki/Average_true_range
    '''
    group = 'volatility'
    alias = 'NATR', 'NormalizedAverageTrueRange'
    outputs = 'natr'  # outputs_override above, autoalias atr => natr added
    def __init__(self):
        # Express the inherited atr output as a percentage of the close.
        self.o.natr = 100.0 * self.o.atr / self.i.close
|
#!/usr/bin/env bash
set -e
#set -x
ROOT=example
BFT_NODES="node-bft1 node-bft2"
BFT_NODES_N="1 2"
NUM_BFT_NODES=2
POOL_NODES="node-pool1"
ALL_NODES="${BFT_NODES} ${POOL_NODES}"
if ! mkdir "${ROOT}"; then
echo "The ${ROOT} directory already exists, please move or remove it"
exit
fi
# copy and tweak the configuration
cp configuration/defaults/cole-mainnet/configuration.yaml ${ROOT}/
sed -i ${ROOT}/configuration.yaml \
-e 's/Protocol: RealPBFT/Protocol: TOptimum/' \
-e 's/minSeverity: Info/minSeverity: Debug/'
# Set up our template
bcc-cli genesis create --testnet-magic 42 --genesis-dir ${ROOT}
# Then edit the genesis.spec.json ...
SUPPLY=1000000000
# We're going to use really quick epochs (300 seconds), by using short slots 0.2s
# and K=10, but we'll keep long KES periods so we don't have to bother
# cycling KES keys
sed -i ${ROOT}/genesis.spec.json \
-e 's/"slotLength": 1/"slotLength": 0.2/' \
-e 's/"activeSlotsCoeff": 5.0e-2/"activeSlotsCoeff": 0.1/' \
-e 's/"securityParam": 2160/"securityParam": 10/' \
-e 's/"epochLength": 432000/"epochLength": 1500/' \
-e 's/"maxEntropicSupply": 0/"maxEntropicSupply": 1000000000/' \
-e 's/"decentralisationParam": 1.0/"decentralisationParam": 0.7/'
# Now generate for real:
# Creates the genesis file plus genesis/delegate/utxo key pairs for the
# testnet (magic 42) under ${ROOT}.
bcc-cli genesis create \
--testnet-magic 42 \
--genesis-dir ${ROOT}/ \
--gen-genesis-keys ${NUM_BFT_NODES} \
--gen-utxo-keys 1
# Work inside ${ROOT} for the rest of the setup; matched by popd at the end.
pushd ${ROOT}
echo "====================================================================="
echo "Generated genesis keys and genesis files:"
echo
ls -1 *
echo "====================================================================="
echo "Generated genesis.json:"
echo
cat genesis.json
echo
echo "====================================================================="
# One working directory per node (BFT and pool nodes alike).
mkdir ${ALL_NODES}
# Make the pool operator cold keys
# This was done already for the BFT nodes as part of the genesis creation
for NODE in ${POOL_NODES}; do
bcc-cli node key-gen \
--cold-verification-key-file ${NODE}/operator.vkey \
--cold-signing-key-file ${NODE}/operator.skey \
--operational-certificate-issue-counter-file ${NODE}/operator.counter
bcc-cli node key-gen-VRF \
--verification-key-file ${NODE}/vrf.vkey \
--signing-key-file ${NODE}/vrf.skey
# Set permissions for the vrf private key file: read for owner only
chmod gou-rwx "${NODE}/vrf.skey"
chmod u+r "${NODE}/vrf.skey"
done
# Symlink the BFT operator keys from the genesis delegates, for uniformity
for N in ${BFT_NODES_N}; do
ln -s ../delegate-keys/delegate${N}.skey node-bft${N}/operator.skey
ln -s ../delegate-keys/delegate${N}.vkey node-bft${N}/operator.vkey
ln -s ../delegate-keys/delegate${N}.counter node-bft${N}/operator.counter
ln -s ../delegate-keys/delegate${N}.vrf.vkey node-bft${N}/vrf.vkey
ln -s ../delegate-keys/delegate${N}.vrf.skey node-bft${N}/vrf.skey
done
# Make hot (KES) keys and operational certificates for all nodes
for NODE in ${ALL_NODES}; do
bcc-cli node key-gen-KES \
--verification-key-file ${NODE}/kes.vkey \
--signing-key-file ${NODE}/kes.skey
bcc-cli node issue-op-cert \
--kes-period 0 \
--kes-verification-key-file ${NODE}/kes.vkey \
--cold-signing-key-file ${NODE}/operator.skey \
--operational-certificate-issue-counter-file ${NODE}/operator.counter \
--out-file ${NODE}/node.cert
done
# Make topology files
# Each node lists the other two nodes (ports 3001-3003) as producers.
#TODO generalise this over the N BFT nodes and pool nodes
(cat <<TOPOLOGY_FILE
{
"Producers": [
{
"addr": "127.0.0.1",
"port": 3002,
"valency": 1
}
, {
"addr": "127.0.0.1",
"port": 3003,
"valency": 1
}
]
}
TOPOLOGY_FILE
) > node-bft1/topology.json
echo 3001 > node-bft1/port
(cat <<TOPOLOGY_FILE
{
"Producers": [
{
"addr": "127.0.0.1",
"port": 3001,
"valency": 1
}
, {
"addr": "127.0.0.1",
"port": 3003,
"valency": 1
}
]
}
TOPOLOGY_FILE
) > node-bft2/topology.json
echo 3002 > node-bft2/port
(cat <<TOPOLOGY_FILE
{
"Producers": [
{
"addr": "127.0.0.1",
"port": 3001,
"valency": 1
}
, {
"addr": "127.0.0.1",
"port": 3002,
"valency": 1
}
]
}
TOPOLOGY_FILE
) > node-pool1/topology.json
echo 3003 > node-pool1/port
echo "Generated node operator keys (cold, hot) and operational certs:"
echo
ls -1 ${ALL_NODES}
echo "====================================================================="
# Make some payment and stake addresses
# user1..n: will own all the funds in the system, we'll set this up from
# the initial utxo
# pool-owner1..n: will be the owner of the pools and we'll use their reward
# account for pool rewards
USER_ADDRS="user1"
POOL_ADDRS="pool-owner1"
ADDRS="${USER_ADDRS} ${POOL_ADDRS}"
mkdir addresses
for ADDR in ${ADDRS}; do
# Payment address keys
bcc-cli address key-gen \
--verification-key-file addresses/${ADDR}.vkey \
--signing-key-file addresses/${ADDR}.skey
# Stake address keys
bcc-cli stake-address key-gen \
--verification-key-file addresses/${ADDR}-stake.vkey \
--signing-key-file addresses/${ADDR}-stake.skey
# Payment addresses
bcc-cli address build \
--payment-verification-key-file addresses/${ADDR}.vkey \
--stake-verification-key-file addresses/${ADDR}-stake.vkey \
--testnet-magic 42 \
--out-file addresses/${ADDR}.addr
# Stake addresses
bcc-cli stake-address build \
--stake-verification-key-file addresses/${ADDR}-stake.vkey \
--testnet-magic 42 \
--out-file addresses/${ADDR}-stake.addr
# Stake addresses registration certs
bcc-cli stake-address registration-certificate \
--stake-verification-key-file addresses/${ADDR}-stake.vkey \
--out-file addresses/${ADDR}-stake.reg.cert
done
# user N will delegate to pool N
USER_POOL_N="1"
for N in ${USER_POOL_N}; do
# Stake address delegation certs
bcc-cli stake-address delegation-certificate \
--stake-verification-key-file addresses/user${N}-stake.vkey \
--cold-verification-key-file node-pool${N}/operator.vkey \
--out-file addresses/user${N}-stake.deleg.cert
# Pool owner reward keys are just the pool-owner stake keys, symlinked in.
ln -s ../addresses/pool-owner${N}-stake.vkey node-pool${N}/owner.vkey
ln -s ../addresses/pool-owner${N}-stake.skey node-pool${N}/owner.skey
done
# Summarise the generated address artifacts for the user.
# (Fixed typos in the user-facing output: "regitration" -> "registration",
# "delegatation" -> "delegation".)
echo "Generated payment address keys, stake address keys,"
echo "stake address registration certs, and stake address delegation certs"
echo
ls -1 addresses/
echo "====================================================================="
# Next is to make the stake pool registration cert
for NODE in ${POOL_NODES}; do
bcc-cli stake-pool registration-certificate \
--testnet-magic 42 \
--pool-pledge 0 --pool-cost 0 --pool-margin 0 \
--cold-verification-key-file ${NODE}/operator.vkey \
--vrf-verification-key-file ${NODE}/vrf.vkey \
--reward-account-verification-key-file ${NODE}/owner.vkey \
--pool-owner-stake-verification-key-file ${NODE}/owner.vkey \
--out-file ${NODE}/registration.cert
done
echo "Generated stake pool registration certs:"
ls -1 node-*/registration.cert
echo "====================================================================="
# Now we'll construct one whopper of a transaction that does everything
# just to show off that we can, and to make the script shorter
# We'll transfer all the funds to the user1, which delegates to pool1
# We'll register certs to:
# 1. register the pool-owner1 stake address
# 2. register the stake pool 1
# 3. register the user1 stake address
# 4. delegate from the user1 stake address to the stake pool
bcc-cli transaction build-raw \
--invalid-hereafter 1000 \
--fee 0 \
--tx-in $(bcc-cli genesis initial-txin \
--testnet-magic 42 \
--verification-key-file utxo-keys/utxo1.vkey) \
--tx-out $(cat addresses/user1.addr)+${SUPPLY} \
--certificate-file addresses/pool-owner1-stake.reg.cert \
--certificate-file node-pool1/registration.cert \
--certificate-file addresses/user1-stake.reg.cert \
--certificate-file addresses/user1-stake.deleg.cert \
--out-file tx1.txbody
# So we'll need to sign this with a bunch of keys:
# 1. the initial utxo spending key, for the funds
# 2. the user1 stake address key, due to the delegatation cert
# 3. the pool1 owner key, due to the pool registration cert
# 3. the pool1 operator key, due to the pool registration cert
bcc-cli transaction sign \
--signing-key-file utxo-keys/utxo1.skey \
--signing-key-file addresses/user1-stake.skey \
--signing-key-file node-pool1/owner.skey \
--signing-key-file node-pool1/operator.skey \
--testnet-magic 42 \
--tx-body-file tx1.txbody \
--out-file tx1.tx
echo "Generated a signed 'do it all' transaction:"
ls -1 tx1.tx
echo "====================================================================="
echo "So you can now do various things:"
echo " * Start the nodes"
echo " * Submit the initial 'do it all' transaction"
echo " * Query the node's ledger state"
echo
echo "To start the nodes, in separate terminals use:"
echo
for NODE in ${ALL_NODES}; do
echo "bcc-node run \\"
echo " --config ${ROOT}/configuration.yaml \\"
echo " --topology ${ROOT}/${NODE}/topology.json \\"
echo " --database-path ${ROOT}/${NODE}/db \\"
echo " --socket-path ${ROOT}/${NODE}/node.sock \\"
echo " --sophie-kes-key ${ROOT}/${NODE}/kes.skey \\"
echo " --sophie-vrf-key ${ROOT}/${NODE}/vrf.skey \\"
echo " --sophie-operational-certificate ${ROOT}/${NODE}/node.cert \\"
echo " --port $(cat ${NODE}/port)"
done
echo
echo "To submit the transaction"
echo
echo "BCC_NODE_SOCKET_PATH=${ROOT}/node-bft1/node.sock \\"
echo " bcc-cli transaction submit \\"
echo " --tx-file ${ROOT}/tx1.tx \\"
echo " --testnet-magic 42"
echo
echo "Then wait until epoch #2 (counting from 0) starting at slot 3000"
echo "and query the stake distribution, and see if the pool node creates blocks"
echo
echo "BCC_NODE_SOCKET_PATH=example/node-bft1/node.sock \\"
echo " bcc-cli query stake-distribution --testnet-magic 42"
echo
popd
|
// Imports
const {remote, ipcRenderer} = require("electron");
const $ = require("jquery");
window.jQuery = $;
const Swal = require("sweetalert2");
const randomColor = require("randomcolor");
const chroma = require("chroma-js");
const velocity = require("velocity-animate");
const logger = remote.getGlobal("rendererLogger");
const config = remote.getGlobal("config");
// Inform that Renderer started
logger.info("Renderer started ...");
// Create variables
var assets = remote.getGlobal("assets");
var container = document.getElementById("container");
var isPaused = false;
var currentAssetIndex = assets.length;
var startTime, endTime, longpress, timeout, recordSwal, currentChatId, currentMessageId, currentTimeout;
// configure sound notification sound
if (config.playSoundOnReceive != false) {
var audio = new Audio(__dirname + "/sound1.mp3");
}
// handle touch events for navigation and voice reply
// touchstart records when the press began and which asset was on screen,
// so touchend can decide between tap navigation and a long-press voice reply.
$("body").on('touchstart', function () {
startTime = new Date().getTime();
// NOTE(review): currentImageForVoiceReply (and tapPos/containerWidth below)
// are implicit globals shared with the record handlers — verify intended.
currentImageForVoiceReply = assets[currentAssetIndex]
});
// touchend: left 20% of the width = previous asset, right 20% = next asset;
// middle = toggle play/pause on a tap, or start a voice reply on a long
// press (> 500 ms).
$("body").on('touchend', function (event) {
endTime = new Date().getTime();
longpress = (endTime - startTime > 500) ? true : false;
tapPos = event.originalEvent.changedTouches[0].pageX
containerWidth = $("body").width()
if (tapPos / containerWidth < 0.2) {
previousAsset()
} else if (tapPos / containerWidth > 0.8) {
nextAsset()
} else {
if (longpress) {
ipcRenderer.send("record", currentImageForVoiceReply['chatId'], currentImageForVoiceReply['messageId']);
} else {
if (isPaused) {
play()
} else {
pause()
}
}
}
});
// handle pressed record button
// External record trigger: reply to the asset currently on screen.
ipcRenderer.on("recordButtonPressed", function (event, arg) {
currentImageForVoiceReply = assets[currentAssetIndex]
ipcRenderer.send("record", currentImageForVoiceReply['chatId'], currentImageForVoiceReply['messageId']);
});
// show record in progress message
// Builds a DOM fragment (spinner + text) and shows it in a modal while the
// main process records the voice reply; the modal handle is kept in
// recordSwal so the stop/error handlers below can close it.
ipcRenderer.on("recordStarted", function (event, arg) {
let message = document.createElement("div");
let spinner = document.createElement("div");
spinner.classList.add("spinner");
message.appendChild(spinner);
let text = document.createElement("p");
// messageText is an implicit global; pieces come from user config.
messageText = config.voiceReply.recordingPreMessage
+ ' ' + currentImageForVoiceReply['chatName']
+ ' ' + config.voiceReply.recordingPostMessage;
text.innerHTML = messageText
message.appendChild(text);
recordSwal = Swal.fire({
title: config.voiceReply.recordingMessageTitle,
showConfirmButton: false,
html: message
});
});
// show record done message
ipcRenderer.on("recordStopped", function (event, arg) {
let message = document.createElement("div");
let text = document.createElement("p");
text.innerHTML = config.voiceReply.recordingDone
+ ' ' + currentImageForVoiceReply['chatName'];
message.appendChild(text);
recordSwal.close();
// NOTE(review): this dialog uses the legacy `type` option while the error
// dialog below uses `icon` — confirm which SweetAlert2 version is in use.
Swal.fire({
html: message,
title: config.voiceReply.recordingMessageTitle,
showConfirmButton: false,
type: "success",
timer: 5000
});
});
//show record error message
ipcRenderer.on("recordError", function (event, arg) {
let message = document.createElement("div");
let text = document.createElement("p");
text.innerHTML = config.voiceReply.recordingError;
message.appendChild(text);
recordSwal.close();
Swal.fire({
html: message,
title: config.voiceReply.recordingMessageTitle,
showConfirmButton: false,
icon: "error",
timer: 5000
});
});
// handle new incoming asset
// Main process announces a freshly received asset; show the toast and
// optionally play the notification sound (audio exists only when enabled).
ipcRenderer.on("newAsset", function (event, arg) {
newAsset(arg.sender, arg.type);
if (config.playSoundOnReceive != false) {
audio.play();
}
});
// handle navigation
// Remote-control IPC messages mapped onto the local navigation helpers.
ipcRenderer.on("next", function (event, arg) {
nextAsset()
});
ipcRenderer.on("previous", function (event, arg) {
previousAsset()
});
ipcRenderer.on("pause", function (event, arg) {
pause()
});
ipcRenderer.on("play", function (event, arg) {
play()
});
// functions to show and hide pause icon
// Overlay a simple two-bar pause icon in the top-right corner of the
// slideshow container; removed again by hidePause().
function showPause() {
  const pauseBox = document.createElement("div");
  pauseBox.id = "pauseBox";
  const leftBar = document.createElement("div");
  const rightBar = document.createElement("div");
  pauseBox.appendChild(leftBar);
  pauseBox.appendChild(rightBar);
  pauseBox.style =
    "height:50px;width:45px;position:absolute;top:20px;right:20px";
  leftBar.style =
    "height:50px;width:15px;background-color:blue;float:left;border-radius:2px";
  rightBar.style =
    "height:50px;width:15px;background-color:blue;float:right;border-radius:2px";
  container.appendChild(pauseBox);
}
// Remove the pause icon overlay created by showPause(), if it is attached.
function hidePause() {
  const pauseBox = document.getElementById("pauseBox");
  if (pauseBox.parentNode) {
    pauseBox.parentNode.removeChild(pauseBox);
  }
}
// functions for navigation
// Advance to the next asset; when paused, redraw the pause icon so it sits
// on top of the newly shown asset.
function nextAsset() {
  const wasPaused = isPaused;
  if (wasPaused) {
    hidePause();
  }
  loadAsset(true, 0);
  if (wasPaused) {
    showPause();
  }
}
// Step back to the previous asset; when paused, redraw the pause icon so it
// sits on top of the newly shown asset.
function previousAsset() {
  const wasPaused = isPaused;
  if (wasPaused) {
    hidePause();
  }
  loadAsset(false, 0);
  if (wasPaused) {
    showPause();
  }
}
/**
 * Pause the slideshow: cancel the pending auto-advance timer and overlay
 * the pause icon. No-op when already paused.
 */
function pause() {
  if (isPaused) return;
  isPaused = true;
  clearTimeout(currentTimeout);
  // showPause takes no parameters; the stray isPaused argument was dropped.
  showPause();
}
/**
 * Resume the slideshow: advance immediately (which reschedules the
 * auto-advance timer inside loadAsset) and remove the pause icon.
 * No-op when not paused.
 */
function play() {
  if (!isPaused) return;
  isPaused = false;
  loadAsset(true, 0);
  // hidePause takes no parameters; the stray isPaused argument was dropped.
  hidePause();
}
/**
 * Whether the asset should be rendered as a video.
 * @param {{src: string}} asset - asset descriptor with a file path in src.
 * @returns {boolean} true when the file extension is "mp4".
 */
function assetIsVideo(asset) {
  // Strict equality instead of the previous loose ==.
  return asset.src.split(".").pop() === "mp4";
}
/**
 * Whether the asset should be rendered as an image.
 * @param {{src: string}} asset - asset descriptor with a file path in src.
 * @returns {boolean} true when the file extension is "jpg".
 */
function assetIsImage(asset) {
  // Strict equality instead of the previous loose ==.
  return asset.src.split(".").pop() === "jpg";
}
/**
 * Whether the asset should be rendered as a text document.
 * @param {{src: string}} asset - asset descriptor with a file path in src.
 * @returns {boolean} true when the file extension is "txt".
 */
function assetIsText(asset) {
  // Strict equality instead of the previous loose ==.
  return asset.src.split(".").pop() === "txt";
}
//load image to slideshow
/**
 * Show the next/previous asset in the slideshow with a cross-fade, and
 * schedule the following advance (unless paused).
 * @param {boolean} isNext - true to advance, false to step backwards.
 * @param {number} fadeTime - cross-fade duration in ms.
 * @param {boolean} goToLatest - currently unused; kept for compatibility.
 */
function loadAsset(isNext, fadeTime, goToLatest = false) {
clearTimeout(currentTimeout);
// Nothing to show yet: poll again after one interval.
if (assets.length == 0) {
currentTimeout = setTimeout(() => {
loadAsset(true, fadeTime);
}, config.interval);
return;
}
// get image path and increase currentAssetIndex for next image
// (wraps around in both directions)
if (isNext) {
if (currentAssetIndex >= assets.length - 1) {
currentAssetIndex = 0;
} else {
currentAssetIndex++;
}
} else {
currentAssetIndex--;
if (currentAssetIndex < 0) currentAssetIndex = assets.length - 1;
}
var asset = assets[currentAssetIndex];
//get current container and create needed elements
var currentImage = container.firstElementChild;
var div = document.createElement("div");
// Tag type depends on the file extension: video, img, or embed for text.
var assetTag;
if (assetIsVideo(asset)) {
assetTag = document.createElement("video");
assetTag.muted = !config.playVideoAudio;
assetTag.autoplay = true;
} else if (assetIsImage(asset)) {
assetTag = document.createElement("img");
} else if (assetIsText(asset)) {
assetTag = document.createElement("embed");
}
var sender = document.createElement("span");
var caption = document.createElement("span");
//create background and font colors for sender and caption
var backgroundColor = randomColor({
luminosity: "dark",
alpha: 1
});
var fontColor = randomColor({
luminosity: "light",
alpha: 1
});
//when contrast between background color and font color is too small to
//make the text readable, recreate colors (WCAG AA threshold 4.5)
while (chroma.contrast(backgroundColor, fontColor) < 4.5) {
backgroundColor = randomColor({
luminosity: "dark",
alpha: 1
});
fontColor = randomColor({
luminosity: "light",
alpha: 1
});
}
//set class names and style attributes
assetTag.src = asset.src;
assetTag.className = "image";
div.className = "assetcontainer";
sender.className = "sender";
caption.className = "caption";
caption.id = "caption";
// NOTE(review): sender/caption come from received messages and are written
// via innerHTML — potential HTML injection; textContent would be safer.
sender.innerHTML = asset.sender;
caption.innerHTML = asset.caption;
sender.style.backgroundColor = backgroundColor;
caption.style.backgroundColor = backgroundColor;
sender.style.color = fontColor;
caption.style.color = fontColor;
//generate some randomness for positions of sender and caption
if (Math.random() >= 0.5) {
sender.style.left = 0;
sender.style.borderTopRightRadius = "10px";
sender.style.borderBottomRightRadius = "10px";
} else {
sender.style.right = 0;
sender.style.borderTopLeftRadius = "10px";
sender.style.borderBottomLeftRadius = "10px";
}
if (Math.random() >= 0.5) {
caption.style.left = 0;
caption.style.borderTopRightRadius = "10px";
caption.style.borderBottomRightRadius = "10px";
} else {
caption.style.right = 0;
caption.style.borderTopLeftRadius = "10px";
caption.style.borderBottomLeftRadius = "10px";
}
if (Math.random() >= 0.5) {
sender.style.top = "2%";
caption.style.bottom = "2%";
} else {
sender.style.bottom = "2%";
caption.style.top = "2%";
}
//calculate aspect ratio to show complete image on the screen and
//fade in new image while fading out the old image as soon as
//the new imageis loaded
if (assetIsVideo(asset)) {
assetTag.onloadeddata = function () {
screenAspectRatio =
remote
.getCurrentWindow()
.webContents.getOwnerBrowserWindow()
.getBounds().width /
remote
.getCurrentWindow()
.webContents.getOwnerBrowserWindow()
.getBounds().height;
// NOTE(review): <video> elements expose videoWidth/videoHeight, not
// naturalWidth/naturalHeight — this ratio may be NaN here; verify.
imageAspectRatio = assetTag.naturalWidth / assetTag.naturalHeight;
if (imageAspectRatio > screenAspectRatio) {
assetTag.style.width = "100%";
div.style.width = "100%";
} else {
assetTag.style.height = "100%";
div.style.height = "100%";
}
$(div).velocity("fadeIn", {
duration: fadeTime
});
$(currentImage).velocity("fadeOut", {
duration: fadeTime
});
// Videos stay on screen for their own duration, not config.interval.
if (!isPaused) {
currentTimeout = setTimeout(() => {
loadAsset(true, fadeTime);
}, assetTag.duration * 1000);
}
};
} else if (assetIsImage(asset)) {
assetTag.onload = function () {
screenAspectRatio =
remote
.getCurrentWindow()
.webContents.getOwnerBrowserWindow()
.getBounds().width /
remote
.getCurrentWindow()
.webContents.getOwnerBrowserWindow()
.getBounds().height;
imageAspectRatio = assetTag.naturalWidth / assetTag.naturalHeight;
if (imageAspectRatio > screenAspectRatio) {
assetTag.style.width = "100%";
div.style.width = "100%";
} else {
assetTag.style.height = "100%";
div.style.height = "100%";
}
// NOTE(review): this unconditional height assignment overrides the
// width-only branch above — possibly leftover code; verify.
div.style.height = "100%";
$(div).velocity("fadeIn", {
duration: fadeTime
});
$(currentImage).velocity("fadeOut", {
duration: fadeTime
});
if (!isPaused) {
currentTimeout = setTimeout(() => {
loadAsset(true, config.fadeTime);
}, config.interval);
}
};
} else if (assetIsText(asset)) {
//assetTag.style.width = "100%";
div.style.width = "100%";
$(div).velocity("fadeIn", {
duration: fadeTime
});
$(currentImage).velocity("fadeOut", {
duration: fadeTime
});
if (!isPaused) {
currentTimeout = setTimeout(() => {
loadAsset(true, config.fadeTime);
}, config.interval);
}
}
div.appendChild(assetTag);
if (config.showSender) {
div.appendChild(sender);
}
if (config.showCaption && asset.caption !== undefined) {
div.appendChild(caption);
}
// Remove the outgoing asset only after its fade-out has finished.
setTimeout(function () {
container.removeChild(currentImage);
}, fadeTime)
container.appendChild(div);
//fade out sender and caption at half time of the shown image
setTimeout(function () {
$(sender).velocity("fadeOut", {
duration: fadeTime / 2
});
$(caption).velocity("fadeOut", {
duration: fadeTime / 2
});
}, config.interval / 2);
}
//notify user of incoming image and restart slideshow with the newest image
/**
 * Notify the user of a newly received asset and restart the slideshow.
 * Refactors the four previously duplicated Swal branches into one lookup;
 * behavior is unchanged ("document" reuses the photo message, unknown
 * types are ignored).
 * @param {string} sender - display name of the sender.
 * @param {string} type - "image", "video", "document" or "text".
 */
function newAsset(sender, type) {
  assets = remote.getGlobal("assets");
  var titleByType = {
    image: config.newPhotoMessage,
    video: config.newVideoMessage,
    document: config.newPhotoMessage,
    text: config.newTextMessage
  };
  if (!titleByType.hasOwnProperty(type)) return; // unknown types: no toast
  Swal.fire({
    title: titleByType[type] + " " + sender,
    showConfirmButton: false,
    timer: 5000,
    type: "success"
  }).then((value) => {
    // Set the index past the end so loadAsset(true, ...) wraps to index 0,
    // restarting the slideshow (same as the original per-type branches).
    currentAssetIndex = assets.length;
    loadAsset(true, 0);
  });
}
//start slideshow of assets
// Initial kick-off; afterwards loadAsset keeps itself scheduled via setTimeout.
loadAsset(true, config.fadeTime);
|
public static void main(String[] args) {
// Check that an argument was provided
if (args.length == 0) {
throw new IllegalArgumentException("No argument provided");
}
String s = args[0];
int len = s.length();
// Check for possible OutOfBoundsException
if (len == 0) {
return;
}
// print the substring
for (int i=0; i<len; i++) {
System.out.println(s.substring(i, len));
}
} |
import chai from "chai";
import { beforeEach, afterEach } from "mocha";
import chaiHttp from "chai-http";
import server from "../../index";
import Profile from "../../models/Profile";
import ProfileValues from "../asset/userData";
import VerifiedToken from "../asset/article";
const { expect } = chai;
chai.use(chaiHttp);
// Test suite for PATCH /api/profile/edit/:id — authorization and id
// validation. Registered by the caller (mocha) via the exported function.
const updateProfile = () => {
// Start and end each test with an empty Profile collection.
beforeEach(async () => {
await Profile.deleteMany({});
});
afterEach(async () => {
await Profile.deleteMany({});
});
it("should not able to update profile if there is no token provided", (done) => {
const profile = new Profile(ProfileValues.validProfile);
// NOTE(review): save() is not awaited — only profile._id (available
// synchronously) is needed below, but the write may still be in flight
// when the request runs; confirm this is intentional.
profile.save();
chai
.request(server)
.patch(`/api/profile/edit/${profile._id}`)
.set(VerifiedToken.noTokenProvided)
.end((err, res) => {
expect(err).to.be.null;
expect(res).to.have.status(401);
expect(res.body).to.have.property("message", "access denied");
done();
});
});
it("should not be able to update profile if id is invalid", (done) => {
chai
.request(server)
.patch("/api/profile/edit/1")
.set(VerifiedToken.validToken)
.end((err, res) => {
expect(err).to.be.null;
expect(res).to.have.status(404);
done();
});
});
// Disabled happy-path test, kept for reference.
// it("should be able to update profile if it is found", (done) => {
// const profile = new Profile(ProfileValues.validProfile);
// profile.save();
// chai
// .request(server)
// .patch(`/api/profile/edit/${profile._id}`)
// .set(VerifiedToken.validToken)
// .send({ firstName: "Robert" })
// .end((err, res) => {
// expect(err).to.be.null;
// expect(res).to.have.status(200);
// expect(res.body).to.have.property(
// "message",
// "profile updated successfully"
// );
// done();
// });
// });
};
export default updateProfile;
|
# Evaluate the fine-tuned LM (train-outputs/512+0+512-common/7-model) on the
# WikiText-103 validation set, applying the "remove all but function words in
# the first half" augmentation and last-element evaluation, batch size 1.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-common/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-common/7-512+0+512-FW-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_function_words_first_half_full --eval_function last_element_eval
export * from './property-sources'
|
#!/bin/bash
# Set the project version everywhere it is recorded: the Maven POMs (via the
# versions plugin), the top-level VERSION.txt, and the REST module resources.
# Usage: $0 <version>
# Fail fast with a usage message when no version argument was supplied
# (previously mvn would run with an empty -DnewVersion).
if [ -z "$1" ]; then
echo "Usage: $0 <version>" >&2
exit 1
fi
mvn versions:set -DnewVersion="$1"
echo "$1" > VERSION.txt
cp VERSION.txt modules/rest/src/main/resources/
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR
# Parse the command line parameters
# that runCust will give out
# All paths default to NONE so missing arguments are easy to spot in the logs.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE
# Parsing command line arguments:
# Walk the argument list; each "-x VALUE" pair consumes two positions
# (one shift in the case arm, one at the bottom of the loop).
# Fixed: `[[ $# > 0 ]]` is a *string* comparison inside [[ ]]; use the
# numeric -gt operator. Also fixed the "Unkown" typo and replaced the
# backticks/unquoted expansion in the dirname call.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
echo "Usage: run_dispnet_training_philly.sh [run_options]"
echo "Options:"
echo " -d|--data-dir <path> - directory path to input data (default NONE)"
echo " -l|--log-dir <path> - directory path to save the log files (default NONE)"
echo " -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
echo " -m|--model-dir <path> - directory path to output model file (default NONE)"
exit 1
;;
-d|--data-dir)
DATA_DIR="$2"
shift # pass argument
;;
-p|--config-file-dir)
CONFIG_DIR=$(dirname "$2")
shift # pass argument
;;
-m|--model-dir)
MODEL_DIR="$2"
shift # pass argument
;;
-l|--log-dir)
LOG_DIR="$2"
shift
;;
*)
echo "Unknown option $key"
;;
esac
shift # past argument or value
done
# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"
# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR
# Run the actual job
# Resumes from ${MODEL_DIR}/checkpoint when present.
# NOTE(review): despite the header mentioning DispNet, this launches the
# anytime-models ResNet-ANN CIFAR-100 training script — confirm intended.
python $CONFIG_DIR/anytime_models/examples/resnet-ann.py \
--data_dir=$DATA_DIR \
--log_dir=$LOG_DIR \
--model_dir=$MODEL_DIR \
--load=${MODEL_DIR}/checkpoint \
-f=2 --samloss=0 -n=17 -c=16 -s=8 --ds_name=cifar100 --batch_size=64 --nr_gpu=1 --prediction_feature=none
<filename>src/views/projects.js
import React from 'react';
import Footer from '../components/footer';
// import Carousel from 'react-bootstrap/Carousel';
import { Carousel, Card, Container, Row, Col } from 'react-bootstrap';
import quick_lookup from '../assets/img/quick_lookup.jpg';
import keygen from '../assets/img/keygen.jpg';
import techl0rd from '../assets/img/techl0rd.jpg';
import b00kzearch from '../assets/img/b00kzearch.jpg';
import dinnerdate from '../assets/img/dinnerdate.jpg';
import dndtracker from '../assets/img/dndtracker.jpg';
import './style.css';
// Projects page: renders a Bootstrap carousel of portfolio entries, each with
// a screenshot, a short description, and repository/deployment links.
const Projects = () => {
// Static project metadata driving the carousel items below.
const ps = [
{
imgSrc: quick_lookup,
label: 'Quick Lookup',
caption: 'A React app for searching an employee database generated with RandomUserAPI',
repo: 'https://github.com/SprengerV/',
deploy: 'https://sprengerv.github.io/quick_lookup/'
},
{
imgSrc: keygen,
label: 'Keygen',
caption: 'A password generator web app built in HTML, CSS, and Javascript',
repo: 'https://github.com/SprengerV/password_generator',
deploy: 'https://sprengerv.github.io/password_generator/'
},
{
imgSrc: techl0rd,
label: 'techl0rd Blog',
caption: 'A blog site with authentication that allows user to make posts and edit posts and leave comments',
repo: 'https://github.com/SprengerV/techl0rd_blog',
deploy: 'https://techl0rd.herokuapp.com/'
},
{
imgSrc: b00kzearch,
label: 'b00kzearch',
caption: 'A MERN app that utilizes Google Books API to search for and save books',
repo: 'https://github.com/SprengerV/b00kzearch',
deploy: 'https://b00kzearch.herokuapp.com/'
},
{
imgSrc: dinnerdate,
label: 'Dinner-Date',
caption: 'My first group project. Uses 2 external APIs to generate movie and dinner combos for date ideas',
repo: 'https://github.com/SprengerV/dinner-date',
deploy: 'https://sprengerv.github.io/dinner-date/'
},
{
imgSrc: dndtracker,
label: 'DandD Tracker',
caption: 'My second group project. Uses auth0 and allows users to create and save characters for their Dungeons and Dragons campaigns',
repo: 'https://github.com/SprengerV/DandD_tracker',
deploy: 'https://dnd-trackz0rz.herokuapp.com/'
}
];
// One Carousel.Item per project; the array index serves as the React key
// because the list is static.
return (
<main>
<Card border="p-dark" className="content col-11 col-md-9 col-lg-8 col-xl-7 ms-auto me-auto mt-4">
<Carousel className="mb-2 ms-4 me-4 mt-2">
{ ps.map((p, i) =>
<Carousel.Item key={ i }>
<img
id="carol"
className="d-block w-100"
src={ p.imgSrc }
alt={ p.label }
/>
<Carousel.Caption className="text-dark">
<h3 className="text-s-dark">{ p.label }</h3>
<p className="text-p-dark">{ p.caption }</p>
<Container fluid={ true }>
<Row className="d-inline-flex">
<Col xs="6" className="text-nowrap">
<a href={ p.repo } target="_blank" rel="noreferrer" className="link-s-dark col-5 mx-2 text-center">
Repository
</a>
</Col>
<Col xs="6" className="text-nowrap">
<a href={ p.deploy } target="_blank" rel="noreferrer" className="link-s-dark col-5 mx-2 text-center">
Deployment
</a>
</Col>
</Row>
</Container>
</Carousel.Caption>
</Carousel.Item>
) }
</Carousel>
</Card>
<Footer/>
</main>
);
};
export default Projects;
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tear down any fixture left over from a previous run (errors are tolerated
# because the script runs without `set -e`).
ip netns del testserver
ip link del veth0 type veth peer name veth1
# Create a network namespace connected to the host via a veth pair:
# veth0 (host, 10.1.1.2/24) <-> veth1 (testserver, 10.1.1.1/24).
ip netns add testserver
ip link add veth0 type veth peer name veth1
ip addr add 10.1.1.2/24 dev veth0
ip link set up dev veth0
ip link set veth1 netns testserver
ip netns exec testserver ip addr add 10.1.1.1/24 dev veth1
ip netns exec testserver ip link set up dev veth1
# Inside the namespace: drop new inbound TCP connections (SYN only) to port
# $1, accept traffic to port $2, and allow all outbound TCP.
ip netns exec testserver iptables -t filter -A INPUT -p tcp --dport $1 -m tcp --tcp-flags FIN,SYN,RST,ACK SYN -m comment --comment v4-new-connections -j DROP
ip netns exec testserver iptables -t filter -A INPUT -p tcp --dport $2 -j ACCEPT
ip netns exec testserver iptables -t filter -A OUTPUT -p tcp -j ACCEPT
# Depending on your iptables policy, you may need to adjust to allow traffic to pass over the veth0 virtual connection
|
//#####################################################################
// Copyright 2004, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#ifndef COMPILE_WITHOUT_DYADIC_SUPPORT
#include <PhysBAM_Tools/Arrays/ARRAY.h>
#include <PhysBAM_Tools/Data_Structures/PAIR.h>
#include <PhysBAM_Tools/Grids_Dyadic/QUADTREE_CELL.h>
#include <PhysBAM_Tools/Grids_Uniform_Arrays/ARRAYS_ND.h>
#include <cstring>
using namespace PhysBAM;
//#####################################################################
// Per-child lookup tables: for child i (0..3), the four entries map that
// child's corners/edges into the shared 9-entry node array and the shared
// 12-entry face array of this QUADTREE_CHILDREN block.
template<class T> const int QUADTREE_CHILDREN<T>::
node_indices[4][4] ={{0,1,3,4},{1,2,4,5},{3,4,6,7},{4,5,7,8}};
//#####################################################################
template<class T> const int QUADTREE_CHILDREN<T>::
face_indices[4][4] ={{0,1,6,7},{1,2,9,10},{3,4,7,8},{4,5,10,11}};
//#####################################################################
// Function Create_Face_Compaction_Mapping_Helper
//#####################################################################
// Compact the face numbering for this subtree: the first time an old face
// index is encountered it receives the next compact index (recorded in
// mapping_array), then each face is rewritten to its compact index.
template<class T> void QUADTREE_CHILDREN<T>::
Create_Face_Compaction_Mapping_Helper(ARRAY<int>& mapping_array,int& face_count)
{
for(int i=0;i<12;i++){
// mapping_array(old_index) <= 0 marks an index that has not been remapped yet.
if(mapping_array(faces[i])<=0) mapping_array(faces[i])=++face_count;
faces[i]=mapping_array(faces[i]);}
// Recurse into refined children so the whole subtree is renumbered consistently.
for(int i=0;i<4;i++) if(children[i].Has_Children()) children[i].children->Create_Face_Compaction_Mapping_Helper(mapping_array,face_count);
}
//#####################################################################
// Function Create_Node_Compaction_Mapping_Helper
//#####################################################################
// Compact the node numbering for this subtree: the first time an old node
// index is encountered it receives the next compact index (recorded in
// mapping_array), then each of the 9 nodes is rewritten to its compact index.
template<class T> void QUADTREE_CHILDREN<T>::
Create_Node_Compaction_Mapping_Helper(ARRAY<int>& mapping_array,int& node_count)
{
for(int i=0;i<9;i++){
// mapping_array(old_index) <= 0 marks an index that has not been remapped yet.
if(mapping_array(nodes[i])<=0) mapping_array(nodes[i])=++node_count;
nodes[i]=mapping_array(nodes[i]);}
// Recurse into refined children so the whole subtree is renumbered consistently.
for(int i=0;i<4;i++) if(children[i].Has_Children()) children[i].children->Create_Node_Compaction_Mapping_Helper(mapping_array,node_count);
}
//#####################################################################
// Function Initialize_Root
//#####################################################################
// Initialize this structure as the children block holding the single root
// cell (child 0); fresh node/face numbers are allocated for its corners and
// edges when the optional nodes/faces arrays are present.
template<class T> void QUADTREE_CHILDREN<T>::
Initialize_Root_Cell(int& number_of_cells,int& number_of_nodes,int& number_of_faces,const VECTOR<T,2>& root_center,const VECTOR<T,2>& root_DX)
{
// NOTE(review): parents_center is offset half a cell from root_center,
// placing the virtual parent so that child 0 lands on the root — confirm.
parent=0;childrens_depth=1;childrens_DX=root_DX;parents_center=root_center+(T).5*root_DX;
int cell_input=0;children[0].Initialize(this,number_of_cells,cell_input);
if(nodes){
// Unused slots stay -1; child 0's four corner nodes get fresh numbers.
for(int i=0;i<9;i++) nodes[i]=-1;
for(int i=0;i<4;i++) nodes[node_indices[0][i]]=++number_of_nodes;}
if(faces){
// Unused slots stay -1; child 0's four faces get fresh numbers.
for(int i=0;i<12;i++) faces[i]=-1;
for(int i=0;i<4;i++) faces[face_indices[0][i]]=++number_of_faces;}
}
//#####################################################################
// Function Initialize_Pseudo_Root_Cells
//#####################################################################
// Initialize this structure as a block of four pseudo-root cells whose node
// and face numberings are supplied by the caller instead of being freshly
// allocated.
template<class T> void QUADTREE_CHILDREN<T>::
Initialize_Pseudo_Root_Cells(int& number_of_cells,ARRAY<int,VECTOR<int,1> >& nodes_input,ARRAY<int,VECTOR<int,1> >& faces_input,const VECTOR<T,2>& center_input,const VECTOR<T,2>& childrens_DX_input)
{
parent=0;
childrens_depth=1;
parents_center=center_input;childrens_DX=childrens_DX_input;
for(int i=0;i<4;i++) children[i].Initialize(this,number_of_cells,i);
// Copy the caller-provided numbering; assumes nodes_input/faces_input are
// indexable from 0 — TODO confirm the ARRAY index base used by callers.
if(nodes)for(int i=0;i<9;i++)nodes[i]=nodes_input(i);
if(faces)for(int i=0;i<12;i++)faces[i]=faces_input(i);
}
//#####################################################################
// Function Initialize_Non_Root_Cell
//#####################################################################
template<class T> void QUADTREE_CHILDREN<T>::
Initialize_Non_Root_Cell(QUADTREE_CELL<T>* parent_input,int& number_of_cells,ARRAY<QUADTREE_CELL<T>*>* new_cells,int& number_of_nodes,ARRAY<PAIR<QUADTREE_CELL<T>*,int> >* new_nodes,
    int& number_of_faces,ARRAY<PAIR<QUADTREE_CELL<T>*,int> >* new_faces,const VECTOR<T,2>& parents_center_in,const QUADTREE_GRID<T>* grid)
{
    // Builds the four children of parent_input, one refinement level deeper and at half the spacing.
    // Newly created cells/nodes/faces are appended to new_cells/new_nodes/new_faces (when non-null) so the
    // caller can track what this refinement added. Node and face indices shared with already-refined
    // neighbors are reused from those neighbors rather than allocated fresh.
    parent=parent_input;
    childrens_depth=parent->owner->childrens_depth+1;
    parents_center=parents_center_in;
    childrens_DX=(T).5*parent->owner->childrens_DX;
    for(int i=0;i<4;i++){children[i].Initialize(this,number_of_cells,i);if(new_cells) new_cells->Append(&(children[i]));}
    QUADTREE_CELL<T> *cell_left,*cell_right,*cell_bottom,*cell_top;
    QUADTREE_CHILDREN<T> *children_left=0,*children_right=0,*children_bottom=0,*children_top=0;
    if(nodes || faces){ // get all the cells that are needed
        cell_left=parent->Get_Neighbor(-1,0,grid);cell_right=parent->Get_Neighbor(1,0,grid);
        cell_bottom=parent->Get_Neighbor(0,-1,grid);cell_top=parent->Get_Neighbor(0,1,grid);
        children_left=(cell_left==0)?0:cell_left->children;children_right=(cell_right==0)?0:cell_right->children;
        children_bottom=(cell_bottom==0)?0:cell_bottom->children;children_top=(cell_top==0)?0:cell_top->children;}
    if(nodes){
        memset(nodes,0,sizeof(int)*9);
        // corner nodes from the parent cell
        nodes[Node_Index(0,0)]=parent->Node(0);nodes[Node_Index(2,0)]=parent->Node(1);nodes[Node_Index(0,2)]=parent->Node(2);nodes[Node_Index(2,2)]=parent->Node(3);
        // add a new center node
        nodes[Node_Index(1,1)]=++number_of_nodes;if(new_nodes) new_nodes->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],3));
        // edge-midpoint nodes: reuse the refined neighbor's node when it exists, otherwise allocate a new one
        if(children_left) nodes[Node_Index(0,1)]=children_left->nodes[Node_Index(2,1)];
        else{nodes[Node_Index(0,1)]=++number_of_nodes;if(new_nodes) new_nodes->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],2));}
        if(children_right) nodes[Node_Index(2,1)]=children_right->nodes[Node_Index(0,1)];
        else{nodes[Node_Index(2,1)]=++number_of_nodes;if(new_nodes) new_nodes->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[1],3));}
        if(children_bottom) nodes[Node_Index(1,0)]=children_bottom->nodes[Node_Index(1,2)];
        else{nodes[Node_Index(1,0)]=++number_of_nodes;if(new_nodes) new_nodes->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],1));}
        if(children_top) nodes[Node_Index(1,2)]=children_top->nodes[Node_Index(1,0)];
        else{nodes[Node_Index(1,2)]=++number_of_nodes;if(new_nodes) new_nodes->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[2],3));}}
    if(faces){ // add the new internal faces
        faces[face_indices[0][1]]=++number_of_faces;faces[face_indices[2][1]]=++number_of_faces; // x-faces
        faces[face_indices[0][3]]=++number_of_faces;faces[face_indices[1][3]]=++number_of_faces; // y-faces
        if(new_faces){
            new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],1));new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[2],1));
            new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],3));new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[1],3));}
        // boundary faces: reuse the refined neighbor's faces when it exists, otherwise allocate new ones
        if(children_left){
            faces[face_indices[0][0]]=children_left->faces[face_indices[1][1]];faces[face_indices[2][0]]=children_left->faces[face_indices[3][1]];}
        else{
            faces[face_indices[0][0]]=++number_of_faces;faces[face_indices[2][0]]=++number_of_faces;
            if(new_faces){
                new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],0));new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[2],0));}}
        if(children_right){
            faces[face_indices[1][1]]=children_right->faces[face_indices[0][0]];faces[face_indices[3][1]]=children_right->faces[face_indices[2][0]];}
        else{
            faces[face_indices[1][1]]=++number_of_faces;faces[face_indices[3][1]]=++number_of_faces;
            if(new_faces){
                new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[1],1));new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[3],1));}}
        if(children_bottom){
            faces[face_indices[0][2]]=children_bottom->faces[face_indices[2][3]];faces[face_indices[1][2]]=children_bottom->faces[face_indices[3][3]];}
        else{
            faces[face_indices[0][2]]=++number_of_faces;faces[face_indices[1][2]]=++number_of_faces;
            if(new_faces){
                new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[0],2));new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[1],2));}}
        if(children_top){
            faces[face_indices[2][3]]=children_top->faces[face_indices[0][2]];faces[face_indices[3][3]]=children_top->faces[face_indices[1][2]];}
        else{
            faces[face_indices[2][3]]=++number_of_faces;faces[face_indices[3][3]]=++number_of_faces;
            if(new_faces){
                new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[2],3));new_faces->Append(PAIR<QUADTREE_CELL<T>*,int>(&children[3],3));}}}
}
//#####################################################################
// Explicit template instantiations for the supported scalar types.
template class QUADTREE_CHILDREN<float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class QUADTREE_CHILDREN<double>;
#endif
#endif
|
#!/bin/sh
# Launch script for V2Ray on Heroku-like platforms.
#
# Fixes over the previous version:
# * `set -eu` aborts on any command failure or unset env var (PORT/ID/AID/
#   WSPATH), so a failed download or missing config can no longer start a
#   broken server.
# * All variable expansions are quoted.
# * The download URL carries an explicit https:// scheme instead of relying
#   on curl's plain-http default.
# * The server is started with `exec` so it replaces this shell and receives
#   the platform's signals directly.
set -eu

# Global variables
DIR_CONFIG="/etc/v2ray"
DIR_RUNTIME="/usr/bin"
DIR_TMP="$(mktemp -d)"

# Write V2Ray configuration; PORT, ID, AID and WSPATH are expanded from the
# environment into the heredoc.
cat << EOF > "${DIR_TMP}/heroku.json"
{
    "inbounds": [{
        "port": ${PORT},
        "protocol": "vmess",
        "settings": {
            "clients": [{
                "id": "${ID}",
                "alterId": ${AID}
            }]
        },
        "streamSettings": {
            "network": "ws",
            "wsSettings": {
                "path": "${WSPATH}"
            }
        }
    }],
    "outbounds": [{
        "protocol": "freedom"
    }]
}
EOF

# Get V2Ray executable release
curl --retry 10 --retry-max-time 60 -H "Cache-Control: no-cache" -fsSL "https://github.com/Tossy654/vdemo11/releases/latest/download/linux-64.zip" -o "${DIR_TMP}/v2ray_dist.zip"
busybox unzip "${DIR_TMP}/v2ray_dist.zip" -d "${DIR_TMP}"

# Convert to protobuf format configuration
mkdir -p "${DIR_CONFIG}"
"${DIR_TMP}/v2ctl" config "${DIR_TMP}/heroku.json" > "${DIR_CONFIG}/config.pb"

# Install V2Ray
install -m 755 "${DIR_TMP}/v2ray" "${DIR_RUNTIME}"
rm -rf "${DIR_TMP}"

# Run V2Ray as the foreground process of this container
exec "${DIR_RUNTIME}/v2ray" -config="${DIR_CONFIG}/config.pb"
|
<reponame>djfkahn/MemberHubDirectoryTools
#!/usr/bin/env python
"""This module defines the Roster class.
"""
class Roster:
    """Class Roster

    This class defines a roster storage class.

    ATTRIBUTES
    table            - A list of roster Families
    errata           - A dictionary of known roster errors and their corrected values
    hideErrataOutput - A flag indicating whether messages should be printed when
                       roster errors are identified
    """

    def __init__(self, show_errors=" ", file_name="roster_errata.csv"):
        """Initializes an empty roster and loads the errata dictionary.

        :param show_errors: 'y'/'Y' to print corrections, ''/None to stay quiet;
                            any other value triggers an interactive prompt.
        :param file_name:   path of the '|'-separated errata file; lines
                            starting with '#' are comments.
        :raises OSError: if the errata file cannot be opened.
        """
        # Initialize the roster table to an empty set
        self.table = []
        # Ask whether to show errors, and then hide the errata output as the
        # opposite of that answer
        while show_errors not in (None, '', 'y', 'Y'):
            show_errors = input("Print corrected roster errors to the screen? (press <enter> for \"no\", press \"y\" for \"yes\"): ")
        self.hideErrataOutput = not show_errors
        # Initialize the errata dictionary from the errata file. Using 'with'
        # guarantees the file is closed and, unlike the previous try/finally,
        # cannot raise NameError when open() itself fails.
        self.errata = {}
        with open(file_name) as open_file:
            for line in open_file:
                if line[0] != "#":
                    fields = line.split('|')
                    # Skip malformed lines (no '|') instead of crashing.
                    if len(fields) >= 2:
                        self.errata[fields[0]] = fields[1].strip("\r\n")

    def __len__(self):
        return len(self.table)

    def GetRoster(self):
        """Returns the list of roster families."""
        return self.table

    def GetErrata(self):
        """Returns the errata dictionary (bad name -> corrected name)."""
        return self.errata

    def Hide(self):
        """Returns True when errata corrections should not be printed."""
        return self.hideErrataOutput

    def append(self, new_family):
        """Adds a family to the roster table."""
        self.table.append(new_family)

    def ApplyErrata(self, full_name):
        """Roster.ApplyErrata
        Purpose: Replaces roster name fields with known errors with the correct name fields.
        INPUTS:
        - full_name -- The raw parent name field from the roster.
        OUTPUTS:
        - corrected full_name if this field is known to be in error
        - otherwise, the unmodified input full_name
        ASSUMPTIONS:
        - none.
        """
        if full_name in self.errata.keys():
            if not self.hideErrataOutput:
                print("-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-")
                print("Found Errata for: " + full_name)
                print("Will use " + self.errata[full_name] + " instead.")
            return self.errata[full_name]
        return full_name
|
import React, {useState, useEffect} from 'react';
import axios from 'axios';
const App = () => {
const [books, setBooks] = useState([]);
useEffect(() => {
axios
.get('/api/books?genre=Romance')
.then(res => setBooks(res.data));
}, []);
return (
<ul>
{books.map(book => (
<li key={book.name}>
{book.genre}: {book.name}
</li>
))}
</ul>
);
};
export default App; |
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
#

# Modify default IP
sed -i 's/192.168.1.1/192.168.0.1/g' package/base-files/files/bin/config_generate

# Modify the hostname: replace OpenWrt-836 with any name you like
# (must not be purely numeric and must not contain Chinese characters)
sed -i '/uci commit system/i\uci set system.@system[0].hostname='OpenWrt-836'' package/lean/default-settings/files/zzz-default-settings

# Show a custom build tag (with UTC+8 build date) in the version string
sed -i "s/OpenWrt /sec_an build $(TZ=UTC-8 date "+%y.%m.%d") @/g" package/lean/default-settings/files/zzz-default-settings

# Make argon the default LuCI theme; change it to any other theme you prefer
# (only effective for themes that do not force themselves back to the default)
sed -i 's/luci-theme-bootstrap/luci-theme-argon/g' feeds/luci/collections/luci/Makefile

# Clear the default password (log in without a password after flashing,
# then set your own)
sed -i 's@.*CYXluq4wUazHjmCDBCqXF*@#&@g' package/lean/default-settings/files/zzz-default-settings
|
<reponame>bioidiap/bob.db.hci_tagging<gh_stars>1-10
#!/usr/bin/env python
# encoding: utf-8
'''Utilities for Remote Photo-Plethysmography Benchmarking'''
import os
import numpy
import bob.io.video
import bob.ip.draw
import bob.ip.facedetect
from mne.preprocessing.ecg import qrs_detector
def bdf_load_signal(fn, name='EXG3', start=None, end=None):
  """Loads a signal named ``name`` from the BDF filenamed ``fn``

  Parameters:

    fn (path): The full path to the file to read
    name (str): The name of the channel to read.
    start (int, optional): Start time in seconds
    end (int, optional): End time in seconds

  Returns:

    A tuple ``(data, sample_frequency)``: a 1D float64 ``numpy.ndarray`` of
    samples restricted to the period in which the video was recorded, and the
    channel's sampling rate in Hz.

  List of physiological channels used (there are more available, but contain no
  meaningful data) on the Mahnob HCI-Tagging database:

  These are the 32 electrodes from the EEG cap (measurements in uV; for full
  positioning details, see the full database description report, available on
  the database website):

  * AF3
  * AF4
  * C3
  * C4
  * CP1
  * CP2
  * CP5
  * CP6
  * Cz
  * F3
  * F4
  * F7
  * F8
  * FC1
  * FC2
  * FC5
  * FC6
  * Fp1
  * Fp2
  * Fz
  * O1
  * O2
  * Oz
  * P3
  * P4
  * P7
  * P8
  * PO3
  * PO4
  * Pz
  * T7
  * T8

  These are ECG sensors (measurements in uV):

  * EXG1: Upper right corner of chest, under clavicle bone
  * EXG2: Upper left corner of chest, under clavicle bone
  * EXG3: Left side of abdomen (very clean)

  Other sensors:

  * GSR1: Galvanic skin response (in Ohm)
  * Resp: Respiration belt (in uV)
  * Status: Status channel containing markers (Boolean)
  * Temp: Skin temperature on the left pinky (Celsius)
  """
  import pyedflib

  if not os.path.exists(fn): #or the EdfReader will crash the interpreter
    raise IOError("file `%s' does not exist" % fn)

  with pyedflib.EdfReader(fn) as e:

    # get the status information, so we how the video is synchronized
    status_index = e.getSignalLabels().index('Status')
    sample_frequency = e.samplefrequency(status_index)
    status_size = e.samples_in_file(status_index)
    status = numpy.zeros((status_size,), dtype='float64')
    e.readsignal(status_index, 0, status_size, status)
    status = status.round().astype('int')
    nz_status = status.nonzero()[0]

    # because we're interested in the video bits, make sure to get data
    # from that period only
    video_start = nz_status[0]
    video_end = nz_status[-1]

    # retrieve information from this rather chaotic API
    index = e.getSignalLabels().index(name)
    sample_frequency = e.samplefrequency(index)

    # convert user-supplied second offsets into sample indices relative to
    # the start of the video, clamped to the video period
    video_start_seconds = video_start/sample_frequency

    if start is not None:
      start += video_start_seconds
      start *= sample_frequency
      if start < video_start: start = video_start
      start = int(start)
    else:
      start = video_start

    if end is not None:
      end += video_start_seconds
      end *= sample_frequency
      if end > video_end: end = video_end
      end = int(end)
    else:
      end = video_end

    # now read the data into a numpy array (read everything)
    container = numpy.zeros((end-start,), dtype='float64')
    e.readsignal(index, start, end-start, container)

    return container, sample_frequency
def estimate_average_heartrate(s, sampling_frequency):
  '''Estimates the average heart rate taking as base the input signal and its
  sampling frequency.

  This method will use the Pan-Tompkins detector available in the MNE package
  to clean-up and estimate the heart-beat frequency based on the ECG sensor
  information provided.

  Returns:

    A tuple ``(rate, peaks)``: the estimated average heart-rate in
    beats-per-minute (float) and the array of detected QRS peak indices.
  '''

  peaks = qrs_detector(sampling_frequency, s)
  # instantaneous rate (bpm) between each pair of consecutive beats
  instantaneous_rates = (sampling_frequency * 60) / numpy.diff(peaks)

  # remove instantaneous rates which are lower than 30, higher than 240
  selector = (instantaneous_rates>30) & (instantaneous_rates<240)
  return float(numpy.nan_to_num(instantaneous_rates[selector].mean())), peaks
def plot_signal(s, sampling_frequency, channel_name):
  '''Estimates the heart rate taking as base the input signal and its sampling
  frequency, plots QRS peaks discovered on the base signal.

  This method will use the Pan-Tompkins detector available in the MNE package
  to clean-up and estimate the heart-beat frequency based on the ECG sensor
  information provided. The plot is drawn on the current matplotlib axes.

  Returns:

    A tuple ``(avg, peaks)``: the estimated average heart-rate in
    beats-per-minute (float) and the array of detected QRS peak indices.
  '''

  import matplotlib.pyplot as plt

  avg, peaks = estimate_average_heartrate(s, sampling_frequency)

  ax = plt.gca()
  ax.plot(numpy.arange(0, len(s)/sampling_frequency, 1/sampling_frequency),
      s, label='Raw signal');
  xmin, xmax, ymin, ymax = plt.axis()
  # mark each detected QRS complex with a vertical line
  ax.vlines(peaks / sampling_frequency, ymin, ymax, colors='r', label='P-T QRS detector')
  plt.xlim(0, len(s)/sampling_frequency)
  plt.ylabel('uV')
  plt.xlabel('time (s)')
  plt.title('Channel %s - Average heart-rate = %d bpm' % (channel_name, avg))
  ax.grid(True)
  ax.legend(loc='best', fancybox=True, framealpha=0.5)

  return avg, peaks
def chooser(average_rates):
  '''Chooses the average heart-rate from the estimates of 3 sensors. Avoids
  rates from sensors which are far away from the other ones.

  :param average_rates: sequence of 3 per-sensor heart-rate estimates in bpm;
                        a value whose integer part is 0 counts as "no reading".
  :return: the selected heart-rate in bpm (0 if no sensor produced a reading).
  '''

  agreement = 3. #bpm

  # keep only sensors that actually produced a (non-zero) reading
  non_zero = [k for k in average_rates if int(k)]

  if len(non_zero) == 0: return 0 #unknown!
  elif len(non_zero) == 1: return non_zero[0]
  elif len(non_zero) == 2:
    agree = abs(non_zero[0] - non_zero[1]) < agreement
    if agree: return numpy.mean(non_zero)
    else: #chooses the lowest
      return sorted(non_zero)[0]

  # else, there are 3 values and we must do a more complex heuristic.
  # NOTE: the original code repeated this whole decision tree a second time
  # below the first copy; every path of the first copy returns, so the
  # duplicate was unreachable and has been removed.
  r0_agrees_with_r1 = abs(average_rates[0] - average_rates[1]) < agreement
  r0_agrees_with_r2 = abs(average_rates[0] - average_rates[2]) < agreement
  r1_agrees_with_r2 = abs(average_rates[1] - average_rates[2]) < agreement

  if r0_agrees_with_r1:
    if r1_agrees_with_r2: #all 3 agree
      return numpy.mean(average_rates)
    else: #exclude r2
      return numpy.mean(average_rates[:2])
  else:
    if r1_agrees_with_r2: #exclude r0
      return numpy.mean(average_rates[1:])
    else: #no agreement at all pick mid-way
      return sorted(average_rates)[1]
def annotate_video(video, annotations, output, thickness=3,
    color=(255, 0, 0)):
  '''Annotates the input video with the detected bounding boxes

  Parameters:

    video: an iterable of frames exposing ``height``, ``width``,
      ``frame_rate`` and ``codec_name`` (e.g. a ``bob.io.video`` reader)
    annotations (dict): maps frame index -> bounding box object with
      ``topleft`` and ``size`` attributes, or ``None`` for frames without a
      detection
    output (str): path of the annotated video to write; missing parent
      directories are created
    thickness (int): number of nested boxes drawn, producing a border
      ``thickness`` pixels wide
    color (tuple): RGB color of the box border
  '''

  directory = os.path.dirname(output)
  if not os.path.exists(directory): os.makedirs(directory)

  writer = bob.io.video.writer(output, height=video.height, width=video.width,
      framerate=video.frame_rate, codec=video.codec_name)
  for k, frame in enumerate(video):
    bb = annotations.get(k)
    if bb is not None:
      # draw 'thickness' concentric boxes to fake a thick border
      for t in range(thickness):
        bob.ip.draw.box(frame, bb.topleft, bb.size, color)
    writer.append(frame)
  # deleting the writer closes/flushes the output file
  del writer
def explain_heartrate(obj, dbdir, output):
  '''Explains why the currently chosen heart-rate is what it is

  Writes a multi-page PDF at ``output`` with one page per ECG channel
  (EXG1-EXG3), each showing the raw signal and the detected QRS peaks.

  Parameters:

    obj: a database sample object providing ``make_path(dbdir)``
    dbdir (str): root directory of the database files
    output (str): path of the PDF to write; missing parent directories are
      created
  '''

  import matplotlib
  matplotlib.use('agg')
  import matplotlib.pyplot as plt
  from matplotlib.backends.backend_pdf import PdfPages

  directory = os.path.dirname(output)
  if not os.path.exists(directory): os.makedirs(directory)

  # plots
  estimates = []
  pp = PdfPages(output)
  for k, channel in enumerate(('EXG1', 'EXG2', 'EXG3')):
    plt.figure(figsize=(12,4))
    signal, freq = bdf_load_signal(obj.make_path(dbdir), channel)
    avg_hr, peaks = plot_signal(signal, freq, channel)
    estimates.append(avg_hr)
    pp.savefig()
  # NOTE(review): 'estimated' is computed but never returned nor rendered in
  # the PDF -- confirm whether the chosen value should be reported somewhere.
  estimated = chooser(estimates)
  pp.close()
|
def irange(a, b):
    """Inclusive integer range: the list [a, a+1, ..., b] (empty when a > b)."""
    return [value for value in range(a, b + 1)]
class CTCInfo:
    """Static metadata for the Cell Tracking Challenge (CTC) sequences."""

    class DBSplits:
        """Named index splits into the ``sequences`` table below.

        ``*_r`` attributes cover the train sequences (ids 0-19), ``*_e``
        attributes the test sequences (ids 20-39), and the unsuffixed
        attributes are the union of the two.
        """

        def __init__(self):
            # train splits
            self.all_r = irange(0, 19)
            self.bf_r = irange(0, 3)
            self.bf1_r = irange(0, 1)
            self.bf2_r = irange(2, 3)
            self.dic_r = irange(4, 5)
            self.fluo_r = irange(6, 15)
            self.fluo1_r = irange(6, 11)
            self.fluo2_r = irange(12, 15)
            self.huh_r = irange(6, 7)
            self.gow_r = irange(8, 9)
            self.sim_r = irange(10, 11)
            self.hela_r = irange(14, 15)
            self.phc_r = irange(16, 19)
            self.phc1_r = irange(16, 17)
            self.phc2_r = irange(18, 19)
            # test splits (train split shifted by 20)
            self.all_e = irange(20, 39)
            self.bf_e = irange(20, 23)
            self.bf1_e = irange(20, 21)
            self.bf2_e = irange(22, 23)
            self.dic_e = irange(24, 25)
            self.fluo_e = irange(26, 35)
            self.fluo1_e = irange(26, 31)
            self.fluo2_e = irange(32, 35)
            self.huh_e = irange(26, 27)
            self.gow_e = irange(28, 29)
            self.sim_e = irange(30, 31)
            self.hela_e = irange(34, 35)
            self.phc_e = irange(36, 39)
            self.phc1_e = irange(36, 37)
            self.phc2_e = irange(38, 39)
            # combined train+test splits
            self.all = self.all_r + self.all_e
            self.bf = self.bf_r + self.bf_e
            self.bf1 = self.bf1_r + self.bf1_e
            self.bf2 = self.bf2_r + self.bf2_e
            self.dic = self.dic_r + self.dic_e
            self.fluo = self.fluo_r + self.fluo_e
            self.fluo1 = self.fluo1_r + self.fluo1_e
            self.fluo2 = self.fluo2_r + self.fluo2_e
            self.huh = self.huh_r + self.huh_e
            self.gow = self.gow_r + self.gow_e
            self.sim = self.sim_r + self.sim_e
            self.hela = self.hela_r + self.hela_e
            self.phc = self.phc_r + self.phc_e
            self.phc1 = self.phc1_r + self.phc1_e
            self.phc2 = self.phc2_r + self.phc2_e

    # Maps sequence id -> (sequence name, number of frames).
    sequences = {
        # train
        0: ('BF-C2DL-HSC_01', 1764),
        1: ('BF-C2DL-HSC_02', 1764),
        2: ('BF-C2DL-MuSC_01', 1376),
        3: ('BF-C2DL-MuSC_02', 1376),
        4: ('DIC-C2DH-HeLa_01', 84),
        5: ('DIC-C2DH-HeLa_02', 84),
        6: ('Fluo-C2DL-Huh7_01', 30),
        7: ('Fluo-C2DL-Huh7_02', 30),
        8: ('Fluo-N2DH-GOWT1_01', 92),
        9: ('Fluo-N2DH-GOWT1_02', 92),
        10: ('Fluo-N2DH-SIM_01', 65),
        11: ('Fluo-N2DH-SIM_02', 150),
        12: ('Fluo-C2DL-MSC_01', 48),
        13: ('Fluo-C2DL-MSC_02', 48),
        14: ('Fluo-N2DL-HeLa_01', 92),
        15: ('Fluo-N2DL-HeLa_02', 92),
        16: ('PhC-C2DH-U373_01', 115),
        17: ('PhC-C2DH-U373_02', 115),
        18: ('PhC-C2DL-PSC_01', 300),
        19: ('PhC-C2DL-PSC_02', 300),
        # test
        20: ('BF-C2DL-HSC_Test_01', 1764),
        21: ('BF-C2DL-HSC_Test_02', 1764),
        22: ('BF-C2DL-MuSC_Test_01', 1376),
        23: ('BF-C2DL-MuSC_Test_02', 1376),
        24: ('DIC-C2DH-HeLa_Test_01', 115),
        25: ('DIC-C2DH-HeLa_Test_02', 115),
        26: ('Fluo-C2DL-Huh7_Test_01', 30),
        27: ('Fluo-C2DL-Huh7_Test_02', 30),
        28: ('Fluo-N2DH-GOWT1_Test_01', 92),
        29: ('Fluo-N2DH-GOWT1_Test_02', 92),
        30: ('Fluo-N2DH-SIM_Test_01', 110),
        31: ('Fluo-N2DH-SIM_Test_02', 138),
        32: ('Fluo-C2DL-MSC_Test_01', 48),
        33: ('Fluo-C2DL-MSC_Test_02', 48),
        34: ('Fluo-N2DL-HeLa_Test_01', 92),
        35: ('Fluo-N2DL-HeLa_Test_02', 92),
        36: ('PhC-C2DH-U373_Test_01', 115),
        37: ('PhC-C2DH-U373_Test_02', 115),
        # NOTE(review): ids 38/39 map _Test_02 before _Test_01, unlike every
        # other pair in this table -- confirm this ordering is intentional.
        38: ('PhC-C2DL-PSC_Test_02', 300),
        39: ('PhC-C2DL-PSC_Test_01', 300),
    }
|
<filename>modules/client/src/state/queries/queries.js
import { gql } from 'apollo-boost';
// GraphQL query fetching every user (id/username/email) together with the
// user's blog (id, title, text) and role (id, name).
export const GET_USERS_BLOGS = gql`
  query {
    users {
      edges {
        node {
          id,
          username,
          email
          blog {
            id,
            title,
            text
          },
          role {
            id,
            name
          }
        }
      }
    }
  }
`
<reponame>TyWMick/TyWMick.github.io
import fs from "fs/promises";
import { join } from "path";
import React from "react";
import { Viewer } from "photo-sphere-viewer";
import { GyroscopePlugin } from "photo-sphere-viewer/dist/plugins/gyroscope";
import PhotoSphereViewer from "../../components/PhotoSphereViewer";
import FullWindow from "../../components/FullWindow";
// Renders a full-window 360° photo-sphere viewer for the photo with the given
// id (the image is served from /360-photos/<id>.jpg).
export default function Photo({ id }) {
  return (
    <PhotoSphereViewer
      as={FullWindow}
      panorama={`/360-photos/${id}.jpg`}
      fisheye={true}
      navbar={["autorotate", "zoom", "gyroscope", "fullscreen"]}
      plugins={[GyroscopePlugin]}
      onceReady={startAutorotate}
    />
  );
}

// Begin auto-rotating as soon as the viewer is ready.
function startAutorotate(viewer: Viewer) {
  viewer.startAutorotate();
}
// Next.js data hook: expose the dynamic route segment as the page's `id` prop.
export async function getStaticProps(context) {
  const { id } = context.params;
  return { props: { id } };
}
// Next.js data hook: build one static path per .jpg file found in
// public/360-photos, using the file name (without extension) as the id.
export async function getStaticPaths() {
  const photosDirectory = join(process.cwd(), "public/360-photos");
  const fileNames: string[] = await fs.readdir(photosDirectory);
  const ids = fileNames.map((fileName) => fileName.replace(".jpg", ""));
  return {
    paths: ids.map((id) => ({ params: { id } })),
    fallback: false,
  };
}
|
<filename>src/core/query-history.ts
import {IOUsage, TimingInformation} from "amazon-qldb-driver-nodejs";
import {Value} from "ion-js/dist/commonjs/es6/dom";
import * as fs from "fs";
import * as readline from "readline";
// File (relative to the working directory) where query history is persisted,
// one JSON document per line.
const HISTORY_FILE = ".qldb-quark-history";

// Aggregated execution statistics for a single query.
export type QueryStats = { timingInformation: { processingTimeMilliseconds: number }; consumedIOs: { readIOs: number } };

// One persisted history record: the query text, its result documents and its
// aggregated statistics.
export interface QueryHistoryEntry {
    text: string;
    result: Value[];
    queryStats: QueryStats
}

// React-style state setter for the in-memory history list.
export type SetHistoryFn = (value: (((prevState: QueryHistoryEntry[]) => QueryHistoryEntry[]) | QueryHistoryEntry[])) => void;
// Streams the history file line by line, appending each parsed entry to the
// history state as it arrives (asynchronously).
// NOTE(review): if HISTORY_FILE does not exist, the read stream emits an
// unhandled 'error' event -- confirm the file is guaranteed to exist before
// the first call.
export function loadHistory(setHistory: SetHistoryFn) {
    readline.createInterface(fs.createReadStream(HISTORY_FILE)).on("line", l => {
        setHistory(history => [...history, JSON.parse(l)]);
    });
}
/**
 * Collapses the per-statement statistics reported by the QLDB driver into
 * single totals: the sum of read IOs and the sum of processing times.
 */
export function flattenQueryStats(queryStats: { timingInformation: TimingInformation[]; consumedIOs: IOUsage[] }) {
    let readIOs = 0;
    for (const io of queryStats.consumedIOs) {
        readIOs += io.getReadIOs();
    }

    let processingTimeMilliseconds = 0;
    for (const timeInfo of queryStats.timingInformation) {
        processingTimeMilliseconds += timeInfo.getProcessingTimeMilliseconds();
    }

    return {
        consumedIOs: { readIOs },
        timingInformation: { processingTimeMilliseconds },
    };
}
// Appends a single history entry to the history file as one JSON line.
// Synchronous on purpose: callers rely on entries being written in order.
function append(historyEntry: { result: Value[]; queryStats: { timingInformation: { processingTimeMilliseconds: number }; consumedIOs: { readIOs: number } }; text: string }) {
    fs.appendFileSync(HISTORY_FILE, JSON.stringify(historyEntry) + "\n");
}
// Records a completed query: flattens its driver statistics, persists the
// entry to the history file, and appends it to the in-memory history state.
export function recordHistory(text: string, result: Value[], queryStats: { timingInformation: TimingInformation[]; consumedIOs: IOUsage[] }, setHistory: SetHistoryFn) {
    const historyEntry = { text, result, queryStats: flattenQueryStats(queryStats) };
    append(historyEntry);
    setHistory(history => [...history, historyEntry]);
}
// Rewrites the whole history file so it contains exactly the given entries.
// NOTE(review): fs.truncateSync throws if HISTORY_FILE does not exist yet --
// confirm callers only invoke this after the file has been created.
export function replaceHistory(history: QueryHistoryEntry[]) {
    fs.truncateSync(HISTORY_FILE);
    history.forEach(append);
}
-- Relational-division style query: returns the elements present in the orders
-- of BOTH customers 1 and 2 (each must contribute one of the 2 counted rows).
-- NOTE(review): assumes (cust_id, common_element) pairs are unique; with
-- duplicates, a single customer could satisfy count(*) = 2 alone -- confirm,
-- or use HAVING COUNT(DISTINCT cust_id) = 2.
SELECT common_element
FROM orders
WHERE cust_id IN (1,2)
GROUP BY common_element
HAVING count(*) = 2;
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Enable tracing in this script off by setting the TRACE variable in your
# environment to any value:
#
#  $ TRACE=1 test.sh
TRACE=${TRACE:-""}
if [ -n "$TRACE" ]; then
  set -x
fi

# By setting INJECT_KB_VERSION variable in your environment, KB will be compiled
# with this version. This is to assist testing functionality which depends on
# version .e.g gopkg.toml generation.
#
#  $ INJECT_KB_VERSION=0.1.7 test.sh
INJECT_KB_VERSION=${INJECT_KB_VERSION:-unknown}

# Make sure, we run in the root of the repo and
# therefore run the tests on all packages
base_dir="$( cd "$(dirname "$0")/" && pwd )"
cd "$base_dir" || {
  echo "Cannot cd to '$base_dir'. Aborting." >&2
  exit 1
}

# Find which GOPATH entry (if any) contains the current directory.
go_workspace=''
for p in ${GOPATH//:/ }; do
  if [[ $PWD/ = $p/* ]]; then
    go_workspace=$p
  fi
done

# Quoting "$go_workspace" is required here: unquoted, an empty value turned
# this test into `[ -z ]` (which only passed by accident), and a GOPATH entry
# containing spaces broke the test entirely.
if [ -z "$go_workspace" ]; then
  echo 'Current directory is not in $GOPATH' >&2
  exit 1
fi

# k8s_version=1.11.0
k8s_version=1.14.1
goarch=amd64
goos="unknown"

if [[ "$OSTYPE" == "linux-gnu" ]]; then
  goos="linux"
elif [[ "$OSTYPE" == "darwin"* ]]; then
  goos="darwin"
fi

if [[ "$goos" == "unknown" ]]; then
  echo "OS '$OSTYPE' not supported. Aborting." >&2
  exit 1
fi

# Turn colors in this script off by setting the NO_COLOR variable in your
# environment to any value:
#
#  $ NO_COLOR=1 test.sh
NO_COLOR=${NO_COLOR:-""}
if [ -z "$NO_COLOR" ]; then
  header=$'\e[1;33m'
  reset=$'\e[0m'
else
  header=''
  reset=''
fi
# Prints its arguments as a (possibly colored) section header.
function header_text {
  printf '%s\n' "${header}$*${reset}"
}
rc=0  # NOTE(review): set but never read in this script -- confirm still needed
tmp_root=/tmp
kb_root_dir=$tmp_root/kubebuilder
# Remember the kubebuilder source directory so we can return to it after
# cd-ing into the scratch test project.
kb_orig=$(pwd)

# Skip fetching and untaring the tools by setting the SKIP_FETCH_TOOLS variable
# in your environment to any value:
#
#  $ SKIP_FETCH_TOOLS=1 ./test.sh
#
# If you skip fetching tools, this script will use the tools already on your
# machine, but rebuild the kubebuilder and kubebuilder-bin binaries.
SKIP_FETCH_TOOLS=${SKIP_FETCH_TOOLS:-""}
# Resets the staging area: a full fetch wipes the whole directory, while a
# tools-skipping run only removes the previously built kubebuilder binary.
function prepare_staging_dir {
  header_text "preparing staging dir"

  if [ -n "$SKIP_FETCH_TOOLS" ]; then
    rm -f "$kb_root_dir/kubebuilder/bin/kubebuilder"
  else
    rm -rf "$kb_root_dir"
  fi
}
# fetch k8s API gen tools and make it available under kb_root_dir/bin.
function fetch_tools {
  if [ -n "$SKIP_FETCH_TOOLS" ]; then
    return 0
  fi

  header_text "fetching tools"
  kb_tools_archive_name="kubebuilder-tools-$k8s_version-$goos-$goarch.tar.gz"
  kb_tools_download_url="https://storage.googleapis.com/kubebuilder-tools/$kb_tools_archive_name"

  kb_tools_archive_path="$tmp_root/$kb_tools_archive_name"
  if [ ! -f "$kb_tools_archive_path" ]; then
    # --fail makes curl exit non-zero on an HTTP error instead of saving the
    # error page as the archive (which previously made tar fail confusingly).
    curl --fail -sL "${kb_tools_download_url}" -o "$kb_tools_archive_path"
  fi
  tar -zvxf "$kb_tools_archive_path" -C "$tmp_root/"
}
# Builds the kubebuilder binary into the staging dir, optionally stamping it
# with INJECT_KB_VERSION via -ldflags.
function build_kb {
  header_text "building kubebuilder"

  # Collect build flags in an array so the -ldflags value (which contains
  # spaces) survives word splitting. The previous form
  #   opts=-ldflags "-X ..."
  # was a variable-assignment prefix that tried to EXECUTE the "-X ..."
  # string as a command.
  opts=()
  if [ "$INJECT_KB_VERSION" != "unknown" ]; then
    opts=(-ldflags "-X sigs.k8s.io/kubebuilder/cmd/version.kubeBuilderVersion=$INJECT_KB_VERSION")
  fi

  # ${opts[@]+...} keeps `set -o nounset` happy on bash < 4.4 when the array
  # is empty.
  go build ${opts[@]+"${opts[@]}"} -o "$tmp_root/kubebuilder/bin/kubebuilder" ./cmd
}
# Creates a fresh scratch project directory inside the GOPATH workspace and
# cds into it; subsequent kubebuilder commands run from there.
function prepare_testdir_under_gopath {
  kb_test_dir="${go_workspace}/src/sigs.k8s.io/kubebuilder-test"
  header_text "preparing test directory $kb_test_dir"
  rm -rf "$kb_test_dir" && mkdir -p "$kb_test_dir" && cd "$kb_test_dir"
  header_text "running kubebuilder commands in test directory $kb_test_dir"
}
# Points the test harness at the freshly staged binaries and the kind cluster.
function setup_envs {
  header_text "setting up env vars"

  # Setup env vars
  export PATH=$tmp_root/kubebuilder/bin:$PATH
  export TEST_ASSET_KUBECTL=$tmp_root/kubebuilder/bin/kubectl
  export TEST_ASSET_KUBE_APISERVER=$tmp_root/kubebuilder/bin/kube-apiserver
  export TEST_ASSET_ETCD=$tmp_root/kubebuilder/bin/etcd
  export TEST_DEP=$tmp_root/kubebuilder/init_project
  # NOTE(review): requires a running `kind` cluster named "kind"; this command
  # fails otherwise -- confirm cluster creation happens before setup_envs.
  export KUBECONFIG="$(kind get kubeconfig-path --name="kind")"
}
# download_vendor_archive downloads the vendor tarball for v1 projects,
# replacing any previously downloaded copy.
function download_vendor_archive {
  archive_name="vendor.v1.tgz"
  archive_download_url="https://storage.googleapis.com/kubebuilder-vendor/$archive_name"
  archive_path="$tmp_root/$archive_name"
  header_text "checking the path $archive_path to download the $archive_name"
  if [ -f "$archive_path" ]; then
    header_text "removing file which exists"
    rm "$archive_path"
  fi
  header_text "downloading vendor archive from $archive_download_url"
  # --fail avoids saving an HTTP error page as the tarball (consistent with
  # fetch_tools); paths are quoted against word splitting.
  curl --fail -sL "${archive_download_url}" -o "$archive_path"
}
# Re-downloads the vendor tarball and unpacks it into the current project so
# its Go dependencies are available.
function restore_go_deps {
  header_text "restoring Go dependencies"
  download_vendor_archive
  tar -zxf "$tmp_root/vendor.v1.tgz"
}
# Snapshots the current project directory into $TEST_DEP so later tests can
# restore it instead of re-initializing from scratch.
function cache_project {
  header_text "caching initialized projects"
  # rm -rf is a no-op when the cache does not exist yet, so no guard needed
  rm -rf "$TEST_DEP"
  mkdir -p "$TEST_DEP"
  cp -r "$PWD"/* "$TEST_DEP"
}
# Restores the cached project (if any) into the current directory and then
# refreshes its vendored Go dependencies; a missing cache is a no-op.
function dump_project {
  header_text "restoring cached project"
  if [ ! -d "$TEST_DEP" ]; then
    return 0
  fi
  cp -r "$TEST_DEP"/* .
  restore_go_deps
}
|
<gh_stars>1-10
// guacamole provides a seekable PRNG.
//
// This package provides a pseudo-random number generator (PRNG) that provides
// constant-time indexing into the random number stream. Where most PRNGs do
// not provide correlation between the seed and the input stream, guacamole
// guarantees that consecutive seeds will produce outputs exactly 64B apart. For
// example, seeding the generator with some value i and reading 64B of data has
// the exact same effect as seeding with the value i+1.
//
// This linear seeding property makes it possible to use the random number
// stream to reproducibly generate and verify data in tests. For example, the
// armnod package transforms raw bytes from guacamole into human-readable
// strings that use a specified character set. In general, the ability to use
// an integer index into the random stream makes it easier to generate random
// data and reconstruct the data for validation by seeking to the specified
// offset in the random stream.
//
// The implementation of guacamole is derived from the Salsa stream cipher from
// DJB. The name stems from a misunderstanding of DJB's naming conventions in
// which Salsa the dance was confused with Salsa the delicious chip dip. The
// changes from Salsa were largely to choose a constant key and inline many
// values in the algorithm. For amd64 hardware, there's a hand-written
// implementation of guacamole that offers even more performance and is
// byte-wise identical to the regular C implementation.
//
// For historical reasons, the package also includes routines for drawing
// numbers from a Zipf distribution and for scrambling integers in pseudo-random
// ways.
//
// In addition to being great at random byte generation, the module gives many
// opportunities for puns about "bytes of guacamole".
package guacamole
// #cgo LDFLAGS: -lm
// #include <errno.h>
// #include <stdlib.h>
// #include "guacamole.h"
import "C"
import (
"encoding/binary"
"unsafe"
)
const (
	// BlockSize is the granularity of the generator: guacamole internally
	// generates 64 bytes at a time. Adjacent seeds will produce data that is
	// exactly BlockSize bytes offset in the stream. In general, two seeds i
	// and j will be exactly (i - j)*64 bytes offset in their respective
	// streams.
	BlockSize = 64
)

// init enables the assembly implementation automatically when the processor
// supports it.
func init() {
	MaybeEnableAssembly()
}

// DisableAssembly ensures that the assembly implementation cannot be used, even
// on processors with the necessary primitives. This is largely intended for
// debugging, but is exposed in case it is more broadly useful.
func DisableAssembly() {
	C.guacamole_disable_assembly()
}

// MaybeEnableAssembly allows the use of the optimized assembly implementation
// if the processor is detected to support the necessary SSE 4.1 instructions.
// This function is called by default because there should be no harm in using
// the fastest implementation available.
func MaybeEnableAssembly() {
	C.guacamole_maybe_enable_assembly()
}

// New creates a new guacamole generator. The generator comes seeded at 0 and
// is ready to eat... err... use.
func New() *Guacamole {
	g := &Guacamole{}
	g.Seed(0)
	return g
}
// Guacamole is the central type for generating random bytes. It is safe to
// initialize this directly instead of allocating it via New, but the behavior
// is undefined until the first call to Seed. New calls Seed directly before
// returning.
type Guacamole struct {
	// guac holds the underlying C generator state.
	guac C.struct_guacamole
}

// Seed the guacamole (would that be "avocado"?). The seed function is fast and
// safe to call relatively frequently.
func (g *Guacamole) Seed(s uint64) {
	C.guacamole_seed(&g.guac, C.uint64_t(s))
}

// Seek positions the generator at the given byte offset of the stream that
// begins at seed s. Like Seed, it is fast and safe to call frequently.
func (g *Guacamole) Seek(s, offset uint64) {
	// Whole blocks are covered by advancing the seed (each seed step is
	// BlockSize bytes); the remainder is consumed and discarded.
	s = s + offset/BlockSize
	g.Seed(s)
	remainder := offset % BlockSize
	if remainder > 0 {
		_ = g.Bytes(remainder)
	}
}

// String constructs a string of the next sz random bytes of guacamole.
func (g *Guacamole) String(sz uint64) string {
	return string(g.Bytes(sz))
}

// Bytes constructs a slice of the next sz random bytes of guacamole.
func (g *Guacamole) Bytes(sz uint64) []byte {
	bytes := make([]byte, sz)
	g.Fill(bytes)
	return bytes
}
// Fill fills the provided slice with random guacamole bytes. An empty or nil
// slice is a no-op; previously this unconditionally took &bytes[0] and
// panicked with an index-out-of-range on zero-length input.
func (g *Guacamole) Fill(bytes []byte) {
	if len(bytes) == 0 {
		return
	}
	C.guacamole_generate(&g.guac, unsafe.Pointer(&bytes[0]), C.size_t(len(bytes)))
}
// Uint64 returns a new uint64 that is uniformly distributed throughout the 2^64
// space.
func (g *Guacamole) Uint64() uint64 {
	var bytes [8]byte
	g.Fill(bytes[:])
	return binary.BigEndian.Uint64(bytes[:])
}

// Float64 generates a random float64 in the range [0, 1).
func (g *Guacamole) Float64() float64 {
	return float64(C.guacamole_double(&g.guac))
}

// ZipfParams specify a set of elements and the parameters to select them
// according to a zipf distribution. Due to the approximation used, it is
// possible the last couple elements of N may not be generated. This may be a
// bug, or it may be an expected result of Gray's Zipf algorithm.
type ZipfParams struct {
	// gzp holds the precomputed distribution constants used by the C side.
	gzp C.struct_guacamole_zipf_params
}

// N specifies the number of elements in the set from which values are selected.
func (z *ZipfParams) N() uint64 {
	return uint64(z.gzp.n)
}

// Dump returns all internal distribution constants, primarily for debugging
// and testing.
func (z *ZipfParams) Dump() (N uint64, alpha, theta, zetan, zeta2, eta float64) {
	N = uint64(z.gzp.n)
	alpha = float64(z.gzp.alpha)
	theta = float64(z.gzp.theta)
	zetan = float64(z.gzp.zetan)
	zeta2 = float64(z.gzp.zeta2)
	eta = float64(z.gzp.eta)
	return
}
// ZipfAlpha returns ZipfParams to draw from n elements with the provided alpha
// parameter.
func ZipfAlpha(n uint64, alpha float64) *ZipfParams {
zp := &ZipfParams{}
C.guacamole_zipf_init_alpha(C.uint64_t(n), C.double(alpha), &zp.gzp)
return zp
}
// BUG(rescrv): Zipf may not return the last few (on the order of 1%) elements
// of the random set. See the ZipfParams struct for details.

// ZipfTheta returns ZipfParams to draw from n elements with the provided theta
// parameter. (Comment previously said "ZipfAlpha" — copy-paste error.)
func ZipfTheta(n uint64, theta float64) *ZipfParams {
	zp := &ZipfParams{}
	C.guacamole_zipf_init_theta(C.uint64_t(n), C.double(theta), &zp.gzp)
	return zp
}
// Zipf returns an element from the provided ZipfParams. The return value will
// be in the range [1, N]. The draw consumes state from g, so successive calls
// yield different elements.
func (g *Guacamole) Zipf(zp *ZipfParams) uint64 {
	return uint64(C.guacamole_zipf(&g.guac, &zp.gzp))
}
// Scrambler turns any set of uint64 numbers into a completely jumbled
// set of uint64 numbers. The function guarantees that each input will map to a
// unique output no matter how much of the input space is used.
type Scrambler struct {
	// scr holds the C-side state for the currently selected bijection.
	scr C.struct_guacamole_scrambler
}
// NewScrambler returns a scrambler initialized to bijection 0
// (equivalent to calling Change(0) on a zero-value Scrambler).
func NewScrambler() *Scrambler {
	var s Scrambler
	s.Change(0)
	return &s
}
// Change the bijection used to the scrambler to the one provided. Each
// bijection is deterministic, so it is always possible to remember the
// bijection number and later recover the same mapping.
func (s *Scrambler) Change(bijection uint64) {
	C.guacamole_scrambler_change(&s.scr, C.uint64_t(bijection))
}
// Scramble x through the bijection to generate a unique value for it. The
// function is deterministic and will produce the same output for scramblers
// with the same change and x value. Note that while a scrambler is a
// bijection, the scramble function is one way; there is [currently] no way to
// reverse the mapping efficiently.
func (s *Scrambler) Scramble(x uint64) uint64 {
	return uint64(C.guacamole_scramble(&s.scr, C.uint64_t(x)))
}
|
def maxSumSubArray(array):
    """Return the maximum sum of any contiguous subarray (Kadane's algorithm).

    The empty subarray is allowed, so an all-negative (or empty) input
    yields 0 — matching the running-sum reset-at-zero convention.
    """
    best = 0
    running = 0
    for value in array:
        # Extend the current run, but never let it drop below zero:
        # a negative prefix can only hurt any subarray that includes it.
        running = max(0, running + value)
        best = max(best, running)
    return best


if __name__ == '__main__':
    array = [-2, -3, 4, -1, -2, 1, 5, -3]
    print(maxSumSubArray(array))
<filename>src/app/paths/ap/donate-edit.ts
import {apDonatePath} from "./donate"

// Route for the donate "edit" page, built by appending "/edit" to the base
// donate path. The empty type argument {} means this segment introduces no
// new path parameters.
export const apDonateEditPath = apDonatePath.appendPathSegment<{}>("/edit");
# Module-level sample input.
num_list = [2, 3, 4, 5]


def process_list(num_list):
    """Return a new list containing the square of each number.

    Note: the parameter intentionally shadows the module-level ``num_list``.
    """
    return [num * num for num in num_list]
<reponame>vaginessa/ApkAnalyser<filename>apkanalyser/src/jerl/blockformat/BFReader.java
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jerl.blockformat;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.util.Stack;
import java.util.StringTokenizer;
/**
 * Reader for a LaTeX-like "block format": {@code \begin{name}[arg1, arg2]} /
 * {@code \end{name}} delimit nested blocks, {@code key=value} lines are
 * properties, and lines starting with {@code #} are comments. Parsing events
 * are reported to a {@link BFVisitor}.
 */
public class BFReader {

    /** Marker that opens a block: {@code \begin{name}[args]}. */
    private static final String BEGIN_HEADER = "\\begin{";
    /** Marker that closes a block: {@code \end{name}}. */
    private static final String END_HEADER = "\\end{";

    /** Names of blocks currently open, used to verify proper nesting. */
    private final Stack<String> blockStack = new Stack<String>();
    private final LineNumberReader reader;

    public BFReader(InputStream is) {
        reader = new LineNumberReader(new InputStreamReader(is));
    }

    /**
     * Parses the stream line by line, reporting begin-block, end-block and
     * property events to the visitor.
     *
     * @param visitor receives one callback per recognized line
     * @throws IOException if reading the underlying stream fails
     * @throws BFParseException on mismatched block nesting or malformed lines
     */
    public void accept(BFVisitor visitor) throws IOException, BFParseException {
        String line = null;
        try {
            while ((line = reader.readLine()) != null) {
                String lineTrim = line.trim();
                if (lineTrim.startsWith("#")) {
                    continue; // comment line
                }
                else if (line.indexOf(BEGIN_HEADER) != -1) {
                    String blockName = getBlockNameBegin(line);
                    String[] args = getBlockArgumentsBegin(line);
                    blockStack.push(blockName);
                    visitor.visitBeginBlock(blockName, args);
                } else if (line.indexOf(END_HEADER) != -1) {
                    String blockName = getBlockNameEnd(line);
                    // Explicit check: popping an empty stack would otherwise
                    // surface as a wrapped EmptyStackException with no context.
                    if (blockStack.isEmpty()) {
                        throw new BFParseException(
                                "ERROR: found block end without matching begin. ("
                                        + reader.getLineNumber() + "), line='"
                                        + line + "'");
                    }
                    String fromStack = blockStack.pop();
                    if (!blockName.equals(fromStack)) {
                        throw new BFParseException(
                                "ERROR: expected end of block '" + fromStack
                                        + "' got '" + blockName + "'. ("
                                        + reader.getLineNumber() + "), line='"
                                        + line + "'");
                    }
                    visitor.visitEndBlock(blockName);
                } else if (line.indexOf("=") != -1) {
                    String key = getPropertyKey(line);
                    String value = getPropertyValue(line);
                    visitor.visitProperty(key, value);
                }
            }
        } catch (BFParseException e) {
            // Already carries line context; do not double-wrap it.
            throw e;
        } catch (Exception e) {
            throw new BFParseException("ERROR: " + e.getClass().getName() + ": " + e.getMessage() + ". ("
                    + reader.getLineNumber() + "), line='" + line + "'");
        }
        if (blockStack.size() != 0) {
            throw new BFParseException(
                    "ERROR: found more block begin than block end");
        }
    }

    /** Returns the trimmed text before '=' on a {@code key=value} line. */
    private String getPropertyKey(String line) throws BFParseException {
        int i = line.indexOf('=');
        if (i == -1) {
            throw new BFParseException(
                    "ERROR: expecting property 'key=value'. ("
                            + reader.getLineNumber() + "), line='" + line + "'");
        }
        return line.substring(0, i).trim();
    }

    /** Returns the trimmed text after '=' on a {@code key=value} line. */
    private String getPropertyValue(String line) throws BFParseException {
        int i = line.indexOf('=');
        if (i == -1 || line.length() < i + 2) {
            throw new BFParseException(
                    "ERROR: expecting property 'key=value'. ("
                            + reader.getLineNumber() + "), line='" + line + "'");
        }
        return line.substring(i + 1).trim();
    }

    /**
     * Extracts the comma-separated argument list from an optional
     * {@code [...]} suffix on a begin line; returns an empty array when no
     * argument list is present.
     */
    private String[] getBlockArgumentsBegin(String line)
            throws BFParseException {
        int startI = line.indexOf(BEGIN_HEADER);
        if (startI == -1) {
            throw new BFParseException("ERROR: missing block header. ("
                    + reader.getLineNumber() + "), line='" + line + "'");
        }
        startI = line.indexOf('[', startI);
        if (startI == -1) {
            return new String[0]; // argument list is optional
        }
        int endI = line.indexOf(']', startI);
        // FIX: previously tested startI == -1 (always false here), so a
        // missing ']' fell through to substring() with endI == -1 and threw
        // a raw StringIndexOutOfBoundsException instead of this error.
        if (endI == -1) {
            throw new BFParseException(
                    "ERROR: missing ']' at end of argument list. ("
                            + reader.getLineNumber() + "), line='" + line + "'");
        }
        String args = line.substring(startI + 1, endI).trim();
        StringTokenizer st = new StringTokenizer(args, ",");
        String[] ret = new String[st.countTokens()];
        int i = 0;
        while (st.hasMoreTokens()) {
            ret[i++] = st.nextToken().trim();
        }
        return ret;
    }

    /** Returns the block name between {@code \begin{} and the closing '}'. */
    private String getBlockNameBegin(String line) throws BFParseException {
        int startI;
        startI = line.indexOf(BEGIN_HEADER);
        if (startI == -1) {
            throw new BFParseException("ERROR: missing block header. ("
                    + reader.getLineNumber() + "), line='" + line + "'");
        }
        int endI = line.indexOf('}', startI);
        if (endI == -1) {
            throw new BFParseException("ERROR: missing '}'. ("
                    + reader.getLineNumber() + "), line='" + line + "'");
        }
        return line.substring(startI + BEGIN_HEADER.length(), endI).trim();
    }

    /** Returns the block name between {@code \end{} and the closing '}'. */
    private String getBlockNameEnd(String line) throws BFParseException {
        int startI;
        startI = line.indexOf(END_HEADER);
        if (startI == -1) {
            throw new BFParseException("ERROR: missing block header. ("
                    + reader.getLineNumber() + "), line='" + line + "'");
        }
        int endI = line.indexOf('}', startI);
        if (endI == -1) {
            throw new BFParseException("ERROR: missing '}'. ("
                    + reader.getLineNumber() + "), line='" + line + "'");
        }
        return line.substring(startI + END_HEADER.length(), endI).trim();
    }

    /** Ad-hoc manual test: parses res/ex1 and prints the events. */
    public static void main(String[] args) throws IOException, BFParseException {
        BFReader pr = new BFReader(new FileInputStream("res/ex1"));
        pr.accept(new BFVisitor() {
            @Override
            public void visitBeginBlock(String blockName, String[] args) {
                System.out.println("start block, name='" + blockName + "'");
                for (int i = 0; i < args.length; i++) {
                    System.out.println("\t'" + args[i] + "'");
                }
            }

            @Override
            public void visitEndBlock(String blockName) {
                System.out.println("end block, name='" + blockName + "'");
            }

            @Override
            public void visitProperty(String key, String value) {
                System.out.println("key='" + key + "', value='" + value + "'");
            }
        });
        /*
         * System.out.println("'"+getBlockNameBegin("\\begin{ blockname
         * }[ab]")+"'");
         * System.out.println("'"+getBlockNameEnd("\\end{blockname}")+"'");
         * System.out.println("'"+getBlockArgumentsBegin("\\begin{ blockname
         * }")+"'"); System.out.println("'"+getBlockArgumentsBegin("\\begin{
         * blockname }[arg1=2, arg5=34342]")+"'");
         */
    }
}
|
#!/bin/sh -x
#
# Install TOR
#
export PKG_PATH=http://ftp.usa.openbsd.org/pub/OpenBSD/`uname -r`/packages/`arch -s`
# Run pkg_add directly; the old `echo \`pkg_add tor\`` collapsed the
# installer's output onto one line for no benefit.
pkg_add tor
#
# Config TORRC in /etc/tor/torrc
#
# First, backup torrc.
# FIX: the timestamp used %Y%M%d — %M is minutes; the month was missing and
# minutes appeared twice, so backups within the same minute could collide.
cp /etc/tor/torrc /etc/tor/torrc-$(date +%Y%m%d-%H%M%S).bkp
# Overwrite torrc with the transparent-proxy settings (original is backed up above).
echo "
AutomapHostsOnResolve 1
DNSPort 53
TransPort 9040" \
> /etc/tor/torrc
/etc/rc.d/tor restart
|
#!/bin/bash
# Test the connection, wait if needed.
# Use ping's exit status rather than grepping its (locale-dependent) output.
while ! ping -c1 google.com > /dev/null 2>&1; do
    echo "[uGateway]: Waiting for internet connection..."
    sleep 5
done

# Ensure we're in the right directory; abort rather than run git/chown in
# whatever directory we happened to start in.
cd /home/pi/ugateway-scripts || exit 1

# Check for changes on Github (this will revert system files while leaving custom user files intact)
echo "Updating service scripts..."
# reset permissions and revert custom changes
sudo chown -R pi:pi .
git checkout .
OLD_HEAD=$(git rev-parse HEAD)
git pull
NEW_HEAD=$(git rev-parse HEAD)
# Echo out the result - not needed I guess..
if [[ "$OLD_HEAD" != "$NEW_HEAD" ]]; then
    echo "Updates found and installed"
else
    echo "No updates found"
fi
# append the activity log (date alone; the backtick-echo form added nothing)
date >> update.log
|
#!/usr/bin/env bash
# Copyright 2012 Johns Hopkins University (Author: Guoguo Chen, Yenda Trmal)
# Apache 2.0.

# Begin configuration section.
# case_insensitive=true
extraid=
kwlist=
ecf=
rttm=
f4de_prefix=
# End configuration section.

help_message="$0: score the kwslist using the F4DE scorer from NIST
Example:
$0 [additional-parameters] <kaldi-data-dir> <kws-results-dir>
where the most important additional parameters can be:
--extraid <extra-id> #for using, when a non-default kws tasks are setup
(using the kws_setup.sh --extraid) for a kaldi-single data-dir
--kwlist <kwlist> #allows for an alternative kwlist -- if not set, the default
kwlist is taken from <kaldi-data-dir>
--f4de-prefix <prefix-id> #allows for scoring the same results using
different kwlists and storing them in the same dir "

echo "$0" "$@"

[ -f ./path.sh ] && . ./path.sh; # source the path.
. parse_options.sh || exit 1;

if [ $# -ne 2 ]; then
    printf "FATAL: incorrect number of variables given to the script\n\n"
    printf "$help_message\n"
    exit 1;
fi

# All variable expansions below are quoted: the originals (e.g. [ -z $extraid ])
# were subject to word splitting and broke on values containing whitespace.
if [ -z "$extraid" ] ; then
    kwsdatadir=$1/kws
else
    kwsdatadir=$1/${extraid}_kws
fi
kwsoutputdir="$2/"

# Fall back to the default kwlist/rttm/ecf locations inside the kws data dir.
if [ -z "$kwlist" ] ; then
    kwlist=$kwsdatadir/kwlist.xml
fi
if [ -z "$rttm" ] ; then
    rttm=$kwsdatadir/rttm
fi
if [ -z "$ecf" ] ; then
    ecf=$kwsdatadir/ecf.xml
fi

# A non-empty prefix becomes a subdirectory of the output dir.
if [ -n "${f4de_prefix}" ] ; then
    f4de_prefix="/${f4de_prefix}"
fi

if [[ ! -d "$kwsdatadir" ]] ; then
    echo "FATAL: the KWS input data directory does not exist!"
    exit 1;
fi

for file in "$ecf" "$rttm" "$kwlist" ; do
    if [[ ! -f "$file" ]] ; then
        echo "FATAL: file $file does not exist!"
        exit 1;
    fi
done

echo KWSEval -e "$ecf" -r "$rttm" -t "$kwlist" \
    -s "$kwsoutputdir/kwslist.xml" -c -o -b -d -f "$kwsoutputdir"

KWSEval -e "$ecf" -r "$rttm" -t "$kwlist" \
    -s "$kwsoutputdir/kwslist.xml" -c -o -b -d -f "${kwsoutputdir}${f4de_prefix}" || exit 1;

# Extract the total duration from F4DE's summary and compute oracle metrics.
duration=`grep TotDur "${kwsoutputdir}${f4de_prefix}/sum.txt" | cut -f 3 -d '|' | sed "s/\s*//g"`
local/kws_oracle_threshold.pl --duration "$duration" "${kwsoutputdir}${f4de_prefix}/alignment.csv" > "${kwsoutputdir}${f4de_prefix}/metrics.txt"

exit 0;
|
#!/bin/bash
# One-time Azure provisioning notes. config.sh supplies $rg, $region,
# $storage_name, $function_app_name and $AZURE_SUBSCRIPTION_ID.
source ../config.sh

# Note: I executed this commands one at a time; they are kept commented out
# so running this file cannot accidentally re-provision resources.
# az login
# az account set --subscription $AZURE_SUBSCRIPTION_ID
# az account show

# Create the resource group, then a storage account for the function app.
# az group create --name $rg --location $region

# az storage account create \
#     --name $storage_name \
#     --location $region \
#     --resource-group $rg \
#     --sku Standard_LRS

# Create the consumption-plan .NET function app backed by the storage account.
# az functionapp create \
#     --resource-group $rg \
#     --consumption-plan-location $region \
#     --runtime dotnet \
#     --functions-version 3 \
#     --name $function_app_name \
#     --storage-account $storage_name

# warning message:
# --runtime-version is not supported for --runtime dotnet.
# Dotnet version is determined by --functions-version.
# Dotnet version will be 3.1 for this function app.
|
impl RegisterBlock {
    /// Reads and returns the module ID register value.
    ///
    /// NOTE(review): assumes `id` is a read-only register whose `read()`
    /// has no side effects — confirm against the peripheral's reference
    /// manual before relying on repeated reads.
    pub fn read_module_id(&self) -> u32 {
        self.id.read()
    }
}
def find_sum(n):
    """Return 1 + 2 + ... + n using Gauss's closed-form formula.

    n * (n + 1) is always even, so integer division is exact.
    """
    product = n * (n + 1)
    return product // 2


print(find_sum(10))
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;
/**
* Created by lawrencemao on 11/13/17.
*/
@TeleOp
@TeleOp
public class ExponentialDrive extends LinearOpMode {
    // Mecanum drive motors: front/back, left/right.
    DcMotor fL;
    DcMotor bL;
    DcMotor fR;
    DcMotor bR;
    // Linear slide motor.
    DcMotor lS;
    // Gripper servos (left/right) and the auto arm servo.
    Servo left;
    Servo right;
    Servo arm;
    ColorSensor color;

    /**
     * TeleOp loop: mecanum drive from gamepad1 sticks, bumpers drive the
     * linear slide, and 'a' opens/closes the gripper servos.
     */
    public void runOpMode() throws InterruptedException{
        // Map hardware by configuration name. The trailing numbers are the
        // original author's notes (presumably port/hub numbers — unverified).
        fL = hardwareMap.dcMotor.get("frontLeft"); //2
        bL = hardwareMap.dcMotor.get("backLeft"); //2
        fR = hardwareMap.dcMotor.get("frontRight"); //2
        bR = hardwareMap.dcMotor.get("backRight"); //2
        lS = hardwareMap.dcMotor.get("linearSlide"); //5
        left = hardwareMap.servo.get("left"); //2
        right = hardwareMap.servo.get("right"); //2
        arm = hardwareMap.servo.get("arm"); //5
        color = hardwareMap.colorSensor.get("color"); //5
        // Reverse the left side so positive power drives the robot forward.
        fL.setDirection(DcMotor.Direction.REVERSE);
        left.setDirection(Servo.Direction.REVERSE);
        bL.setDirection(DcMotor.Direction.REVERSE);

        waitForStart();

        while(opModeIsActive()) {
            //auto-servo is held in place
            arm.setPosition(.95);

            // Overall power magnitude: the larger of the left-stick vector
            // length and the right-stick turn input, clipped to [-1, 1] and
            // negated (stick y is inverted on FTC gamepads).
            double POWER = -1 * Range.clip(Math.max(Range.clip(Math.sqrt(Math.pow(gamepad1.left_stick_x, 2) + Math.pow(gamepad1.left_stick_y, 2)), -1, 1),
                    Math.abs(gamepad1.right_stick_x)), -1, 1);

            // Largest raw wheel term, used to normalize all four wheel powers
            // so none exceeds the POWER magnitude.
            // NOTE(review): if all sticks are at rest, maxPower is 0 and the
            // divisions below are 0/0 (NaN) — confirm intended behavior.
            double maxPower = Math.max(Math.max(Math.abs(gamepad1.left_stick_y - gamepad1.left_stick_x - gamepad1.right_stick_x),
                    Math.abs(gamepad1.left_stick_y + gamepad1.left_stick_x - gamepad1.right_stick_x)),
                    Math.max(Math.abs(gamepad1.left_stick_y + gamepad1.left_stick_x + gamepad1.right_stick_x),
                            Math.abs(gamepad1.left_stick_y - gamepad1.left_stick_x + gamepad1.right_stick_x)));

            // Standard mecanum mixing: y = forward, x = strafe, rx = rotate.
            fL.setPower(POWER * (gamepad1.left_stick_y - gamepad1.left_stick_x - gamepad1.right_stick_x) / maxPower);
            bL.setPower(POWER * (gamepad1.left_stick_y + gamepad1.left_stick_x - gamepad1.right_stick_x) / maxPower);
            fR.setPower(POWER * (gamepad1.left_stick_y + gamepad1.left_stick_x + gamepad1.right_stick_x) / maxPower);
            bR.setPower(POWER * (gamepad1.left_stick_y - gamepad1.left_stick_x + gamepad1.right_stick_x) / maxPower);

            // Slide is stopped unless a bumper is held.
            lS.setPower(0);
            if(gamepad1.right_bumper){
                lS.setPower(0.5);
            }
            if(gamepad1.left_bumper){
                lS.setPower(-0.5);
            }

            // Gripper: 'a' held = closed (position 1), released = open (0).
            if(gamepad1.a){
                left.setPosition(1);
                right.setPosition(1);
            }
            if(!gamepad1.a){
                left.setPosition(0);
                right.setPosition(0);
            }
        }
    }
}
package com.usehover.hoverstarter;
import android.content.Intent;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import com.hover.sdk.actions.HoverAction;
import com.hover.sdk.api.Hover;
import com.hover.sdk.api.HoverParameters;
import com.hover.sdk.permissions.PermissionActivity;
import java.util.ArrayList;
/**
 * Starter activity for the Hover SDK: initializes Hover (triggering the
 * action download whose outcome lands in onSuccess/onError), and wires two
 * buttons — one to request SDK permissions, one to launch a Hover action.
 */
public class MainActivity extends AppCompatActivity implements Hover.DownloadListener {
    private final String TAG = "MainActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Registers this activity as the DownloadListener for action downloads.
        Hover.initialize(getApplicationContext(), this);

        // Launches the Hover permission flow (accessibility/overlay etc.).
        Button permissionsButton = findViewById(R.id.permissions_button);
        permissionsButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent i = new Intent(getApplicationContext(), PermissionActivity.class);
                startActivityForResult(i, 0);
            }
        });

        // Starts a USSD/SIM action session; the placeholder action ID must be
        // replaced with a real one from the Hover dashboard.
        Button button= (Button) findViewById(R.id.action_button);
        button.setEnabled(true);
        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent i = new HoverParameters.Builder(MainActivity.this)
                        .request("YOUR_ACTION_ID") // Add your action ID here
                        // .extra("YOUR_VARIABLE_NAME", "TEST_VALUE") // Uncomment and add your variables if any
                        .buildIntent();
                startActivityForResult(i, 0);
            }
        });
    }

    /** Called when the Hover action download fails; logs the reason. */
    @Override public void onError(String message) {
        // Toast.makeText(this, "Error while attempting to download actions, see logcat for error", Toast.LENGTH_LONG).show();
        Log.e(TAG, "Error: " + message);
    }

    /** Called when Hover actions finish downloading; logs the count. */
    @Override public void onSuccess(ArrayList<HoverAction> actions) {
        // Toast.makeText(this, "Successfully downloaded " + actions.size() + " actions", Toast.LENGTH_LONG).show();
        Log.d(TAG, "Successfully downloaded " + actions.size() + " actions");
    }
}
|
# Evaluate the 512+0+512 N-VB-IP model on WikiText-103 validation text with
# batch size 1. Per the flag names, the augmentation shuffles/removes all but
# nouns in the first half-quarter and scoring covers only the last quarter of
# each sequence — confirm against the augmentation/eval function definitions.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-N-VB-IP/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-N-VB-IP/512+0+512-shuffled-N-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_half_quarter --eval_function last_quarter_eval
// Video Setup
// canPlayType() returns "", "maybe", or "probably"; any non-empty string is
// truthy, so this plays only when MP4/H.264+AAC support is at least possible.
if (myVideo.canPlayType('video/mp4; codecs="avc1.42E01E, mp4a.40.2"')) {
  // play() returns a Promise in modern browsers and rejects if autoplay is
  // blocked; without a handler that becomes an unhandled rejection. Optional
  // chaining covers older browsers where play() returns undefined.
  myVideo.play()?.catch((err) => {
    console.error('Video playback failed:', err);
  });
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.