text
stringlengths 1
1.05M
|
|---|
module.exports = db => async (req, res, next) => {
try {
// Must sanitize all input first (see sanitize-get-reports middleware fn)
const { verified, fromDate, toDate, locations, fullReport } = req.query
// Init where clause
const whereText = []
const whereValues = []
// Add verified filter if true
if (verified) {
whereText.push(`r.verified = $${whereValues.length + 1}`)
whereValues.push(verified)
}
// Add locations to where clause if provided
if (locations.length) {
const geoFilter = locations
.map((_, i) => `t.geohash LIKE $${i + whereValues.length + 1} || '%'`)
.join(' OR ')
whereText.push(`(
r.trace_list_id IS NULL OR EXISTS(
SELECT * FROM traces t
WHERE t.trace_list_id = r.trace_list_id
AND (${geoFilter})
)
)`)
whereValues.push(...locations)
}
// Add fromDate to where clause if provided
if (fromDate) {
whereText.push(`r.reported_at >= to_timestamp($${whereValues.length + 1})`)
whereValues.push(fromDate)
}
// Add toDate to where clause if provided
if (toDate) {
whereText.push(`r.reported_at <= to_timestamp($${whereValues.length + 1})`)
whereValues.push(toDate)
}
const { rows: queryReports } = await db.query({
text: `
SELECT DISTINCT r.report, r.signature FROM reports r
${whereText.length ? `WHERE ${whereText.join(' AND ')}` : ''}
`,
values: whereValues,
rowMode: 'array',
})
const reports = queryReports.map(([report, signature]) => {
if (fullReport) {
const reportBuffer = Buffer.from(report, 'base64')
const signatureBuffer = Buffer.from(signature, 'base64')
return Buffer.concat([reportBuffer, signatureBuffer]).toString('base64')
}
return report
})
res.json({ reports })
} catch (err) {
return next(err)
}
}
|
package de.rieckpil.blog.springbootwithopenliberty;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot entry point; {@code @SpringBootApplication} enables component
 * scanning and auto-configuration for this package.
 */
@SpringBootApplication
public class SpringBootWithOpenLibertyApplication {
/** Boots the Spring application context with the given CLI arguments. */
public static void main(String[] args) {
SpringApplication.run(SpringBootWithOpenLibertyApplication.class, args);
}
}
|
<reponame>bosxixi/sortingtables
// Pairs a sortable key with the table-row element it was read from.
var ComparableRow = function ComparableRow(key, element) {
    this.key = key;
    this.element = element;
};
// Two-way enum (TypeScript style): name -> value and value -> name.
var OrderBy;
(function (enumObj) {
    enumObj.Ascending = 0;
    enumObj[0] = "Ascending";
    enumObj.Descending = 1;
    enumObj[1] = "Descending";
})(OrderBy || (OrderBy = {}));
// Comparison callbacks for Array.prototype.sort over ComparableRow items.
// Keys that parse as numbers compare numerically; anything else compares as
// case-insensitive strings.
// NOTE(review): the method names look inverted — `Descending` produces an
// ascending numeric order (a - b) and vice versa. Behavior is intentionally
// preserved because toggleSorting relies on it; confirm intent before renaming.
var RowComparer = (function () {
    function RowComparer() {
    }
    RowComparer.prototype.Descending = function (a, b) {
        var left = Number(a.key);
        var right = Number(b.key);
        return (!isNaN(left) && !isNaN(right))
            ? left - right
            : a.key.toLowerCase().localeCompare(b.key.toLowerCase());
    };
    RowComparer.prototype.Ascending = function (a, b) {
        var left = Number(a.key);
        var right = Number(b.key);
        return (!isNaN(left) && !isNaN(right))
            ? right - left
            : b.key.toLowerCase().localeCompare(a.key.toLowerCase());
    };
    return RowComparer;
}());
// Configuration bag for a SortingTable: the row comparer, optional column
// include/exclude lists, and the indicator elements shown next to the
// currently sorted header cell.
var SortingTableOptions = (function () {
    function SortingTableOptions(comparer, excludeColumns, includeColumns, descending, ascending) {
        if (excludeColumns != null && includeColumns != null) {
            console.log("warning: setting both excludeColumns and includeColumns.");
        }
        // Fall back to bootstrap glyphicon triangles when no indicator given.
        this.descending = descending == null
            ? defaultGlyph("glyphicon glyphicon-triangle-top")
            : descending;
        this.ascending = ascending == null
            ? defaultGlyph("glyphicon glyphicon-triangle-bottom")
            : ascending;
        this.excludeColumns = excludeColumns;
        this.includeColumns = includeColumns;
        this.comparer = comparer;
    }
    // Builds the default <span> indicator with the given CSS classes.
    // Only invoked when a custom indicator was not supplied.
    function defaultGlyph(className) {
        var span = document.createElement("span");
        span.className = className;
        return span;
    }
    return SortingTableOptions;
}());
// Attaches click-to-sort behavior to a <table>. The header row (taken from
// <thead>, or the first <tbody> row when there is no <thead>) becomes
// clickable; clicking a header cell sorts the data rows by that column.
// Tables marked data-ignore="true" are left untouched.
var SortingTable = (function () {
function SortingTable(table, options) {
// Index of the first data row inside tbody (row 0 is the emulated header).
this.rowsBeginIndex = 1;
if (table.getAttribute("data-ignore") != "true") {
this.options = options;
this.table = table;
this.tbody = table.querySelector("tbody");
var thead = table.querySelector("thead");
if (thead == null) {
this.hasThead = false;
this.theadEmulate = this.tbody.children.item(0);
}
else {
// Move the header row into <tbody> so header and data rows share one
// parent; the original <thead> is cloned so it can be restored later.
this.removeTheadTagFromTable();
this.theadEmulate = this.tbody.children.item(0);
}
this.headColumnNames = this.getHeaderColumns();
this.rows = this.getRows();
this.addHeadColumnNamesToEachRow();
this.bindThead();
}
}
// Detaches <thead>, keeping a shallow clone of it, and inserts a deep clone
// of its header row at the top of the table's remaining first section.
SortingTable.prototype.removeTheadTagFromTable = function () {
this.hasThead = true;
var theThead = this.table.children.item(0);
this.theadClone = theThead.cloneNode();
var theTheadTrClone = theThead.children.item(0).cloneNode(true);
this.table.removeChild(theThead);
this.table.children.item(0).insertBefore(theTheadTrClone, this.table.children.item(0).children.item(0));
};
// Wires click handlers onto header cells, honoring include/exclude lists.
SortingTable.prototype.bindThead = function () {
var ths = this.theadEmulate.children;
for (var i = 0; i < ths.length; i++) {
var column = ths[i];
// A cell with no text cannot name a column, so it is never sortable.
if (column.textContent == "") {
continue;
}
if (this.options.excludeColumns != null && this.options.excludeColumns.indexOf(column.textContent.trim()) != -1) {
continue;
}
if (this.options.includeColumns != null) {
if (this.options.includeColumns.indexOf(column.textContent.trim()) != -1) {
this.setStyleAddEventListener(column);
}
}
else {
this.setStyleAddEventListener(column);
}
}
};
// Re-inserts the preserved <thead> (with the header row) into the table.
SortingTable.prototype.bringBackTheadToTable = function () {
this.theadClone.appendChild(this.tbody.children.item(0));
this.table.insertBefore(this.theadClone, this.table.children.item(0));
};
// Makes a header cell look clickable and sort its column when clicked.
SortingTable.prototype.setStyleAddEventListener = function (column) {
var _this = this;
column.setAttribute("style", "cursor: pointer;");
column.addEventListener("click", function (e) {
var ct = e.currentTarget;
var columnName = ct.textContent.trim();
_this.toggleSorting(columnName);
}, false);
};
// Removes the ordering-indicator <span>(s) from a header cell.
// FIXME(review): `column.children` is a live HTMLCollection — removing while
// incrementing i_1 skips elements when more than one child is present.
SortingTable.prototype.removeOrderingSapn = function (column) {
for (var i_1 = 0; i_1 < column.children.length; i_1++) {
column.removeChild(column.children.item(i_1));
}
};
// Shows the configured ascending/descending indicator in a header cell.
SortingTable.prototype.addElementToTheadColumn = function (column, orderBy) {
this.removeOrderingSapn(column);
if (orderBy == OrderBy.Ascending) {
column.appendChild(this.options.ascending);
}
else {
column.appendChild(this.options.descending);
}
};
// Flips the clicked column's direction (tracked in data-orderby) and clears
// ordering state from every other column.
SortingTable.prototype.toggleSorting = function (columnName) {
var ths = this.theadEmulate.children;
for (var i = 0; i < ths.length; i++) {
var column = ths[i];
if (column.textContent.trim() === columnName) {
var orderby = column.getAttribute("data-orderby");
if (orderby == OrderBy.Ascending.toString()) {
column.setAttribute("data-orderby", OrderBy.Descending.toString());
this.addElementToTheadColumn(column, OrderBy.Descending);
this.orderBy(columnName, OrderBy.Descending);
}
else {
column.setAttribute("data-orderby", OrderBy.Ascending.toString());
this.addElementToTheadColumn(column, OrderBy.Ascending);
this.orderBy(columnName, OrderBy.Ascending);
}
}
else {
column.removeAttribute("data-orderby");
this.removeOrderingSapn(column);
}
}
};
// Rebuilds <tbody>: header row first, then the rows in sorted order.
SortingTable.prototype.orderBy = function (columnName, orderBy) {
var orderedRows = this.getOrderedRows(columnName, orderBy);
this.tbody.innerHTML = "";
this.tbody.appendChild(this.theadEmulate);
for (var i = 0; i < orderedRows.length; i++) {
this.tbody.appendChild(orderedRows[i]);
}
if (this.hasThead) {
this.bringBackTheadToTable();
}
};
// Returns the data-row elements sorted by the given column and direction.
SortingTable.prototype.getOrderedRows = function (columnName, orderBy) {
if (orderBy === void 0) { orderBy = OrderBy.Descending; }
var orderedRows = [];
var unordered = this.getComparableRows(columnName);
if (orderBy == OrderBy.Descending) {
unordered.sort(this.options.comparer.Descending).forEach(function (row) {
orderedRows.push(row.element);
});
}
else {
unordered.sort(this.options.comparer.Ascending).forEach(function (row) {
orderedRows.push(row.element);
});
}
return orderedRows;
};
// Wraps each data row in a ComparableRow keyed by its cell value for the
// given column.
SortingTable.prototype.getComparableRows = function (columnName) {
var map = [];
for (var i = 0; i < this.rows.length; i++) {
var value = this.getSingleRowValue(this.rows[i], columnName);
map.push(new ComparableRow(value, this.rows[i]));
}
return map;
};
// Sort key for one row/column: the cell's data-value attribute when present,
// otherwise its trimmed text. Returns undefined if the column is absent.
SortingTable.prototype.getSingleRowValue = function (row, columnName) {
var columns = row.children;
for (var i = 0; i < columns.length; i++) {
var column = columns[i];
if (column.getAttribute("data-columnName") === columnName) {
if (column.getAttribute("data-value") != null) {
return column.getAttribute("data-value");
}
return column.textContent.trim();
}
}
};
// Stamps every cell with its column's header name (data-columnName),
// pairing cells to header columns by index.
SortingTable.prototype.addHeadColumnNamesToEachRow = function () {
for (var i = 0; i < this.rows.length; i++) {
var row = this.rows[i];
var rowChilds = row.children;
for (var j = 0; j < rowChilds.length; j++) {
var column = rowChilds[j];
column.setAttribute("data-columnName", this.headColumnNames[j]);
}
}
};
// Collects the data rows: everything in tbody after the header row.
SortingTable.prototype.getRows = function () {
var allRowsIncludingHead = this.tbody.children;
var elements = [];
for (var i = this.rowsBeginIndex; i < allRowsIncludingHead.length; i++) {
var e = allRowsIncludingHead.item(i);
elements.push(e);
}
return elements;
};
// Reads the trimmed text of each header cell as the column names.
SortingTable.prototype.getHeaderColumns = function () {
var first = this.theadEmulate.children;
var headerColumns = [];
for (var i = 0; i < first.length; i++) {
var e = first.item(i);
headerColumns.push(e.textContent.trim());
}
return headerColumns;
};
return SortingTable;
}());
//# sourceMappingURL=sortingtables.js.map
|
<gh_stars>0
const path = require('path');
const merge = require('webpack-merge');
const webpack = require('webpack');
const os = require('os');
const WRMPlugin = require('atlassian-webresource-webpack-plugin');
const providedDependencies = require('./providedDependencies');
// Maven build layout: webpack output goes straight into target/classes so the
// bundles are packaged inside the plugin jar.
const PLUGIN_TARGET_DIR = path.join(__dirname, '..', '..', '..', 'target');
const OUTPUT_DIR = path.join(PLUGIN_TARGET_DIR, 'classes');
const SRC_DIR = path.join(__dirname, 'src');
/**
 * Builds the Atlassian web-resource (WRM) webpack plugin instance.
 * @param {boolean} watch - run the WRM plugin in watch mode
 * @param {boolean} watchPrepare - emit watch-mode descriptors only
 * @returns {WRMPlugin}
 */
const getWrmPlugin = (watch = false, watchPrepare = false) =>
  new WRMPlugin({
    pluginKey: 'com.ntnguyen.app.confluence.license-tracking-app',
    // Map each webpack entry to a stable web-resource key, e.g. the entry
    // "mltMacroEntry" becomes the resource "mlt-macro-resource".
    webresourceKeyMap: {
      mltMacroEntry: 'mlt-macro-resource',
      mltConfigEntry: 'mlt-config-resource',
    },
    xmlDescriptors: path.join(OUTPUT_DIR, 'META-INF', 'plugin-descriptors',
      'wr-webpack-bundles.xml'),
    providedDependencies,
    watch,
    watchPrepare,
  });
// Base webpack configuration shared by every mode (dev, watch, watch:prepare).
const webpackConfig = {
entry: {
mltMacroEntry: path.join(SRC_DIR, 'entry/mlt-macro.entry.js'),
mltConfigEntry: path.join(SRC_DIR, 'entry/mlt-config.entry.js'),
},
output: {
// chunkhash in the filename gives per-build cache busting.
filename: '[name].[chunkhash].js',
path: OUTPUT_DIR,
chunkFilename: '[name].[chunkhash].js',
},
mode: 'development',
module: {
rules: [
{
test: /\.css$/,
use: [
'style-loader',
'css-loader',
],
},
{
// Static assets (images + fonts) are emitted as files.
test: /\.(png|svg|jpg|gif|woff|woff2|eot|ttf|otf)$/,
use: [
'file-loader',
],
},
{
// Only pack file js or jsx (test the file)
test: /\.(js|jsx)$/,
exclude: /node_modules/,
// User babel loader. Loader can be considered as a bridge between babel and Webpack
use: [
'babel-loader',
],
},
],
},
optimization: {
// Splitting is disabled in the base config; devConfig re-enables it.
splitChunks: false,
runtimeChunk: false,
},
devtool: 'cheap-module-source-map',
resolve: {
modules: [
'node_modules',
SRC_DIR,
],
},
plugins: [new webpack.NamedChunksPlugin()],
};
// Dev-server coordinates; bundles are served from this host in watch modes.
const hostname = os.hostname();
const devServerPort = '3333';
// Overrides for `watch:prepare`: unhashed bundles pointing at the dev server,
// with the WRM plugin in watch + prepare mode.
const watchPrepareConfig = {
output: {
publicPath: `http://${hostname}:${devServerPort}/`,
filename: '[name].js',
chunkFilename: '[name].chunk.js',
},
plugins: [
getWrmPlugin(true, true),
],
};
// Overrides for `watch`: webpack-dev-server with hot module replacement.
const watchConfig = {
output: {
publicPath: `http://${hostname}:${devServerPort}/`,
filename: '[name].js',
chunkFilename: '[name].chunk.js',
},
devServer: {
host: hostname,
port: devServerPort,
overlay: true,
hot: true,
// Bundles are fetched cross-origin from the Confluence host.
headers: { 'Access-Control-Allow-Origin': '*' },
},
plugins: [
new webpack.NamedModulesPlugin(),
new webpack.HotModuleReplacementPlugin(),
getWrmPlugin(true),
],
};
// Overrides for the default build: aggressive chunk splitting + runtime chunk.
const devConfig = {
optimization: {
splitChunks: {
minSize: 0,
chunks: 'all',
maxInitialRequests: Infinity,
},
runtimeChunk: true,
},
plugins: [
getWrmPlugin(),
],
};
module.exports = (env) => {
if (env === 'watch:prepare') {
return merge([webpackConfig, watchPrepareConfig]);
}
if (env === 'watch') {
return merge([webpackConfig, watchConfig, watchPrepareConfig]);
}
return merge([webpackConfig, devConfig]);
};
|
import { Ressurs, RessursStatus } from '@navikt/familie-typer';
import { useState, useRef, useEffect } from 'react';
import ReactDOM from 'react-dom';
import { inputId } from '.';
import { ISøkeresultat } from '..';
import { søkKnappId, tømKnappId } from './Søk';
// Contract between the search hook and its host component.
export interface Props {
nullstillSøkeresultater: () => void;
søk: (value: string) => void;
søkeresultatOnClick: (søkResultat: ISøkeresultat) => void;
søkeresultater: Ressurs<ISøkeresultat[]>;
}
// Hook driving a person-search field: holds the current search text (ident),
// fires searches while the value is valid, supports arrow-key navigation of
// the result list, and closes the result popover on Escape / outside clicks.
const useSøk = ({ nullstillSøkeresultater, søk, søkeresultatOnClick, søkeresultater }: Props) => {
const [ident, settIdent] = useState('');
const [identSistSøktPå, settIdentSistSøktPå] = useState('');
// Element the result popover is anchored to; undefined means "closed".
const [anker, settAnker] = useState<HTMLElement | undefined>(undefined);
// Index of the highlighted result; -1 means nothing selected.
const [valgtSøkeresultat, settValgtSøkeresultat] = useState(-1);
const [erGyldig, settErGyldig] = useState(false);
// Mirror of `anker` readable from the global listeners registered below.
const ankerRef = useRef<HTMLElement>();
// Search automatically whenever the input becomes, or changes while, valid.
useEffect(() => {
if (erGyldig) {
utløserSøk();
}
}, [erGyldig, ident]);
useEffect(() => {
// NOTE(review): these handlers capture the first render's closures; this is
// safe only because they go through ankerRef and stable setters — and
// presumably the props they call are stable. Verify against the caller.
window.addEventListener('keydown', handleGlobalKeydown);
window.addEventListener('click', handleGlobalClick);
return () => {
window.removeEventListener('keydown', handleGlobalKeydown);
window.removeEventListener('click', handleGlobalClick);
};
}, []);
// Clears input and search state; optionally closes the popover too.
const nullstillInput = (lukkPopover = false) => {
settIdent('');
settIdentSistSøktPå('');
settErGyldig(false);
lukkPopover && settAnker(undefined);
nullstillSøkeresultater();
};
// Anchors the result popover to the search input element.
const settAnkerPåInput = () => {
const ankerElement = document.getElementById(inputId) as HTMLElement;
settAnker(ankerElement);
ankerRef.current = ankerElement;
};
// Triggers a search on the ident with all spaces stripped.
const utløserSøk = () => {
const identUtenWhitespace = ident.replace(/ /g, '');
søk(identUtenWhitespace);
settIdentSistSøktPå(identUtenWhitespace);
settAnkerPåInput();
};
// Escape closes the popover and clears the input — but only while open.
const handleGlobalKeydown = (event: KeyboardEvent) => {
if (ankerRef.current === undefined) return;
if (event.key === 'Escape') nullstillInput(true);
};
// Focus moving outside the input and the search/clear buttons closes the popover.
const handleGlobalClick = () => {
if (
ankerRef.current !== undefined &&
!ReactDOM.findDOMNode(ankerRef.current)?.contains(document.activeElement) &&
!document.getElementById(søkKnappId)?.contains(document.activeElement) &&
!document.getElementById(tømKnappId)?.contains(document.activeElement)
) {
nullstillInput(true);
}
};
// Keeps `ident` in sync with the input; an emptied input resets results.
const onInputChange = (event: React.ChangeEvent) => {
const nyVerdi = (event.target as HTMLInputElement).value;
settIdent(nyVerdi);
if (nyVerdi === '') {
nullstillSøkeresultater();
settAnker(undefined);
}
};
// Arrow keys cycle the selection (with -1 = none as the wrap-around state);
// Enter opens the selected or only result, otherwise re-triggers the search.
const onInputKeyDown = (event: React.KeyboardEvent) => {
switch (event.key) {
case 'ArrowUp':
settValgtSøkeresultat(
valgtSøkeresultat === -1
? søkeresultater.status === RessursStatus.SUKSESS
? søkeresultater.data.length - 1
: -1
: valgtSøkeresultat - 1,
);
break;
case 'ArrowDown':
settValgtSøkeresultat(
valgtSøkeresultat <
(søkeresultater.status === RessursStatus.SUKSESS
? søkeresultater.data.length - 1
: -1)
? valgtSøkeresultat + 1
: -1,
);
break;
case 'Enter':
if (søkeresultater.status === RessursStatus.SUKSESS) {
if (
identSistSøktPå === ident &&
valgtSøkeresultat === -1 &&
søkeresultater.data.length === 1
) {
søkeresultatOnClick(søkeresultater.data[0]);
} else if (valgtSøkeresultat !== -1) {
søkeresultatOnClick(søkeresultater.data[valgtSøkeresultat]);
} else {
utløserSøk();
}
} else {
utløserSøk();
}
break;
}
};
return {
anker,
ident,
nullstillInput,
onInputChange,
onInputKeyDown,
settErGyldig,
settValgtSøkeresultat,
utløserSøk,
valgtSøkeresultat,
};
};
export default useSøk;
|
#!/bin/bash
#
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# if version not passed in, default to latest released version
export VERSION=1.3.0
# if ca version not passed in, default to latest released version
export CA_VERSION=$VERSION
# current version of thirdparty images (couchdb, kafka and zookeeper) released
export THIRDPARTY_IMAGE_VERSION=0.4.13
# Platform string like "linux-amd64" / "windows-amd64", used in download URLs.
export ARCH=$(echo "$(uname -s|tr '[:upper:]' '[:lower:]'|sed 's/mingw64_nt.*/windows/')-$(uname -m | sed 's/x86_64/amd64/g')")
# Raw machine arch (e.g. x86_64), used for pre-1.2.0 image tags.
export MARCH=$(uname -m)
# Prints usage information for this script.
printHelp() {
echo "Usage: bootstrap.sh [version [ca_version [thirdparty_version]]] [options]"
echo
echo "options:"
echo "-h : this help"
echo "-d : bypass docker image download"
echo "-s : bypass fabric-samples repo clone"
echo "-b : bypass download of platform-specific binaries"
echo
echo "e.g. bootstrap.sh 1.3.0 -s"
echo "would download docker images and binaries for version 1.3.0"
}
# Pulls the core fabric images at the given tag ($1) and re-tags each
# as the unversioned hyperledger/fabric-<image> name.
dockerFabricPull() {
local FABRIC_TAG=$1
for IMAGES in peer orderer ccenv javaenv tools; do
echo "==> FABRIC IMAGE: $IMAGES"
echo
docker pull hyperledger/fabric-$IMAGES:$FABRIC_TAG
docker tag hyperledger/fabric-$IMAGES:$FABRIC_TAG hyperledger/fabric-$IMAGES
done
}
# Pulls the third-party images (couchdb, kafka, zookeeper) at the given
# tag ($1) and re-tags each as the unversioned name.
dockerThirdPartyImagesPull() {
local THIRDPARTY_TAG=$1
for IMAGES in couchdb kafka zookeeper; do
echo "==> THIRDPARTY DOCKER IMAGE: $IMAGES"
echo
docker pull hyperledger/fabric-$IMAGES:$THIRDPARTY_TAG
docker tag hyperledger/fabric-$IMAGES:$THIRDPARTY_TAG hyperledger/fabric-$IMAGES
done
}
# Pulls the fabric-ca image at the given tag ($1) and re-tags it unversioned.
dockerCaPull() {
local CA_TAG=$1
echo "==> FABRIC CA IMAGE"
echo
docker pull hyperledger/fabric-ca:$CA_TAG
docker tag hyperledger/fabric-ca:$CA_TAG hyperledger/fabric-ca
}
# Clones (if needed) hyperledger/fabric-samples and checks out the tag
# matching $VERSION. Handles three cases: already inside the repo, the repo
# present in the current directory, or no repo at all.
samplesInstall() {
# clone (if needed) hyperledger/fabric-samples and checkout corresponding
# version to the binaries and docker images to be downloaded
if [ -d first-network ]; then
# if we are in the fabric-samples repo, checkout corresponding version
echo "===> Checking out v${VERSION} of hyperledger/fabric-samples"
git checkout v${VERSION}
elif [ -d fabric-samples ]; then
# if fabric-samples repo already cloned and in current directory,
# cd fabric-samples and checkout corresponding version
echo "===> Checking out v${VERSION} of hyperledger/fabric-samples"
cd fabric-samples && git checkout v${VERSION}
else
echo "===> Cloning hyperledger/fabric-samples repo and checkout v${VERSION}"
git clone -b master https://github.com/hyperledger/fabric-samples.git && cd fabric-samples && git checkout v${VERSION}
fi
}
# Incrementally downloads the .tar.gz file locally first, only decompressing it
# after the download is complete. This is slower than binaryDownload() but
# allows the download to be resumed.
binaryIncrementalDownload() {
    local BINARY_FILE=$1
    local URL=$2
    # rc is only assigned on curl failure, so it must be local and reset here:
    # previously it was a global, and a stale value left behind by an earlier
    # download attempt (e.g. set in binaryDownload) was misread as this
    # download's status, sending a successful download into the error branch.
    local rc=""
    curl -f -s -C - ${URL} -o ${BINARY_FILE} || rc=$?
    # Due to limitations in the current Nexus repo:
    # curl returns 33 when there's a resume attempt with no more bytes to download
    # curl returns 2 after finishing a resumed download
    # with -f curl returns 22 on a 404
    if [ "$rc" = 22 ]; then
        # looks like the requested file doesn't actually exist so stop here
        return 22
    fi
    if [ -z "$rc" ] || [ $rc -eq 33 ] || [ $rc -eq 2 ]; then
        # The checksum validates that RC 33 or 2 are not real failures
        echo "==> File downloaded. Verifying the md5sum..."
        localMd5sum=$(md5sum ${BINARY_FILE} | awk '{print $1}')
        remoteMd5sum=$(curl -s ${URL}.md5)
        if [ "$localMd5sum" == "$remoteMd5sum" ]; then
            echo "==> Extracting ${BINARY_FILE}..."
            tar xzf ./${BINARY_FILE} --overwrite
            echo "==> Done."
            rm -f ${BINARY_FILE} ${BINARY_FILE}.md5
        else
            echo "Download failed: the local md5sum is different from the remote md5sum. Please try again."
            rm -f ${BINARY_FILE} ${BINARY_FILE}.md5
            exit 1
        fi
    else
        echo "Failure downloading binaries (curl RC=$rc). Please try again and the download will resume from where it stopped."
        exit 1
    fi
}
# This will attempt to download the .tar.gz all at once, but will trigger the
# binaryIncrementalDownload() function upon a failure, allowing for resume
# if there are network failures.
binaryDownload() {
    local BINARY_FILE=$1
    local URL=$2
    # rc is only assigned on failure; it must be local and reset so a value
    # left behind by a previous binaryDownload call (the fabric archive is
    # downloaded before the fabric-ca one) cannot spuriously send a
    # successful download into the incremental-retry branch.
    local rc=""
    echo "===> Downloading: " ${URL}
    # Check if a previous failure occurred and the file was partially downloaded
    if [ -e ${BINARY_FILE} ]; then
        echo "==> Partial binary file found. Resuming download..."
        binaryIncrementalDownload ${BINARY_FILE} ${URL}
    else
        curl ${URL} | tar xz || rc=$?
        if [ ! -z "$rc" ]; then
            echo "==> There was an error downloading the binary file. Switching to incremental download."
            echo "==> Downloading file..."
            binaryIncrementalDownload ${BINARY_FILE} ${URL}
        else
            echo "==> Done."
        fi
    fi
}
# Downloads the platform-specific fabric and fabric-ca binary archives from
# Nexus. Relies on binaryDownload() propagating return code 22 when the
# requested archive does not exist.
binariesInstall() {
echo "===> Downloading version ${FABRIC_TAG} platform specific fabric binaries"
binaryDownload ${BINARY_FILE} https://nexus.hyperledger.org/content/repositories/releases/org/hyperledger/fabric/hyperledger-fabric/${ARCH}-${VERSION}/${BINARY_FILE}
if [ $? -eq 22 ]; then
echo
echo "------> ${FABRIC_TAG} platform specific fabric binary is not available to download <----"
echo
fi
echo "===> Downloading version ${CA_TAG} platform specific fabric-ca-client binary"
binaryDownload ${CA_BINARY_FILE} https://nexus.hyperledger.org/content/repositories/releases/org/hyperledger/fabric-ca/hyperledger-fabric-ca/${ARCH}-${CA_VERSION}/${CA_BINARY_FILE}
if [ $? -eq 22 ]; then
echo
echo "------> ${CA_TAG} fabric-ca-client binary is not available to download (Available from 1.1.0-rc1) <----"
echo
fi
}
# Pulls all docker images when the docker CLI is available; otherwise prints
# a notice and skips.
dockerInstall() {
which docker >& /dev/null
NODOCKER=$?
if [ "${NODOCKER}" == 0 ]; then
echo "===> Pulling fabric Images"
dockerFabricPull ${FABRIC_TAG}
echo "===> Pulling fabric ca Image"
dockerCaPull ${CA_TAG}
echo "===> Pulling thirdparty docker images"
dockerThirdPartyImagesPull ${THIRDPARTY_TAG}
echo
echo "===> List out hyperledger docker images"
# NOTE(review): unquoted hyperledger* may glob against files in the cwd
# before grep sees it — probably intended as plain "hyperledger".
docker images | grep hyperledger*
else
echo "========================================================="
echo "Docker not installed, bypassing download of Fabric images"
echo "========================================================="
fi
}
# Feature toggles; disabled by the -d / -s / -b options below.
DOCKER=true
SAMPLES=true
BINARIES=true
# Parse commandline args pull out
# version and/or ca-version strings first
if [ ! -z $1 -a ${1:0:1} != "-" ]; then
VERSION=$1;shift
if [ ! -z $1 -a ${1:0:1} != "-" ]; then
CA_VERSION=$1;shift
if [ ! -z $1 -a ${1:0:1} != "-" ]; then
THIRDPARTY_IMAGE_VERSION=$1;shift
fi
fi
fi
# prior to 1.2.0 architecture was determined by uname -m
if [[ $VERSION =~ ^1\.[0-1]\.* ]]; then
export FABRIC_TAG=${MARCH}-${VERSION}
export CA_TAG=${MARCH}-${CA_VERSION}
export THIRDPARTY_TAG=${MARCH}-${THIRDPARTY_IMAGE_VERSION}
else
# starting with 1.2.0, multi-arch images will be default
# (":=" only assigns when the variable is unset or empty)
: ${CA_TAG:="$CA_VERSION"}
: ${FABRIC_TAG:="$VERSION"}
: ${THIRDPARTY_TAG:="$THIRDPARTY_IMAGE_VERSION"}
fi
# Archive names used by binariesInstall / binaryDownload.
BINARY_FILE=hyperledger-fabric-${ARCH}-${VERSION}.tar.gz
CA_BINARY_FILE=hyperledger-fabric-ca-${ARCH}-${CA_VERSION}.tar.gz
# then parse opts
while getopts "h?dsb" opt; do
case "$opt" in
h|\?)
printHelp
exit 0
;;
d) DOCKER=false
;;
s) SAMPLES=false
;;
b) BINARIES=false
;;
esac
done
if [ "$SAMPLES" == "true" ]; then
echo
echo "Installing hyperledger/fabric-samples repo"
echo
samplesInstall
fi
if [ "$BINARIES" == "true" ]; then
echo
echo "Installing Hyperledger Fabric binaries"
echo
binariesInstall
fi
if [ "$DOCKER" == "true" ]; then
echo
echo "Installing Hyperledger Fabric docker images"
echo
dockerInstall
fi
|
#!/bin/bash
# -----------------------------------------------------------------------------
#
# Copyright (C) The BioDynaMo Project.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# See the LICENSE file distributed with this work for details.
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# -----------------------------------------------------------------------------
# Require exactly one argument: the OS id of the container to test in.
if [[ $# -ne 1 ]]; then
echo "Wrong number of arguments.
Description:
Run a travis installation test
Usage:
installation-test.sh OS
Arguments:
OS OS id of the container
"
exit 1
fi
BDM_OS=$1
# Repository root: two levels above this script's own directory.
BDM_PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/../.."
set -e -x
# git describe does not work if last commit tag is not checked out
git fetch --unshallow &>/dev/null || true
if [ $BDM_OS != "osx" ]; then
# Non-OSX targets run the test inside the matching docker container.
util/run-inside-docker.sh $BDM_OS util/travis-ci/docker-installation-test-wrapper.sh
else
if [ `uname` != "Darwin" ]; then
echo "ERROR: Installation tests for OSX can only be done on an OSX operating system"
exit 1
fi
git config --system user.name "Test User"
git config --system user.email user@test.com
# hide the fact that this is running on travis so DetectOs detects "osx"
unset TRAVIS
# don't unset TRAVIS_OS_NAME, because it is needed by the ReuqireSudo workaround
test/installation-test.sh
fi
|
# Check that the environment variable is set and points
# to a valid directory
if [[ "$METASHARE_SW_DIR" == "" ]] ; then
    echo "The environment variable METASHARE_SW_DIR must be defined"
    echo "and contain the directory with Metashare software."
    exit 1
fi
# Remove trailing slash if present. Parameter expansion replaces the previous
# unquoted `echo $METASHARE_SW_DIR | sed`, which word-split and glob-expanded
# paths containing spaces or wildcard characters.
METASHARE_SW_DIR="${METASHARE_SW_DIR%/}"
# Verify METASHARE_SW_DIR is a valid directory
if [ ! -d "$METASHARE_SW_DIR" ] ; then
    echo "$METASHARE_SW_DIR" " is not a valid directory."
    exit 1
fi
export METASHARE_DIR="$METASHARE_SW_DIR/metashare"
MSERV_DIR="$METASHARE_SW_DIR/misc/tools/multitest"
|
// Copyright (C) (See commit logs on github.com/robhz786/strf)
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "test_utils.hpp"
namespace {
// Valid UTF-8 input must sanitize to UTF-32 unchanged. The `> N` suffix
// right-aligns the output to width N, padding with spaces.
STRF_TEST_FUNC void utf8_to_utf32_valid_sequences()
{
TEST(U"\U00010000") (strf::sani(u8"\U00010000"));
TEST(U" ") (strf::sani("") > 1);
TEST(U"\U0010FFFF") (strf::sani("\xF4\x8F\xBF\xBF"));
TEST(U" abc") (strf::sani("abc") > 4);
TEST(U" ab\u0080\u0800\uD7FF\U00010000\U0010FFFF")
(strf::sani(u8"ab\u0080\u0800\uD7FF\U00010000\U0010FFFF") > 8);
// TEST_CALLING_RECYCLE_AT<K[,J]> forces the destination buffer to recycle
// after K (then J) code units, exercising transcoding across buffer breaks.
TEST_CALLING_RECYCLE_AT<2,2>(U"abcd") (strf::sani("abcdef"));
TEST_CALLING_RECYCLE_AT<2> (U"ab") (strf::sani(u8"ab\u0080"));
TEST_CALLING_RECYCLE_AT<2> (U"ab") (strf::sani(u8"ab\u0800"));
TEST_CALLING_RECYCLE_AT<2> (U"ab") (strf::sani(u8"ab\uD7FF"));
TEST_CALLING_RECYCLE_AT<2> (U"ab") (strf::sani(u8"ab\U00010000"));
TEST_CALLING_RECYCLE_AT<3> (U"ab\u0080") (strf::sani(u8"ab\u0080"));
TEST_CALLING_RECYCLE_AT<3> (U"ab\u0800") (strf::sani(u8"ab\u0800"));
TEST_CALLING_RECYCLE_AT<3> (U"ab\uD7FF") (strf::sani(u8"ab\uD7FF"));
TEST_CALLING_RECYCLE_AT<3> (U"ab\U00010000") (strf::sani(u8"ab\U00010000"));
TEST_CALLING_RECYCLE_AT<3> (U"ab\U0010FFFF") (strf::sani(u8"ab\U0010FFFF"));
TEST_CALLING_RECYCLE_AT<2, 1> (U"ab\u0080") (strf::sani(u8"ab\u0080"));
TEST_CALLING_RECYCLE_AT<2, 1> (U"ab\u0800") (strf::sani(u8"ab\u0800"));
TEST_CALLING_RECYCLE_AT<2, 1> (U"ab\uD7FF") (strf::sani(u8"ab\uD7FF"));
TEST_CALLING_RECYCLE_AT<2, 2> (U"ab\U00010000") (strf::sani(u8"ab\U00010000"));
TEST_CALLING_RECYCLE_AT<2, 1> (U"ab\U0010FFFF") (strf::sani(u8"ab\U0010FFFF"));
{
// when surrogates are allowed
// (expected strings are built as arrays because lone surrogates cannot
// be written in a U"" literal)
const char32_t u32str_D800[] = {U' ', 0xD800, 0};
const char32_t u32str_DBFF[] = {U' ', 0xDBFF, 0};
const char32_t u32str_DC00[] = {U' ', 0xDC00, 0};
const char32_t u32str_DFFF[] = {U' ', 0xDFFF, 0};
TEST(u32str_D800) .with(strf::surrogate_policy::lax) (strf::sani("\xED\xA0\x80") > 2);
TEST(u32str_DBFF) .with(strf::surrogate_policy::lax) (strf::sani("\xED\xAF\xBF") > 2);
TEST(u32str_DC00) .with(strf::surrogate_policy::lax) (strf::sani("\xED\xB0\x80") > 2);
TEST(u32str_DFFF) .with(strf::surrogate_policy::lax) (strf::sani("\xED\xBF\xBF") > 2);
TEST_CALLING_RECYCLE_AT<1, 1> (u32str_D800)
.with(strf::surrogate_policy::lax) (strf::sani("\xED\xA0\x80") > 2);
TEST_CALLING_RECYCLE_AT<1> (U" ")
.with(strf::surrogate_policy::lax) (strf::sani("\xED\xA0\x80") > 2);
}
}
// Invalid UTF-8 input: each maximal invalid subpart must be replaced with
// one U+FFFD replacement character (Unicode standard, Table 3-8 convention).
STRF_TEST_FUNC void utf8_to_utf32_invalid_sequences()
{
// sample from Table 3-8 of Unicode standard
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xF1\x80\x80\xE1\x80\xC0") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xF1\x80\x80\xE1\x80\xC0_") > 5);
// missing leading byte
TEST(U" \uFFFD") (strf::sani("\xBF") > 2);
TEST(U" \uFFFD_") (strf::sani("\xBF_") > 3);
// missing leading byte
TEST(U" \uFFFD\uFFFD") (strf::sani("\x80\x80") > 3);
TEST(U" \uFFFD\uFFFD_") (strf::sani("\x80\x80_") > 4);
// overlong sequence
TEST(U" \uFFFD\uFFFD") (strf::sani("\xC1\xBF") > 3);
TEST(U" \uFFFD\uFFFD_") (strf::sani("\xC1\xBF_") > 4);
// overlong sequence
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xE0\x9F\x80") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xE0\x9F\x80_") > 5);
// overlong sequence with extra continuation bytes
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xC1\xBF\x80") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xC1\xBF\x80_") > 5);
// overlong sequence with extra continuation bytes
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD") (strf::sani("\xE0\x9F\x80\x80") > 5);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xE0\x9F\x80\x80_") > 6);
// overlong sequence
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD") (strf::sani("\xF0\x8F\xBF\xBF" ) > 5);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF0\x8F\xBF\xBF_" ) > 6);
// overlong sequence with extra continuation bytes
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD\uFFFD") (strf::sani("\xF0\x8F\xBF\xBF\x80" ) > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF0\x8F\xBF\xBF\x80_" ) > 7);
// codepoint too big.
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF4\x90\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF5\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF6\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF7\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF8\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xF9\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xFA\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xFB\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xFC\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xFD\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xFE\x80\x80\x80_") > 6);
TEST(U" \uFFFD\uFFFD\uFFFD\uFFFD_") (strf::sani("\xFF\x80\x80\x80_") > 6);
// missing continuation
TEST(U" \uFFFD") (strf::sani("\xF0\x90\xBF" ) > 2);
TEST(U" \uFFFD_") (strf::sani("\xF0\x90\xBF_" ) > 3);
TEST(U" \uFFFD") (strf::sani("\xC2") > 2);
TEST(U" \uFFFD_") (strf::sani("\xC2_") > 3);
TEST(U" \uFFFD") (strf::sani("\xE0") > 2);
TEST(U" \uFFFD_") (strf::sani("\xE0_") > 3);
TEST(U" \uFFFD") (strf::sani("\xE0\xA0") > 2);
TEST(U" \uFFFD_") (strf::sani("\xE0\xA0_") > 3);
TEST(U" \uFFFD") (strf::sani("\xE1") > 2);
TEST(U" \uFFFD_") (strf::sani("\xE1_") > 3);
TEST(U" \uFFFD") (strf::sani("\xF1") > 2);
TEST(U" \uFFFD_") (strf::sani("\xF1_") > 3);
TEST(U" \uFFFD") (strf::sani("\xF1\x81") > 2);
TEST(U" \uFFFD_") (strf::sani("\xF1\x81_") > 3);
TEST(U" \uFFFD") (strf::sani("\xF1\x81\x81") > 2);
TEST(U" \uFFFD_") (strf::sani("\xF1\x81\x81_") > 3);
// surrogate
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xED\xA0\x80") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xED\xAF\xBF") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xED\xB0\x80") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xED\xBF\xBF") > 4);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xED\xA0\x80_") > 5);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xED\xAF\xBF_") > 5);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xED\xB0\x80_") > 5);
TEST(U" \uFFFD\uFFFD\uFFFD_") (strf::sani("\xED\xBF\xBF_") > 5);
// missing continuation, but could only be a surrogate.
TEST(U" \uFFFD\uFFFD") (strf::sani("\xED\xA0") > 3);
TEST(U" \uFFFD\uFFFD_") (strf::sani("\xED\xBF_") > 4);
// missing continuation. It could only be a surrogate, but surrogates are allowed
auto allow_surr = strf::surrogate_policy::lax;
TEST(U" \uFFFD") .with(allow_surr) (strf::sani("\xED\xA0") > 2);
TEST(U" \uFFFD_") .with(allow_surr) (strf::sani("\xED\xBF_") > 3);
// missing continuation. Now it starts with \xED, but it is not a surrogate
TEST(U" \uFFFD") (strf::sani("\xED\x9F") > 2);
TEST(U" \uFFFD_") (strf::sani("\xED\x9F_") > 3);
// cover when recycle needs to be called
TEST_CALLING_RECYCLE_AT<2,2>(U" \uFFFD\uFFFD\uFFFD") (strf::sani("\xED\xA0\x80") > 4);
TEST_CALLING_RECYCLE_AT<2> (U" \uFFFD") (strf::sani("\xED\xA0\x80") > 4);
}
// Counts invalid-sequence notifications; reset before each check below.
STRF_TEST_FUNC int error_handler_calls_count = 0 ;
// Exception type used to verify that notifiers may throw through strf.
struct dummy_exception {};
// Verifies that the invalid-sequence notifier is invoked once per replacement
// character while sanitizing UTF-8 to UTF-32, and that it may throw.
STRF_TEST_FUNC void utf8_to_utf32_error_notifier()
{
    // Notifier increments the module-level counter on every invalid sequence.
    strf::invalid_seq_notifier notifier{ [](){++error_handler_calls_count;} };

    ::error_handler_calls_count = 0;
    // "\xED\xA0\x80" encodes a surrogate: three invalid bytes -> 3 calls.
    TEST(U"\uFFFD\uFFFD\uFFFD").with(notifier) (strf::sani("\xED\xA0\x80"));
    TEST_EQ(::error_handler_calls_count, 3);

    ::error_handler_calls_count = 0;
    // Same input with an early buffer recycle; the notifier must still fire.
    TEST_CALLING_RECYCLE_AT<1>(U"\uFFFD").with(notifier) (strf::sani("\xED\xA0\x80"));
    TEST_TRUE(::error_handler_calls_count > 0);

#if defined(__cpp_exceptions) && __cpp_exceptions && ! defined(__CUDACC__)
    // check that an exception can be thrown, i.e,
    // ensure there is no `noexcept` blocking it
    strf::invalid_seq_notifier notifier_that_throws{ [](){ throw dummy_exception{}; } };
    bool thrown = false;
    try {
        char32_t buff[10];
        strf::to(buff) .with(notifier_that_throws) (strf::sani("\xED\xA0\x80"));
    } catch (dummy_exception&) {
        thrown = true;
    } catch(...) {
    }
    TEST_TRUE(thrown);
#endif // __cpp_exceptions
}
// Checks UTF-8 -> UTF-32 transcoder lookup, both through the dynamic-charset
// API and at compile time via find_transcoder's deduced return type.
STRF_TEST_FUNC void utf8_to_utf32_find_transcoder()
{
#if ! defined(__CUDACC__)
    using static_transcoder_type = strf::static_transcoder
        <char, char32_t, strf::csid_utf8, strf::csid_utf32>;

    strf::dynamic_charset<char> dyn_utf8 = strf::utf8_t<char>{}.to_dynamic();
    strf::dynamic_charset<char32_t> dyn_utf32 = strf::utf32_t<char32_t>{}.to_dynamic();
    strf::dynamic_transcoder<char, char32_t> tr = strf::find_transcoder(dyn_utf8, dyn_utf32);

    // The dynamic lookup must resolve to the same functions as the static one.
    TEST_TRUE(tr.transcode_func() == static_transcoder_type::transcode);
    TEST_TRUE(tr.transcode_size_func() == static_transcoder_type::transcode_size);
#endif // defined(__CUDACC__)

    // Compile-time overload must return the matching static_transcoder type.
    TEST_TRUE((std::is_same
        < strf::static_transcoder
            < char, char32_t, strf::csid_utf8, strf::csid_utf32 >
        , decltype(strf::find_transcoder( strf::utf_t<char>{}
                                        , strf::utf_t<char32_t>{})) >
        :: value));
}
} // unnamed namespace
// Entry point aggregating all UTF-8 -> UTF-32 test groups above.
STRF_TEST_FUNC void test_utf8_to_utf32()
{
    utf8_to_utf32_valid_sequences();
    utf8_to_utf32_invalid_sequences();
    utf8_to_utf32_error_notifier();
    utf8_to_utf32_find_transcoder();
}

REGISTER_STRF_TEST(test_utf8_to_utf32);
|
#!/usr/bin/env bash
# Bootstrap script: ensures Docker and DockSTARTer are present, then
# initializes DockSTARTer for the project. The installation steps are still
# placeholders — only the echo statements run today (see examples below).
set -euo pipefail
# Restrict word splitting to newlines and tabs.
IFS=$'\n\t'

# Check if Docker is installed
if ! command -v docker &> /dev/null; then
    echo "Docker is not installed. Installing Docker..."
    # Add commands to install Docker
    # For example:
    # curl -fsSL https://get.docker.com -o get-docker.sh
    # sudo sh get-docker.sh
fi

# Check if Dockstarter is installed
if ! command -v dockstarter &> /dev/null; then
    echo "Dockstarter is not installed. Installing Dockstarter..."
    # Add commands to install Dockstarter
    # For example:
    # git clone https://github.com/GhostWriters/DockSTARTer.git
    # cd DockSTARTer
    # ./main.sh
fi

# Initialize Dockstarter and configure it for the specified project
echo "Initializing Dockstarter for the project..."
# Add commands to initialize and configure Dockstarter for the project
# For example:
# dockstarter init
# dockstarter configure <project_name>
|
# Exports the fixture environment consumed by tfenv for each test case.
# Fix: the module source previously read "git::https://https://github.com/..."
# (duplicated URL scheme), which terraform cannot resolve.
function setup() {
  export TF_CLI_INIT_FROM_MODULE="git::https://github.com/cloudposse/terraform-null-label?ref=master"
  export TF_CLI_INIT_BACKEND=false
  export TF_CLI_INIT="module/"
  # Allow the caller to point at a different tfenv binary.
  export TF_ENV=${TF_ENV:-../release/tfenv}
}
# Removes every fixture variable exported by setup() so tests stay isolated.
function teardown() {
  unset TF_CLI_INIT_FROM_MODULE
  unset TF_CLI_INIT_BACKEND
  unset TF_CLI_INIT
  unset TF_ENV
}
# Asserts that tfenv synthesizes TF_CLI_ARGS_init from the TF_CLI_INIT_*
# variables in the exact "-backend=... -from-module=... <dir>" shape.
@test "TF_CLI_ARGS_init works" {
  which ${TF_ENV}
  # Echo the generated value to the test log for debugging.
  ${TF_ENV} printenv TF_CLI_ARGS_init >&2
  [ "$(${TF_ENV} printenv TF_CLI_ARGS_init)" != "" ]
  [ "$(${TF_ENV} printenv TF_CLI_ARGS_init)" == "-backend=${TF_CLI_INIT_BACKEND} -from-module=${TF_CLI_INIT_FROM_MODULE} ${TF_CLI_INIT}" ]
}
|
<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import {NgForm} from '@angular/forms';
import { SuccessMsgComponent } from '../success-msg/success-msg.component';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import {HttpClient,HttpErrorResponse} from '@angular/common/http';
import { ServicingService } from '../../services/addServicing.service';
import {environment} from '../../../../environments/environment'
@Component({
selector: 'app-add-booking',
templateUrl: './add-booking.component.html',
styleUrls: ['./add-booking.component.scss']
})
/**
 * Booking creation screen: captures vehicle, customer and amount details,
 * uploads up to three attachments, and posts the booking to the payment
 * gateway endpoint.
 */
export class AddBookingComponent implements OnInit {
  // --- template-bound form models ---
  vehicle_details: any = {};
  cust_details: any = {};
  amt_details: any = {};
  service_details: any = {};
  svc_id: string;                    // service-centre id used by all backend calls
  allbrand_list: any = [];
  insuranceFlag: boolean;
  models_list: any = [];
  variant_list: any = [];
  filename: any;
  myFiles: string[] = [];            // files selected for upload (max 3)
  BtnDisable: boolean;
  user_list: any = [];               // enabled service advisors
  file_error_msg: boolean = false;
  salutation: any = [];
  user_id: string;
  countrycode1: string;
  mobile_length: boolean = false;
  upload_file = environment.upload_file;   // booking-upload endpoint URL

  constructor(private router: Router,
    private http: HttpClient,
    private modalService: NgbModal,
    private ServicingService: ServicingService) { }

  ngOnInit() {
    this.countrycode1 = "+91";
    this.user_id = JSON.parse(sessionStorage.getItem("userId"));
    // The insurance flag is persisted as the string "1" when enabled.
    if (JSON.parse(sessionStorage.getItem('insurance')) == "1") {
      this.insuranceFlag = true;
    }
    else {
      this.insuranceFlag = false;
    }
    // Prefer the explicitly selected service centre; fall back to the global one.
    if (sessionStorage.getItem('selectedsvc')) {
      this.svc_id = sessionStorage.getItem('selectedsvc');
    }
    else {
      this.svc_id = JSON.parse(sessionStorage.getItem('globalsvcid'));
    }
    this.getAllBrands();
    this.salutation = [
      { id: 1, type: 'Mr' },
      { id: 2, type: 'Mrs' },
      { id: 3, type: 'Ms' },
    ];
    this.cust_details.salutation = 'Mr';
    this.getUserList();
  }

  // Send session cookies with the upload request.
  public httpOptions = {
    withCredentials: true
  };

  /** Stores the chosen brand id and loads the brand's models. */
  onSelectBrand(brandId) {
    for (let i = 0; i < this.allbrand_list.length; i++) {
      if (this.allbrand_list[i].brand_id == brandId) {
        this.vehicle_details.brand = this.allbrand_list[i].brand_id;
      }
    }
    this.getModels(this.vehicle_details.brand);
  }

  /** Clears validation flags while the mobile number is being edited. */
  checkMobile(ev) {
    this.BtnDisable = false;
    this.file_error_msg = false;
    if (ev.target.value.length == 10) {
      // this.mobile_length = true;
    }
  }

  /** Stores the chosen model and loads its variants. */
  onSelectModel(model_id) {
    for (let i = 0; i < this.models_list.length; i++) {
      if (this.models_list[i].model_id == model_id) {
        this.vehicle_details.model = this.models_list[i];
      }
    }
    this.getVariants(this.vehicle_details.model);
  }

  /** Fetches variants; a login flag of 0 means the session expired. */
  getVariants(variant_id) {
    const reqpara3 = {
      requesttype: 'getvariants',
      brandid: variant_id
    }
    const as3 = JSON.stringify(reqpara3)
    this.ServicingService.webServiceCall(as3).subscribe(res => {
      if (res[0].login === 0) {
        sessionStorage.removeItem('currentUser');
        this.router.navigate(['/auth/login']);
      }
      else {
        this.variant_list = res[0].models
      }
    });
  }

  /** Fetches models for a brand; auto-loads variants when only one model exists. */
  getModels(brand_id) {
    const reqpara2 = {
      requesttype: 'getmodels',
      brandid: brand_id
    }
    const as2 = JSON.stringify(reqpara2)
    this.ServicingService.webServiceCall(as2).subscribe(res => {
      if (res[0].login === 0) {
        sessionStorage.removeItem('currentUser');
        this.router.navigate(['/auth/login']);
      }
      else {
        this.models_list = res[0].models;
        if (this.models_list.length === 1) {
          var model_id = this.models_list[0].model_id;
          this.getVariants(model_id);
        }
      }
    });
  }

  /** Collects selected files; more than 3 files disables submission. */
  getFileDetails(e) {
    this.BtnDisable = false;
    this.filename = e.target.files[0].name;
    for (var i = 0; i < e.target.files.length; i++) {
      this.myFiles.push(e.target.files[i]);
    }
    if (this.myFiles.length > 3) {
      this.BtnDisable = true;
    }
    else {
      this.BtnDisable = false;
    }
  }

  /** Removes one selected file and re-evaluates the submit button state. */
  removefile(file) {
    const index: number = this.myFiles.indexOf(file);
    if (index !== -1) {
      this.myFiles.splice(index, 1);
    }
    if (this.myFiles.length > 0) {
      this.BtnDisable = false;
    }
    else {
      this.BtnDisable = true;
    }
  }

  /** Loads all brands available for the current service centre. */
  getAllBrands() {
    const reqpara1 = {
      requesttype: 'getallbrands',
      svcid: this.svc_id
    }
    const as1 = JSON.stringify(reqpara1)
    this.ServicingService.webServiceCall(as1).subscribe
      (res => {
        if (res[0].login === 0) {
          sessionStorage.removeItem('currentUser');
          this.router.navigate(['/auth/login']);
        }
        else {
          this.allbrand_list = res[0].allbrands;
        }
      });
  }

  /** Loads the enabled users (service advisors) of the service centre. */
  getUserList() {
    this.user_list = [];
    const List1 = {
      requesttype: 'getuserlist',
      servicecentreid: this.svc_id,
    }
    const ListReq1 = JSON.stringify(List1);
    this.ServicingService.webServiceCall(ListReq1).subscribe
      (res => {
        for (var i = 0; i < res[0].userlist.length; i++) {
          if (res[0].userlist[i].isenabled == '1') {
            this.user_list.push(res[0].userlist[i])
          }
        }
      });
  }

  /**
   * Validates the mobile number, assembles the multipart booking payload and
   * posts it to the upload endpoint.
   */
  onSubmit(f: NgForm) {
    var mobile = f.value.mobile1.toString().length
    if (mobile < 10) {
      this.BtnDisable = true;
      this.mobile_length = true;
      this.file_error_msg = true;
    }
    else {
      this.BtnDisable = false;
      const frmData: FormData = new FormData();
      // Default the invoice id to "0" when the optional field was left empty.
      const inv = f.value.inv === undefined ? "0" : f.value.inv;
      frmData.append('requesttype', 'createbooking_paymentgw');
      frmData.append('vehnumber', f.value.num);
      frmData.append('vehbrand', f.value.brand);
      frmData.append('carmodelid', f.value.model);
      frmData.append('carsubmodelid', f.value.variant);
      frmData.append('customername', f.value.salutation1 + '.' + f.value.Cus_name);
      frmData.append('customermobile1', f.value.mobile1);
      frmData.append('customeremail', f.value.email);
      frmData.append('advisorid', f.value.ServiceAdvisor);
      frmData.append('creid', this.user_id);
      // Fix: use the resolved service centre instead of the hard-coded "1183".
      frmData.append('svcid', this.svc_id);
      frmData.append('amount', f.value.amt);
      frmData.append('description', f.value.description);
      // Fix: send the defaulted value; the raw field may be undefined.
      frmData.append('invoiceid', inv);
      for (var i = 0; i < this.myFiles.length; i++) {
        frmData.append('file' + i, this.myFiles[i]);
      }
      this.http.post(this.upload_file, frmData, this.httpOptions).subscribe(
        data => {
          // queue_exists distinguishes a fresh queue ("0") from an existing one.
          if (data[0].queue[0].queue_exists === "0") {
            this.success(data[0].queue[0].queue_id, "0");
            this.myFiles = [];
            f.reset();
          }
          else {
            this.success(data[0].queue[0].queue_id, "1");
            this.myFiles = [];
            f.reset();
          }
        },
        (err: HttpErrorResponse) => {
          console.log(err.message);
        });
    }
  }

  /** Opens the success modal, passing the created queue id and result flag. */
  success(queueId, res: any) {
    var dataTopass = { queueId: queueId, res: res }
    const activeModal = this.modalService.open(SuccessMsgComponent, { size: 'lg', container: 'nb-layout' });
    activeModal.componentInstance.modalHeader = 'Message';
    activeModal.componentInstance.modalContent = dataTopass;
  }
}
|
<reponame>zenitheesc/ZenView<gh_stars>1-10
const Container = require('../../../../../formBuilder/formBuilder').Container;
module.exports = class terminalEditMenu {
constructor() {
this.form = Container.div({}, {
id: 'terminalEditMenuConfig',
att: 'terminal',
conditions: [
{
id: 'BlockModule',
att: 'value',
requiredValue: 'terminal',
},
],
},
);
}
};
|
/*function display_map(latitude,longitude)// pour afficher un seul marqueur
{
let mymap = L.map('mapid').setView([48.8534, 2.3488], 13);
L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
attribution: 'Map data © <a href="https://www.openstreetmap.org/">OpenStreetMap</a> contributors, <a href="https://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, Imagery © <a href="https://www.mapbox.com/">Mapbox</a>',
maxZoom: 18,
id: 'mapid',
accessToken: 'your.mapbox.access.token'
}).addTo(mymap);
L.marker([latitude, longitude]).addTo(mymap);
let circle = L.circle([48.8534, 2.3488], {
color: 'red',
fillColor: '#f03',
fillOpacity: 0.5,
radius: 250
}).addTo(mymap);
let popup = L.popup()
.setLatLng([48.8534, 2.3488])
.setContent("Hello world")
.openOn(mymap);
}*/
// Attaches the OpenStreetMap tile layer to the Leaflet map.
// NOTE(review): relies on a global `mymap` created elsewhere (the commented-out
// variant above built it locally) — confirm it exists before calling.
// NOTE(review): `accessToken` is still the Mapbox placeholder value.
function display_map()
{
    L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
        attribution: 'Map data © <a href="https://www.openstreetmap.org/">OpenStreetMap</a> contributors, <a href="https://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, Imagery © <a href="https://www.mapbox.com/">Mapbox</a>',
        maxZoom: 18,
        id: 'mapid',
        accessToken: 'your.mapbox.access.token'
    }).addTo(mymap);
}
// Adds a marker at the given coordinates to the global Leaflet map `mymap`.
function display_marker(latitude, longitude){
    L.marker([latitude, longitude]).addTo(mymap);
}
|
<reponame>ViniciusDev26/api-nest<filename>src/users/users.controller.ts
import { BadRequestException, Body, Controller, Post } from '@nestjs/common';
import { Public } from 'src/auth/decorators/public.decorator';
import { ICreateAndUpdateUser } from './dtos/ICreateAndUpdateUser';
import { UsersService } from './users.service';
/**
 * REST controller for user registration.
 * POST /users is public (no auth guard) and validates the payload manually
 * before delegating to UsersService.
 */
@Controller('users')
export class UsersController {
  constructor(private readonly usersService: UsersService) {}

  @Public()
  @Post('/')
  async create(@Body() userToRegister: ICreateAndUpdateUser) {
    // Reject as soon as the first mandatory field is found missing —
    // same first-missing-field error message as before.
    const missingField = ['name', 'email', 'password'].find(
      (field) => !userToRegister[field],
    );
    if (missingField) {
      throw new BadRequestException(`${missingField} is required`);
    }
    return await this.usersService.create(userToRegister);
  }
}
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Resolve the repo root relative to this script and load shared helpers.
# Fix: quote all path expansions (and use BASH_SOURCE[0]) so the script
# survives paths containing spaces and nounset-strict shells.
KUBE_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"

kube::golang::setup_env

"${KUBE_ROOT}/hack/build-go.sh" cmd/linkcheck

linkcheck=$(kube::util::find-binary "linkcheck")

kube::util::ensure-temp-dir
OUTPUT="${KUBE_TEMP}"/linkcheck-output
cleanup() {
  rm -rf "${OUTPUT}"
}
trap "cleanup" EXIT SIGINT
mkdir -p "${OUTPUT}"

APIROOT="${KUBE_ROOT}/pkg/api/"
APISROOT="${KUBE_ROOT}/pkg/apis/"
DOCROOT="${KUBE_ROOT}/docs/"
# Quote each element so paths with spaces stay single array entries.
ROOTS=("${APIROOT}" "${APISROOT}" "${DOCROOT}")
found_invalid=false
for root in "${ROOTS[@]}"; do
  # Exit code 1 = invalid links found (collect and continue);
  # anything greater is a tool failure (abort immediately).
  "${linkcheck}" "--root-dir=${root}" 2> >(tee -a "${OUTPUT}/error" >&2) && ret=0 || ret=$?
  if [[ $ret -eq 1 ]]; then
    echo "Failed: found invalid links in ${root}."
    found_invalid=true
  fi
  if [[ $ret -gt 1 ]]; then
    echo "Error running linkcheck"
    exit 1
  fi
done

if [ ${found_invalid} = true ]; then
  echo "Summary of invalid links:"
  cat "${OUTPUT}/error"
  exit 1
fi

# ex: ts=2 sw=2 et filetype=sh
|
<reponame>Miyashin6/Introduction<gh_stars>1-10
package com.galfins.gnss_compare.Corrections;
import android.location.Location;
import com.galfins.gogpsextracts.Coordinates;
import com.galfins.gogpsextracts.IonoGps;
import com.galfins.gogpsextracts.NavigationProducer;
import com.galfins.gogpsextracts.SatellitePosition;
import com.galfins.gogpsextracts.Time;
import com.galfins.gogpsextracts.TopocentricCoordinates;
import com.galfins.gogpsextracts.Constants;
/**
* Created by <NAME> on 10/02/2018.
*
* Ionospheric Correction based on Klobuchar's Algorithm
*
* This algorithm can be applied to Galileo, GPS pseudoranges or any other constellation
* However the required coefficients to compute this correction are contained only in the
* GPS Navigation message. So for Galileo satellites this correciton will always return 0.0
*
* It accounts for roughly 50% of the total ionospheric error affecting the pseudoranges
*
*
*/
/**
 * Klobuchar single-frequency ionospheric correction.
 *
 * The alpha/beta coefficients are broadcast only in the GPS navigation
 * message, so when they are unavailable the correction stays 0.0
 * (see the file-level comment above).
 */
public class IonoCorrection extends Correction {

    // Last computed correction, in meters; returned by getCorrection().
    private double correctionValue;

    private final static String NAME = "Klobuchar Iono Correction";

    public IonoCorrection() {
        super();
    }

    /**
     * Computes the Klobuchar ionospheric delay for one satellite and caches
     * it in {@link #correctionValue}.
     *
     * Fix: the original dereferenced {@code iono} (getBeta) before its null
     * check, so the null check was dead and a missing ionospheric model
     * caused a NullPointerException. The guard now runs first and yields a
     * zero correction instead.
     */
    public void calculateCorrection(Time currentTime, Coordinates approximatedPose, SatellitePosition satelliteCoordinates, NavigationProducer navigationProducer, Location initialLocation) {
        IonoGps iono = navigationProducer.getIono(currentTime.getMsec(), initialLocation);

        // No model or no broadcast coefficients -> no correction applicable.
        if (iono == null || iono.getBeta(0) == 0) {
            correctionValue = 0.0;
            return;
        }

        // Compute the elevation and azimuth angles for the satellite
        TopocentricCoordinates topo = new TopocentricCoordinates();
        topo.computeTopocentric(approximatedPose, satelliteCoordinates);

        // Assign the elevation and azimuth information to new variables
        double elevation = topo.getElevation();
        double azimuth = topo.getAzimuth();
        double ionoCorr = 0;

        elevation = Math.abs(elevation);

        // Parameter conversion to semicircles
        double lon = approximatedPose.getGeodeticLongitude() / 180; // geod.get(0)
        double lat = approximatedPose.getGeodeticLatitude() / 180; //geod.get(1)
        azimuth = azimuth / 180;
        elevation = elevation / 180;

        // Klobuchar algorithm

        // Compute the slant factor
        double f = 1 + 16 * Math.pow((0.53 - elevation), 3);

        // Compute the earth-centred angle
        double psi = 0.0137 / (elevation + 0.11) - 0.022;

        // Compute the latitude of the Ionospheric Pierce Point (IPP),
        // clamped to +/-0.416 semicircles per the GPS specification
        double phi = lat + psi * Math.cos(azimuth * Math.PI);

        if (phi > 0.416) {
            phi = 0.416;
        }
        if (phi < -0.416) {
            phi = -0.416;
        }

        // Compute the longitude of the IPP
        double lambda = lon + (psi * Math.sin(azimuth * Math.PI))
                / Math.cos(phi * Math.PI);

        // Find the geomagnetic latitude of the IPP
        double ro = phi + 0.064 * Math.cos((lambda - 1.617) * Math.PI);

        // Find the local time at the IPP, wrapped into [0, 86400)
        double t = lambda * 43200 + currentTime.getGpsTime();

        while (t >= 86400)
            t = t - 86400;

        while (t < 0)
            t = t + 86400;

        // Compute the period of ionospheric delay (floored at 72000 s)
        double p = iono.getBeta(0) + iono.getBeta(1) * ro + iono.getBeta(2) * Math.pow(ro, 2) + iono.getBeta(3) * Math.pow(ro, 3);

        if (p < 72000)
            p = 72000;

        // Compute the amplitude of ionospheric delay (floored at 0)
        double a = iono.getAlpha(0) + iono.getAlpha(1) * ro + iono.getAlpha(2) * Math.pow(ro, 2) + iono.getAlpha(3) * Math.pow(ro, 3);

        if (a < 0)
            a = 0;

        // Compute the phase of ionospheric delay
        double x = (2 * Math.PI * (t - 50400)) / p;

        // Compute the ionospheric correction (night-time constant term
        // outside the cosine's validity range |x| >= 1.57)
        if (Math.abs(x) < 1.57) {
            ionoCorr = Constants.SPEED_OF_LIGHT
                    * f
                    * (5e-9 + a
                    * (1 - (Math.pow(x, 2)) / 2 + (Math.pow(x, 4)) / 24));
        } else {
            ionoCorr = Constants.SPEED_OF_LIGHT * f * 5e-9;
        }

        correctionValue = ionoCorr;
    }

    /** @return the last correction computed by calculateCorrection(), meters. */
    @Override
    public double getCorrection() {
        return correctionValue;
    }

    @Override
    public String getName() {
        return NAME;
    }

    /** Registers this correction type in the Correction factory registry. */
    public static void registerClass(){
        register(NAME, IonoCorrection.class);
    }
}
|
package com.javatest.framework.commons.utils;
/**
 * Utility for parsing IPv4/IPv6 address literals into their binary form.
 * Decompiled-style code: the varN parameter/local names come from a
 * decompiler; behaviour mirrors sun.net.util.IPAddressUtil.
 */
public class IPAddressUtil {
    // Byte lengths: IPv4 address, IPv6 address, one 16-bit IPv6 group.
    private static final int INADDR4SZ = 4;
    private static final int INADDR16SZ = 16;
    private static final int INT16SZ = 2;

    public IPAddressUtil() {
    }

    /**
     * Parses a dotted-decimal IPv4 literal into 4 network-order bytes.
     * Also accepts the historical short forms "a.b.c", "a.b" and "a", where
     * the final part fills all remaining bytes.
     *
     * @param var0 candidate IPv4 literal
     * @return the 4 address bytes, or null if var0 is not a valid literal
     */
    public static byte[] textToNumericFormatV4(String var0) {
        byte[] var1 = new byte[4];   // result bytes
        long var2 = 0L;              // value of the current numeric part
        int var4 = 0;                // index of the current '.'-separated part
        boolean var5 = true;         // true while the current part is still empty
        int var6 = var0.length();
        // Longest valid literal is "255.255.255.255" (15 chars).
        if (var6 != 0 && var6 <= 15) {
            for (int var7 = 0; var7 < var6; ++var7) {
                char var8 = var0.charAt(var7);
                if (var8 == '.') {
                    // A dot must terminate a non-empty part in [0, 255];
                    // at most 3 dots are allowed.
                    if (var5 || var2 < 0L || var2 > 255L || var4 == 3) {
                        return null;
                    }
                    var1[var4++] = (byte) ((int) (var2 & 255L));
                    var2 = 0L;
                    var5 = true;
                } else {
                    int var9 = Character.digit(var8, 10);
                    if (var9 < 0) {
                        return null;
                    }
                    var2 *= 10L;
                    var2 += (long) var9;
                    var5 = false;
                }
            }
            // The final part fills all remaining bytes, so its upper bound
            // depends on how many parts were already consumed.
            if (!var5 && var2 >= 0L && var2 < 1L << (4 - var4) * 8) {
                switch(var4) {
                    case 0: {
                        var1[0] = (byte) ((int) (var2 >> 24 & 255L));
                        break;
                    }
                    case 1: {
                        var1[1] = (byte) ((int) (var2 >> 16 & 255L));
                        break;
                    }
                    case 2: {
                        var1[2] = (byte) ((int) (var2 >> 8 & 255L));
                        break;
                    }
                    case 3: {
                        var1[3] = (byte) ((int) (var2 >> 0 & 255L));
                        break;
                    }
                    default: {
                        break;
                    }
                }
                return var1;
            } else {
                return null;
            }
        } else {
            return null;
        }
    }

    /**
     * Parses an IPv6 literal (optionally with an embedded IPv4 tail and/or a
     * "%zone" suffix) into 16 network-order bytes.
     *
     * @param var0 candidate IPv6 literal
     * @return 16 address bytes; 4 bytes when the literal turns out to be an
     *         IPv4-mapped address; null when invalid
     */
    public static byte[] textToNumericFormatV6(String var0) {
        // Shortest possible literal is "::".
        if (var0.length() < 2) {
            return null;
        } else {
            char[] var5 = var0.toCharArray();
            byte[] var6 = new byte[16];
            int var7 = var5.length;
            // A trailing '%' (empty zone id) is invalid; otherwise parsing
            // simply stops at the zone separator.
            int var8 = var0.indexOf("%");
            if (var8 == var7 - 1) {
                return null;
            } else {
                if (var8 != -1) {
                    var7 = var8;
                }
                int var1 = -1;        // byte index where "::" was seen (-1 if none)
                int var9 = 0;         // read cursor into the char array
                int var10 = 0;        // write cursor into the result bytes
                // A leading ':' is only legal as the start of "::".
                if (var5[var9] == ':') {
                    ++var9;
                    if (var5[var9] != ':') {
                        return null;
                    }
                }
                int var11 = var9;     // start of the current group (for the IPv4 tail)
                boolean var3 = false; // true while a hex group is being accumulated
                int var4 = 0;         // value of the current 16-bit group
                while (true) {
                    int var12;
                    while (var9 < var7) {
                        char var2 = var5[var9++];
                        var12 = Character.digit(var2, 16);
                        if (var12 != -1) {
                            var4 <<= 4;
                            var4 |= var12;
                            // Each group must fit in 16 bits.
                            if (var4 > 65535) {
                                return null;
                            }
                            var3 = true;
                        } else {
                            if (var2 != ':') {
                                // '.' introduces an embedded IPv4 tail, which
                                // needs 4 of the remaining result bytes.
                                if (var2 == '.' && var10 + 4 <= 16) {
                                    String var13 = var0.substring(var11, var7);
                                    // The tail must contain exactly three dots.
                                    int var14 = 0;
                                    for (int var15 = 0; (var15 = var13.indexOf(46, var15)) != -1; ++var15) {
                                        ++var14;
                                    }
                                    if (var14 != 3) {
                                        return null;
                                    }
                                    byte[] var16 = textToNumericFormatV4(var13);
                                    if (var16 == null) {
                                        return null;
                                    }
                                    for (int var17 = 0; var17 < 4; ++var17) {
                                        var6[var10++] = var16[var17];
                                    }
                                    var3 = false;
                                    break;
                                }
                                return null;
                            }
                            var11 = var9;
                            if (!var3) {
                                // Second ':' in a row -> "::"; only one allowed.
                                if (var1 != -1) {
                                    return null;
                                }
                                var1 = var10;
                            } else {
                                if (var9 == var7) {
                                    return null;
                                }
                                if (var10 + 2 > 16) {
                                    return null;
                                }
                                // Flush the finished 16-bit group as two bytes.
                                var6[var10++] = (byte) (var4 >> 8 & 255);
                                var6[var10++] = (byte) (var4 & 255);
                                var3 = false;
                                var4 = 0;
                            }
                        }
                    }
                    // Flush a trailing group, if one is pending.
                    if (var3) {
                        if (var10 + 2 > 16) {
                            return null;
                        }
                        var6[var10++] = (byte) (var4 >> 8 & 255);
                        var6[var10++] = (byte) (var4 & 255);
                    }
                    // Expand "::" by shifting the bytes written after it to
                    // the end and zero-filling the gap.
                    if (var1 != -1) {
                        var12 = var10 - var1;
                        // "::" must compress at least one zero group.
                        if (var10 == 16) {
                            return null;
                        }
                        for (var9 = 1; var9 <= var12; ++var9) {
                            var6[16 - var9] = var6[var1 + var12 - var9];
                            var6[var1 + var12 - var9] = 0;
                        }
                        var10 = 16;
                    }
                    if (var10 != 16) {
                        return null;
                    }
                    // Collapse IPv4-mapped addresses (::ffff:a.b.c.d) to 4 bytes.
                    byte[] var18 = convertFromIPv4MappedAddress(var6);
                    if (var18 != null) {
                        return var18;
                    }
                    return var6;
                }
            }
        }
    }

    /** @return true when var0 parses as an IPv4 literal. */
    public static boolean isIPv4LiteralAddress(String var0) {
        return textToNumericFormatV4(var0) != null;
    }

    /** @return true when var0 parses as an IPv6 literal. */
    public static boolean isIPv6LiteralAddress(String var0) {
        return textToNumericFormatV6(var0) != null;
    }

    /**
     * Extracts the IPv4 part (last 4 bytes) of an IPv4-mapped IPv6 address.
     *
     * @param var0 a 16-byte IPv6 address
     * @return the 4 IPv4 bytes, or null when var0 is not IPv4-mapped
     */
    public static byte[] convertFromIPv4MappedAddress(byte[] var0) {
        if (isIPv4MappedAddress(var0)) {
            byte[] var1 = new byte[4];
            System.arraycopy(var0, 12, var1, 0, 4);
            return var1;
        } else {
            return null;
        }
    }

    /** True for the ::ffff:0:0/96 prefix (ten zero bytes then 0xff 0xff). */
    private static boolean isIPv4MappedAddress(byte[] var0) {
        if (var0.length < 16) {
            return false;
        } else {
            return var0[0] == 0 && var0[1] == 0 && var0[2] == 0 && var0[3] == 0 && var0[4] == 0 && var0[5] == 0
                && var0[6] == 0 && var0[7] == 0 && var0[8] == 0 && var0[9] == 0
                && var0[10] == -1 && var0[11] == -1;
        }
    }
}
|
<gh_stars>10-100
/**
* @module meteoJS/timeline/navigationButtons
*/
import addEventFunctions from '../Events.js';
/**
* Determines how the time is chosen, when a button for time navigation is
* clicked. On "exact" the time in the timeline is only changed if the time
* exists. In all other cases the time will be changed and a suitable timestamp
* is chosen.
*
* @typedef {"exact"|"nearest"|"before"|"later"}
* module:meteoJS/timeline/navigationButtons~findTimeBy
*/
/**
* Options for constructor.
*
* @typedef {Object} module:meteoJS/timeline/navigationButtons~options
* @param {module:meteoJS/timeline.Timeline} timeline - Timeline object.
* @param {module:meteoJS/timeline/navigationButtons~findTimeBy} findTimeBy
* Determines how the time is chosen, when a button is clicked.
* @param {string|undefined} buttonClass - Default button class.
*/
/**
* @typedef {Object} module:meteoJS/timeline/navigationButtons~buttonDefinition
* @param {string|undefined} [buttonClass] - Class.
* @param {"first"|"last"|"prev"|"next"|"nextAllEnabled"|"prevAllEnabled"|"add"|"sub"}
* methodName - Method to execute on timeline, when button is clicked.
* @param {integer} [timeAmount] - Required when methodName is "add" or "sub."
* @param {string} [timeKey] - Required when methodName is "add" or "sub."
* @param {string} [text] - Text for button.
* @param {string} [title] - Title for button.
*/
/**
* @event module:meteoJS/timeline/navigationButtons#click:button
* @type {module:meteoJS/timeline/navigationButtons~buttonDefinition}
* @property {boolean} isTimeChanged - Time changed.
* @property {external:HTMLElement} button - Button.
* @property {"first"|"last"|"prev"|"next"|"nextAllEnabled"|"prevAllEnabled"|"add"|"sub"}
* methodName - Method executed on timeline.
* @property {integer} [timeAmount] - Passed if methodName is "add" or "sub."
* @property {string} [timeKey] - Passed if methodName is "add" or "sub."
*/
/**
* Class to create buttons and insert them into the DOM to navigate
* through the times of the passed timeline.
*
* <pre><code>import NavigationButtons from 'meteojs/timeline/NavigationButtons';</code></pre>
*
* @fires module:meteoJS/timeline/navigationButtons#click:button
*/
export class NavigationButtons {

  /**
   * @param {module:meteoJS/timeline/navigationButtons~options} [options]
   *   Options.
   */
  constructor({ timeline,
    findTimeBy = 'exact',
    buttonClass,
  } = {}) {
    /**
     * @type module:meteoJS/timeline.Timeline
     * @private
     */
    this.timeline = timeline;

    /**
     * @type module:meteoJS/timeline/navigationButtons~findTimeBy
     * @private
     */
    this.findTimeBy = findTimeBy;

    /**
     * @type string|undefined
     * @private
     */
    this.buttonClass = buttonClass;
  }

  /**
   * Creates button HTMLElements and append them to the passed node.
   *
   * @param {external:HTMLElement|external:jQuery} node - Node to insert the buttons into it.
   * @param {...module:meteoJS/timeline/navigationButtons~buttonDefinition}
   *   buttons - Button defintions to insert.
   */
  insertButtonInto(node, ...buttons) {
    buttons.forEach(({ buttonClass,
      methodName,
      timeAmount,
      timeKey,
      text,
      title } = {}) => {
      // Skip definitions whose methodName is not a supported timeline method.
      if (!/^(first|last|prev|next|nextAllEnabled|prevAllEnabled|add|sub)$/
        .test(methodName))
        return;
      // Default captions per navigation method.
      if (text === undefined)
        switch (methodName) {
        case 'first':
          text = '|«';
          break;
        case 'last':
          text = '»|';
          break;
        case 'prev':
          text = '«';
          break;
        case 'next':
          text = '»';
          break;
        case 'nextAllEnabled':
          text = '»';
          break;
        case 'prevAllEnabled':
          text = '«';
          break;
        case 'add':
          text = `+${timeAmount}${timeKey}`;
          break;
        case 'sub':
          text = `-${timeAmount}${timeKey}`;
          break;
        }
      let button = document.createElement('button');
      button.appendChild(document.createTextNode(text));
      button.setAttribute('type', 'button');
      // Per-button class wins over the instance-wide default class.
      if (typeof buttonClass == 'string')
        buttonClass.split(' ').map(c => button.classList.add(c));
      else if (typeof this.buttonClass == 'string')
        this.buttonClass.split(' ').map(c => button.classList.add(c));
      if (title !== undefined)
        button.setAttribute('title', title);
      button.addEventListener('click', () => {
        let isTimeChanged = true;
        let oldSelectedTime = this.timeline.getSelectedTime();
        switch (methodName) {
        case 'add':
          this.timeline.add(timeAmount, timeKey);
          // add/sub may land on the same timestamp; detect a no-op change.
          if (this.timeline.getSelectedTime().valueOf() ==
              oldSelectedTime.valueOf())
            isTimeChanged = false;
          break;
        case 'sub':
          this.timeline.sub(timeAmount, timeKey);
          if (this.timeline.getSelectedTime().valueOf() ==
              oldSelectedTime.valueOf())
            isTimeChanged = false;
          break;
        default:
          this.timeline[methodName]();
        }
        // Notify listeners; see the 'click:button' event documentation above.
        this.trigger('click:button', {
          isTimeChanged,
          button,
          methodName,
          timeAmount,
          timeKey
        });
      });
      // Accept either a plain HTMLElement or a jQuery-wrapped node.
      if (node.jquery)
        node[0].appendChild(button);
      else
        node.appendChild(button);
    });
  }
}
// Mix the on/trigger event API into NavigationButtons ('click:button' event).
addEventFunctions(NavigationButtons.prototype);
export default NavigationButtons;
|
/* eslint-disable multiline-ternary */
const path = require('path');
const { execSync } = require('child_process');
const loaderUtils = require('loader-utils');
const validateOptions = require('schema-utils');
const schema = require('./options.json');
const compileXcodeProj = require('./compile-xcodeproj');
const frameworkClass = require('./wrapped-xcodeproj-class');
/**
 * Walks up from `resourcePath` until it finds the enclosing `.xcodeproj` or
 * `.xcworkspace` directory.
 *
 * Fix: the original recursed forever once it reached the filesystem root,
 * because `path.dirname('/') === '/'` keeps the argument truthy. We now stop
 * (and throw) when the parent equals the current path.
 *
 * @param {string} resourcePath - File or directory to start from.
 * @returns {string} Path of the enclosing Xcode project/workspace.
 * @throws {Error} When no project is found up to the filesystem root.
 */
function findProjectPath(resourcePath) {
  if (!resourcePath) {
    throw new Error('Could not find the project to compile');
  }
  const ext = path.extname(resourcePath);
  if (ext === '.xcodeproj' || ext === '.xcworkspace') {
    return resourcePath;
  }
  const parent = path.dirname(resourcePath);
  // At the root, dirname() is a fixed point — bail out instead of recursing.
  if (parent === resourcePath) {
    throw new Error('Could not find the project to compile');
  }
  return findProjectPath(parent);
}
/**
 * Copies the folder at `filePath` into the parent directory of `outputPath`,
 * replacing any previous output.
 *
 * Fix: the original used `2&>/dev/null`, which the shell parses as an extra
 * argument "2" followed by `&>` — so `rm` also tried to delete a file named
 * "2" instead of just silencing stderr. `2>/dev/null` is the intended form.
 *
 * @param {string} filePath - Source folder (e.g. a built .framework).
 * @param {string} outputPath - Destination path; its basename must match
 *   the source folder's basename for the copy to land there.
 */
function emitFolder(filePath, outputPath) {
  const parentFolder = path.dirname(outputPath);
  execSync(
    `rm -rf "${outputPath}" 2>/dev/null && mkdir -p "${parentFolder}" && /bin/cp -fR "${filePath}" "${parentFolder}"`,
  );
}
// Webpack loader entry point: locates the enclosing Xcode project, compiles
// it (or reuses a prebuilt .framework), emits the framework folder into the
// webpack output directory, and returns a JS wrapper module pointing at it.
module.exports = function loader() {
  if (!this.emitFile) {
    throw new Error(
      'XcodeProj Loader\n\nemitFile is required from module system',
    );
  }

  const options = loaderUtils.getOptions(this) || {};

  validateOptions(schema, options, 'XcodeProj Loader');

  // Context resolution order differs across webpack versions.
  const context =
    options.context ||
    this.rootContext ||
    (this.options && this.options.context);

  // let url = loaderUtils.interpolateName(this, options.name, {
  //   context,
  //   content,
  //   regExp: options.regExp,
  // });

  // Framework name derives from the project name; '-' is replaced because it
  // is not a valid character in a framework/module name.
  const projectPath = findProjectPath(this.resourcePath);
  const url = `${path
    .basename(projectPath, path.extname(projectPath))
    .replace(/-/g, '_')}.framework`;

  let outputPath = url;

  // outputPath option may be a static prefix or a mapping function.
  if (options.outputPath) {
    if (typeof options.outputPath === 'function') {
      outputPath = options.outputPath(url);
    } else {
      outputPath = path.posix.join(options.outputPath, url);
    }
  }

  // Optionally mirror the source's location relative to the issuing module.
  if (options.useRelativePath) {
    const issuer = options.context
      ? context
      : this._module && this._module.issuer && this._module.issuer.context;

    const relativeUrl =
      issuer &&
      path
        .relative(issuer, projectPath)
        .split(path.sep)
        .join('/');

    const relativePath = relativeUrl && `${path.dirname(relativeUrl)}/`;
    // eslint-disable-next-line no-bitwise
    if (~relativePath.indexOf('../')) {
      outputPath = path.posix.join(outputPath, relativePath, url);
    } else {
      outputPath = path.posix.join(relativePath, url);
    }
  }

  // Default public path defers to webpack's runtime public path.
  let publicPath = `__webpack_public_path__ + ${JSON.stringify(outputPath)}`;

  if (options.publicPath) {
    if (typeof options.publicPath === 'function') {
      publicPath = options.publicPath(url);
    } else if (options.publicPath.endsWith('/')) {
      publicPath = options.publicPath + url;
    } else {
      publicPath = `${options.publicPath}/${url}`;
    }

    if (!options.raw) {
      publicPath = JSON.stringify(publicPath);
    }
  }

  if (options.emitFile === undefined || options.emitFile) {
    if (path.extname(projectPath) === '.framework') {
      // we already have a framework so no need to compile the project
      emitFolder(
        projectPath,
        path.join(this._compilation.compiler.outputPath, outputPath),
      );
      return frameworkClass(publicPath);
    }
    // Asynchronous branch: compile the project, then emit the built framework
    // from the shared cache directory.
    const callback = this.async();
    const cachePath = path.join(__dirname, '../.cache');
    compileXcodeProj(this, projectPath, cachePath, error => {
      if (error) {
        return callback(
          new Error(`Error compiling Xcode project: ${error.message}`),
        );
      }
      emitFolder(
        path.join(
          cachePath,
          `./Build/Products/Release/${path
            .basename(projectPath, path.extname(projectPath))
            .replace(/-/g, '_')}.framework`,
        ),
        path.join(this._compilation.compiler.outputPath, outputPath),
      );
      return callback(null, frameworkClass(publicPath));
    });
    return undefined;
  }

  return frameworkClass(publicPath);
};

// Receive the raw source Buffer; webpack must not stringify it first.
module.exports.raw = true;
|
<gh_stars>0
# frozen_string_literal: true
require "open3"
# Spec helper refinement providing helpers to run shell / Ruby commands and
# assert on their exit status, stdout and stderr.
module CommandTesting
  # Launcher for Ruby subprocesses.
  RUBY_RUNNER = if defined?(JRUBY_VERSION)
    # See https://github.com/jruby/jruby/wiki/Improving-startup-time#bundle-exec
    "jruby -G"
  else
    "bundle exec ruby"
  end

  refine MSpecEnv do
    # Runs +command+ (optionally in +chdir+ with extra +env+), asserts it
    # succeeded unless +should_fail+, and yields [status, output, err].
    def run(command, chdir: nil, should_fail: false, env: {})
      output, err, status =
        Open3.capture3(
          env,
          command,
          chdir: chdir || File.expand_path("../..", __dir__)
        )
      # Set COMMAND_DEBUG to echo every command and its streams.
      if ENV["COMMAND_DEBUG"]
        puts "\n\nCOMMAND:\n#{command}\n\nOUTPUT:\n#{output}\nERROR:\n#{err}\n"
      end
      status.success?.should == true unless should_fail
      yield status, output, err if block_given?
    end

    # Runs +command+ under plain Ruby with the project's lib dir on the path.
    def run_ruby(command, **options, &block)
      run("#{RUBY_RUNNER} -rbundler/setup -I#{File.join(__dir__, "../../lib")} #{command}", **options, &block)
    end

    # Runs +command+ through the project's ruby-next executable.
    def run_ruby_next(command, **options, &block)
      run("#{RUBY_RUNNER} #{File.join(__dir__, "../../bin/ruby-next")} #{command}", **options, &block)
    end
  end
end
|
/**
 * Returns the arithmetic mean of two numbers.
 * @param {number} a - First number.
 * @param {number} b - Second number.
 * @returns {number} The average of the two inputs.
 */
function findAverage(a, b) {
  const sum = a + b;
  return sum / 2;
}

// Demo: print the average of 10 and 15 (12.5).
const average = findAverage(10, 15);
console.log(average);
|
package com.android_group10.needy;
import com.google.firebase.database.Exclude;
import com.google.firebase.database.IgnoreExtraProperties;
import java.util.HashMap;
import java.util.Map;
@IgnoreExtraProperties
public class UserRating {

    private String userUID;   // Firebase UID of the user this rating belongs to
    private int ratingType;   // 1 = author rated, 2 = volunteer rated
    private int ratingValue;  // rating value in the range 1-5

    /** Full constructor used when persisting a complete rating. */
    public UserRating(String userUID, int ratingType, int ratingValue) {
        this.userUID = userUID;
        this.ratingType = ratingType;
        this.ratingValue = ratingValue;
    }

    /** Convenience constructor for a value-only rating. */
    public UserRating(int ratingValue) {
        this.ratingValue = ratingValue;
    }

    /** No-arg constructor required by Firebase deserialization. */
    public UserRating() {
    }

    public String getUserUID() {
        return userUID;
    }

    public void setUserUID(String userUID) {
        this.userUID = userUID;
    }

    public int getRatingType() {
        return ratingType;
    }

    public void setRatingType(int ratingType) {
        this.ratingType = ratingType;
    }

    public int getRatingValue() {
        return ratingValue;
    }

    public void setRatingValue(int ratingValue) {
        this.ratingValue = ratingValue;
    }

    @Override
    public String toString() {
        // Same textual form as before: UserRating{ratingValue=N}
        return String.format("UserRating{ratingValue=%d}", ratingValue);
    }

    /** Map form ({"value": ratingValue}) for multi-location Firebase updates. */
    @Exclude
    public Map<String, Integer> toMap() {
        HashMap<String, Integer> result = new HashMap<>();
        result.put("value", ratingValue);
        return result;
    }
}
|
#pragma once
#include <stdexcept>
// Exception types thrown by the frea math library. Each carries its own
// name as the what() message; invalid_argument subclasses signal bad caller
// input, runtime_error subclasses signal failures detected during computation.
namespace frea {
// An axis argument passed by the caller was invalid.
struct InvalidAxis : std::invalid_argument {
InvalidAxis(): std::invalid_argument("InvalidAxis") {}
};
// No valid axis could be determined during a computation.
struct NoValidAxis : std::runtime_error {
NoValidAxis(): std::runtime_error("NoValidAxis") {}
};
// A field-of-view argument passed by the caller was invalid.
struct InvalidFov : std::invalid_argument {
InvalidFov(): std::invalid_argument("InvalidFov") {}
};
// A matrix turned out to be non-invertible when an inverse was required.
struct NoInverseMatrix : std::runtime_error {
NoInverseMatrix(): std::runtime_error("NoInverseMatrix") {}
};
}
|
<reponame>nihei9/vartan
package grammar
import (
"encoding/json"
"fmt"
"os"
"strings"
mlcompiler "github.com/nihei9/maleeni/compiler"
mlspec "github.com/nihei9/maleeni/spec"
verr "github.com/nihei9/vartan/error"
"github.com/nihei9/vartan/spec"
)
// astActionEntry describes one child of an #ast directive's tree parameter:
// the 1-based position of an RHS element and whether the expansion operator
// was applied to it (expansion is valid only for non-terminals).
type astActionEntry struct {
position int
expansion bool
}
// assocType is the associativity of a terminal symbol or production.
type assocType string
const (
assocTypeNil = assocType("")
assocTypeLeft = assocType("left")
assocTypeRight = assocType("right")
)
// Precedence levels. precNil marks "no precedence assigned"; declared
// precedences start at precMin and grow with each associativity meta-data
// entry (see genPrecAndAssoc).
const (
precNil = 0
precMin = 1
)
// precAndAssoc represents precedence and associativities of terminal symbols and productions.
// We use the priority of the production to resolve shift/reduce conflicts.
type precAndAssoc struct {
// termPrec and termAssoc represent the precedence of the terminal symbols.
termPrec map[symbolNum]int
termAssoc map[symbolNum]assocType
// prodPrec and prodAssoc represent the precedence and the associativities of the production.
// These values are inherited from the right-most symbols in the RHS of the productions.
prodPrec map[productionNum]int
prodAssoc map[productionNum]assocType
}
// terminalPrecedence returns the precedence level declared for the terminal
// symbol sym, or precNil when no precedence was assigned to it.
func (pa *precAndAssoc) terminalPrecedence(sym symbolNum) int {
	if p, found := pa.termPrec[sym]; found {
		return p
	}
	return precNil
}
// terminalAssociativity returns the associativity declared for the terminal
// symbol sym, or assocTypeNil when none was assigned to it.
func (pa *precAndAssoc) terminalAssociativity(sym symbolNum) assocType {
	if a, found := pa.termAssoc[sym]; found {
		return a
	}
	return assocTypeNil
}
// productionPredence returns the precedence level of the production prod,
// or precNil when none was assigned.
// NOTE(review): the name is misspelled ("Predence" → "Precedence") but is
// kept as-is because callers elsewhere in the package depend on it.
func (pa *precAndAssoc) productionPredence(prod productionNum) int {
	if p, found := pa.prodPrec[prod]; found {
		return p
	}
	return precNil
}
// productionAssociativity returns the associativity of the production prod,
// or assocTypeNil when none was assigned.
func (pa *precAndAssoc) productionAssociativity(prod productionNum) assocType {
	if a, found := pa.prodAssoc[prod]; found {
		return a
	}
	return assocTypeNil
}
// reservedSymbolNameError is the terminal name reserved for error recovery;
// user specs must not define a lexical production with this name.
const reservedSymbolNameError = "error"
// Grammar is the fully analyzed, compile-ready representation of a grammar
// spec: the lexical specification plus productions, symbol table, AST
// actions, and precedence/associativity data. Built by GrammarBuilder.Build.
type Grammar struct {
name string
lexSpec *mlspec.LexSpec
skipLexKinds []mlspec.LexKindName
kindAliases map[symbol]string
sym2AnonPat map[symbol]string
productionSet *productionSet
augmentedStartSymbol symbol
errorSymbol symbol
symbolTable *symbolTable
astActions map[productionID][]*astActionEntry
precAndAssoc *precAndAssoc
// recoverProductions is a set of productions having the recover directive.
recoverProductions map[productionID]struct{}
}
// GrammarBuilder builds a Grammar from a parsed spec AST, accumulating
// spec-level problems in errs rather than failing on the first one.
type GrammarBuilder struct {
AST *spec.RootNode
errs verr.SpecErrors
}
// Build compiles b.AST into a Grammar. Spec-level problems are accumulated
// in b.errs and returned (as verr.SpecErrors) once all checks have run, so
// one pass can report many errors; a plain non-nil error is returned only
// for unexpected internal failures.
func (b *GrammarBuilder) Build() (*Grammar, error) {
// Extract the grammar name from the %name meta data entry.
var specName string
{
errOccurred := false
for _, md := range b.AST.MetaData {
if md.Name != "name" {
continue
}
if len(md.Parameters) != 1 || md.Parameters[0].ID == "" {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrMDInvalidParam,
Detail: fmt.Sprintf("'name' takes just one ID parameter"),
Row: md.Pos.Row,
Col: md.Pos.Col,
})
errOccurred = true
break
}
specName = md.Parameters[0].ID
break
}
if specName == "" && !errOccurred {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrMDMissingName,
})
}
}
symTabAndLexSpec, err := b.genSymbolTableAndLexSpec(b.AST)
if err != nil {
return nil, err
}
prodsAndActs, err := b.genProductionsAndActions(b.AST, symTabAndLexSpec)
if err != nil {
return nil, err
}
// genProductionsAndActions returns (nil, nil) when it only recorded spec
// errors; surface them now because the steps below need its result.
if prodsAndActs == nil && len(b.errs) > 0 {
return nil, b.errs
}
pa, err := b.genPrecAndAssoc(symTabAndLexSpec.symTab, prodsAndActs.prods, prodsAndActs.prodPrecs)
if err != nil {
return nil, err
}
syms, err := findUsedAndUnusedSymbols(b.AST)
if err != nil {
return nil, err
}
// When a terminal symbol that cannot be reached from the start symbol has the skip directive,
// the compiler treats its terminal as a used symbol, not unused.
for _, sym := range symTabAndLexSpec.skipSyms {
if _, ok := syms.unusedTerminals[sym]; !ok {
prod := syms.usedTerminals[sym]
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrTermCannotBeSkipped,
Detail: sym,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
continue
}
delete(syms.unusedTerminals, sym)
}
// Report every symbol that is defined but never reachable from the start symbol.
for sym, prod := range syms.unusedProductions {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrUnusedProduction,
Detail: sym,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
}
for sym, prod := range syms.unusedTerminals {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrUnusedTerminal,
Detail: sym,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
}
if len(b.errs) > 0 {
return nil, b.errs
}
symTabAndLexSpec.lexSpec.Name = specName
return &Grammar{
name: specName,
lexSpec: symTabAndLexSpec.lexSpec,
skipLexKinds: symTabAndLexSpec.skip,
kindAliases: symTabAndLexSpec.aliases,
sym2AnonPat: symTabAndLexSpec.sym2AnonPat,
productionSet: prodsAndActs.prods,
augmentedStartSymbol: prodsAndActs.augStartSym,
errorSymbol: symTabAndLexSpec.errSym,
symbolTable: symTabAndLexSpec.symTab,
astActions: prodsAndActs.astActs,
recoverProductions: prodsAndActs.recoverProds,
precAndAssoc: pa,
}, nil
}
// usedAndUnusedSymbols partitions the symbols defined in a spec by whether
// they are reachable from the start symbol; keys are symbol names and values
// are the productions defining them.
type usedAndUnusedSymbols struct {
unusedProductions map[string]*spec.ProductionNode
unusedTerminals map[string]*spec.ProductionNode
usedTerminals map[string]*spec.ProductionNode
}
// findUsedAndUnusedSymbols walks the grammar reachable from the start symbol
// (the first production) and partitions all defined symbols into used
// terminals, unused terminals, and unused non-terminal productions.
// A non-nil error indicates an internal inconsistency (a referenced symbol
// with no definition), not a user-facing spec error.
func findUsedAndUnusedSymbols(root *spec.RootNode) (*usedAndUnusedSymbols, error) {
prods := map[string]*spec.ProductionNode{}
lexProds := map[string]*spec.ProductionNode{}
// mark records, for every defined or referenced symbol name, whether it is
// reachable from the start symbol.
mark := map[string]bool{}
{
for _, p := range root.Productions {
prods[p.LHS] = p
mark[p.LHS] = false
for _, alt := range p.RHS {
for _, e := range alt.Elements {
if e.ID == "" {
continue
}
mark[e.ID] = false
}
}
}
for _, p := range root.LexProductions {
lexProds[p.LHS] = p
mark[p.LHS] = false
}
start := root.Productions[0]
mark[start.LHS] = true
markUsedSymbols(mark, map[string]bool{}, prods, start)
// We don't have to check the error symbol because the error symbol doesn't have a production.
delete(mark, reservedSymbolNameError)
}
usedTerms := make(map[string]*spec.ProductionNode, len(lexProds))
unusedProds := map[string]*spec.ProductionNode{}
unusedTerms := map[string]*spec.ProductionNode{}
for sym, used := range mark {
if p, ok := prods[sym]; ok {
if used {
continue
}
unusedProds[sym] = p
continue
}
if p, ok := lexProds[sym]; ok {
if used {
usedTerms[sym] = p
} else {
unusedTerms[sym] = p
}
continue
}
return nil, fmt.Errorf("a definition of unused production was not found: %v", sym)
}
return &usedAndUnusedSymbols{
usedTerminals: usedTerms,
unusedProductions: unusedProds,
unusedTerminals: unusedTerms,
}, nil
}
// markUsedSymbols sets mark[s] = true for every symbol s reachable from prod
// by recursively following RHS elements. marked records productions already
// visited on the current walk so cyclic grammars terminate.
func markUsedSymbols(mark map[string]bool, marked map[string]bool, prods map[string]*spec.ProductionNode, prod *spec.ProductionNode) {
if marked[prod.LHS] {
return
}
for _, alt := range prod.RHS {
for _, e := range alt.Elements {
if e.ID == "" {
continue
}
mark[e.ID] = true
p, ok := prods[e.ID]
if !ok {
continue
}
// Record the current production as visited before recursing to avoid infinite recursion.
marked[prod.LHS] = true
markUsedSymbols(mark, marked, prods, p)
}
}
}
// symbolTableAndLexSpec bundles everything produced while registering
// terminal symbols: the symbol table, the anonymous-pattern↔symbol
// mappings, the generated maleeni lexical spec, the reserved error symbol,
// the skip kinds/symbols, and the kind aliases.
type symbolTableAndLexSpec struct {
symTab *symbolTable
anonPat2Sym map[string]symbol
sym2AnonPat map[symbol]string
lexSpec *mlspec.LexSpec
errSym symbol
skip []mlspec.LexKindName
skipSyms []string
aliases map[symbol]string
}
// genSymbolTableAndLexSpec registers all terminal symbols and builds the
// maleeni lexical specification. Registration order matters: the reserved
// error symbol first, then anonymous patterns found in productions, then
// named lexical productions, then fragments. Spec-level problems (duplicate
// terminals, use of the reserved name) go into b.errs.
func (b *GrammarBuilder) genSymbolTableAndLexSpec(root *spec.RootNode) (*symbolTableAndLexSpec, error) {
// Anonymous patterns take precedence over explicitly defined lexical specifications (named patterns).
// Thus anonymous patterns must be registered to `symTab` and `entries` before named patterns.
symTab := newSymbolTable()
entries := []*mlspec.LexEntry{}
// We need to register the reserved symbol before registering others.
var errSym symbol
{
sym, err := symTab.registerTerminalSymbol(reservedSymbolNameError)
if err != nil {
return nil, err
}
errSym = sym
}
// Collect every distinct anonymous pattern appearing in production RHS
// elements, in first-appearance order, and register one terminal per pattern.
anonPat2Sym := map[string]symbol{}
sym2AnonPat := map[symbol]string{}
aliases := map[symbol]string{}
{
knownPats := map[string]struct{}{}
anonPats := []string{}
literalPats := map[string]struct{}{}
for _, prod := range root.Productions {
for _, alt := range prod.RHS {
for _, elem := range alt.Elements {
if elem.Pattern == "" {
continue
}
var pattern string
if elem.Literally {
pattern = mlspec.EscapePattern(elem.Pattern)
} else {
pattern = elem.Pattern
}
if _, ok := knownPats[pattern]; ok {
continue
}
knownPats[pattern] = struct{}{}
anonPats = append(anonPats, pattern)
if elem.Literally {
literalPats[pattern] = struct{}{}
}
}
}
}
// Anonymous terminals get synthetic kind names x_1, x_2, ...
for i, p := range anonPats {
kind := fmt.Sprintf("x_%v", i+1)
sym, err := symTab.registerTerminalSymbol(kind)
if err != nil {
return nil, err
}
anonPat2Sym[p] = sym
sym2AnonPat[sym] = p
if _, ok := literalPats[p]; ok {
aliases[sym] = p
}
entries = append(entries, &mlspec.LexEntry{
Kind: mlspec.LexKindName(kind),
Pattern: mlspec.LexPattern(p),
})
}
}
// Register the named lexical productions.
skipKinds := []mlspec.LexKindName{}
skipSyms := []string{}
for _, prod := range root.LexProductions {
if sym, exist := symTab.toSymbol(prod.LHS); exist {
if sym == errSym {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrErrSymIsReserved,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
} else {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDuplicateTerminal,
Detail: prod.LHS,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
}
continue
}
lhsSym, err := symTab.registerTerminalSymbol(prod.LHS)
if err != nil {
return nil, err
}
entry, skip, alias, specErr, err := genLexEntry(prod)
if err != nil {
return nil, err
}
if specErr != nil {
b.errs = append(b.errs, specErr)
continue
}
if skip {
skipKinds = append(skipKinds, mlspec.LexKindName(prod.LHS))
skipSyms = append(skipSyms, prod.LHS)
}
if alias != "" {
aliases[lhsSym] = alias
}
entries = append(entries, entry)
}
// Register fragments last; they are lex-spec-only and get no symbols.
checkedFragments := map[string]struct{}{}
for _, fragment := range root.Fragments {
if _, exist := checkedFragments[fragment.LHS]; exist {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDuplicateTerminal,
Detail: fragment.LHS,
Row: fragment.Pos.Row,
Col: fragment.Pos.Col,
})
continue
}
checkedFragments[fragment.LHS] = struct{}{}
entries = append(entries, &mlspec.LexEntry{
Fragment: true,
Kind: mlspec.LexKindName(fragment.LHS),
Pattern: mlspec.LexPattern(fragment.RHS),
})
}
return &symbolTableAndLexSpec{
symTab: symTab,
anonPat2Sym: anonPat2Sym,
sym2AnonPat: sym2AnonPat,
lexSpec: &mlspec.LexSpec{
Entries: entries,
},
errSym: errSym,
skip: skipKinds,
skipSyms: skipSyms,
aliases: aliases,
}, nil
}
// genLexEntry converts one lexical production into a maleeni lex entry,
// interpreting the production-level 'mode' directive and the
// alternative-level 'skip' / 'push' / 'pop' / 'alias' directives.
//
// It returns:
//   - the generated *mlspec.LexEntry (nil when a spec error was found)
//   - whether the token carries the 'skip' directive
//   - the token's alias ("" when none; literal patterns alias themselves)
//   - a *verr.SpecError describing a user-facing spec problem, or nil
//   - a non-nil error only for unexpected internal failures
//
// Fix: constant Detail strings were previously wrapped in fmt.Sprintf with
// no format verbs (staticcheck S1039); the wrappers are removed — the
// resulting strings are byte-identical.
func genLexEntry(prod *spec.ProductionNode) (*mlspec.LexEntry, bool, string, *verr.SpecError, error) {
	var modes []mlspec.LexModeName
	if prod.Directive != nil {
		dir := prod.Directive
		switch dir.Name {
		case "mode":
			if len(dir.Parameters) == 0 {
				return nil, false, "", &verr.SpecError{
					Cause:  semErrDirInvalidParam,
					Detail: "'mode' directive needs an ID parameter",
					Row:    dir.Pos.Row,
					Col:    dir.Pos.Col,
				}, nil
			}
			for _, param := range dir.Parameters {
				if param.ID == "" {
					return nil, false, "", &verr.SpecError{
						Cause:  semErrDirInvalidParam,
						Detail: "'mode' directive needs an ID parameter",
						Row:    param.Pos.Row,
						Col:    param.Pos.Col,
					}, nil
				}
				modes = append(modes, mlspec.LexModeName(param.ID))
			}
		default:
			return nil, false, "", &verr.SpecError{
				Cause:  semErrDirInvalidName,
				Detail: dir.Name,
				Row:    dir.Pos.Row,
				Col:    dir.Pos.Col,
			}, nil
		}
	}
	// A lexical production has a single alternative with a single element.
	alt := prod.RHS[0]
	elem := alt.Elements[0]
	var pattern string
	var alias string
	if elem.Literally {
		// Literal patterns are matched verbatim, so regexp metacharacters
		// must be escaped; the raw text doubles as the token's alias.
		pattern = mlspec.EscapePattern(elem.Pattern)
		alias = elem.Pattern
	} else {
		pattern = elem.Pattern
	}
	var skip bool
	var push mlspec.LexModeName
	var pop bool
	if alt.Directive != nil {
		dir := alt.Directive
		switch dir.Name {
		case "skip":
			if len(dir.Parameters) > 0 {
				return nil, false, "", &verr.SpecError{
					Cause:  semErrDirInvalidParam,
					Detail: "'skip' directive needs no parameter",
					Row:    dir.Pos.Row,
					Col:    dir.Pos.Col,
				}, nil
			}
			skip = true
		case "push":
			if len(dir.Parameters) != 1 || dir.Parameters[0].ID == "" {
				return nil, false, "", &verr.SpecError{
					Cause:  semErrDirInvalidParam,
					Detail: "'push' directive needs an ID parameter",
					Row:    dir.Pos.Row,
					Col:    dir.Pos.Col,
				}, nil
			}
			push = mlspec.LexModeName(dir.Parameters[0].ID)
		case "pop":
			if len(dir.Parameters) > 0 {
				return nil, false, "", &verr.SpecError{
					Cause:  semErrDirInvalidParam,
					Detail: "'pop' directive needs no parameter",
					Row:    dir.Pos.Row,
					Col:    dir.Pos.Col,
				}, nil
			}
			pop = true
		case "alias":
			if len(dir.Parameters) != 1 || dir.Parameters[0].String == "" {
				return nil, false, "", &verr.SpecError{
					Cause:  semErrDirInvalidParam,
					Detail: "'alias' directive needs a string parameter",
					Row:    dir.Pos.Row,
					Col:    dir.Pos.Col,
				}, nil
			}
			alias = dir.Parameters[0].String
		default:
			return nil, false, "", &verr.SpecError{
				Cause:  semErrDirInvalidName,
				Detail: dir.Name,
				Row:    dir.Pos.Row,
				Col:    dir.Pos.Col,
			}, nil
		}
	}
	return &mlspec.LexEntry{
		Modes:   modes,
		Kind:    mlspec.LexKindName(prod.LHS),
		Pattern: mlspec.LexPattern(pattern),
		Push:    push,
		Pop:     pop,
	}, skip, alias, nil, nil
}
// productionsAndActions is the result of genProductionsAndActions: the full
// production set (including the augmented start production), the augmented
// start symbol, AST actions per production, per-production precedence
// symbols from #prec directives, and the productions carrying #recover.
type productionsAndActions struct {
prods *productionSet
augStartSym symbol
astActs map[productionID][]*astActionEntry
prodPrecs map[productionID]symbol
recoverProds map[productionID]struct{}
}
// genProductionsAndActions registers all non-terminal symbols, builds the
// production set (prepending the augmented start production), and interprets
// the alternative-level #ast, #prec, and #recover directives.
// Spec-level problems go into b.errs; when only such problems occurred the
// function returns (nil, nil) and the caller inspects b.errs. The LOOP_RHS
// label lets directive validation abandon just the offending alternative.
func (b *GrammarBuilder) genProductionsAndActions(root *spec.RootNode, symTabAndLexSpec *symbolTableAndLexSpec) (*productionsAndActions, error) {
symTab := symTabAndLexSpec.symTab
anonPat2Sym := symTabAndLexSpec.anonPat2Sym
errSym := symTabAndLexSpec.errSym
if len(root.Productions) == 0 {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrNoProduction,
})
return nil, nil
}
prods := newProductionSet()
var augStartSym symbol
astActs := map[productionID][]*astActionEntry{}
prodPrecs := map[productionID]symbol{}
recoverProds := map[productionID]struct{}{}
// The first production defines the start symbol; synthesize the augmented
// start symbol "<start>'" and the production <start>' → <start>.
startProd := root.Productions[0]
augStartText := fmt.Sprintf("%s'", startProd.LHS)
var err error
augStartSym, err = symTab.registerStartSymbol(augStartText)
if err != nil {
return nil, err
}
if augStartSym == errSym {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrErrSymIsReserved,
Row: startProd.Pos.Row,
Col: startProd.Pos.Col,
})
}
startSym, err := symTab.registerNonTerminalSymbol(startProd.LHS)
if err != nil {
return nil, err
}
if startSym == errSym {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrErrSymIsReserved,
Row: startProd.Pos.Row,
Col: startProd.Pos.Col,
})
}
p, err := newProduction(augStartSym, []symbol{
startSym,
})
if err != nil {
return nil, err
}
prods.append(p)
// First pass: register every LHS so that forward references resolve, and
// detect LHS names that clash with terminals or the reserved error symbol.
for _, prod := range root.Productions {
sym, err := symTab.registerNonTerminalSymbol(prod.LHS)
if err != nil {
return nil, err
}
if sym.isTerminal() {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDuplicateName,
Detail: prod.LHS,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
}
if sym == errSym {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrErrSymIsReserved,
Row: prod.Pos.Row,
Col: prod.Pos.Col,
})
}
}
// Second pass: build the productions and interpret alternative directives.
for _, prod := range root.Productions {
lhsSym, ok := symTab.toSymbol(prod.LHS)
if !ok {
// All symbols are assumed to be pre-detected, so it's a bug if we cannot find them here.
return nil, fmt.Errorf("symbol '%v' is undefined", prod.LHS)
}
LOOP_RHS:
for _, alt := range prod.RHS {
altSyms := make([]symbol, len(alt.Elements))
for i, elem := range alt.Elements {
var sym symbol
if elem.Pattern != "" {
var pattern string
if elem.Literally {
pattern = mlspec.EscapePattern(elem.Pattern)
} else {
pattern = elem.Pattern
}
var ok bool
sym, ok = anonPat2Sym[pattern]
if !ok {
// All patterns are assumed to be pre-detected, so it's a bug if we cannot find them here.
return nil, fmt.Errorf("pattern '%v' is undefined", pattern)
}
} else {
var ok bool
sym, ok = symTab.toSymbol(elem.ID)
if !ok {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrUndefinedSym,
Detail: elem.ID,
Row: elem.Pos.Row,
Col: elem.Pos.Col,
})
continue LOOP_RHS
}
}
altSyms[i] = sym
}
p, err := newProduction(lhsSym, altSyms)
if err != nil {
return nil, err
}
if _, exist := prods.findByID(p.id); exist {
// Report the line number of a duplicate alternative.
// When the alternative is empty, we report the position of its LHS.
var row int
var col int
if len(alt.Elements) > 0 {
row = alt.Elements[0].Pos.Row
col = alt.Elements[0].Pos.Col
} else {
row = prod.Pos.Row
col = prod.Pos.Col
}
var detail string
{
// NOTE: this strings.Builder shadows the *GrammarBuilder receiver `b`
// within this scope.
var b strings.Builder
fmt.Fprintf(&b, "%v →", prod.LHS)
for _, elem := range alt.Elements {
switch {
case elem.ID != "":
fmt.Fprintf(&b, " %v", elem.ID)
case elem.Pattern != "":
fmt.Fprintf(&b, ` "%v"`, elem.Pattern)
}
}
if len(alt.Elements) == 0 {
fmt.Fprintf(&b, " ε")
}
detail = b.String()
}
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDuplicateProduction,
Detail: detail,
Row: row,
Col: col,
})
continue LOOP_RHS
}
prods.append(p)
if alt.Directive != nil {
dir := alt.Directive
switch dir.Name {
case "ast":
if len(dir.Parameters) != 1 || dir.Parameters[0].Tree == nil {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("'ast' directive needs a tree parameter"),
Row: dir.Pos.Row,
Col: dir.Pos.Col,
})
continue LOOP_RHS
}
param := dir.Parameters[0]
lhsText, ok := symTab.toText(p.lhs)
if !ok || param.Tree.Name != lhsText {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("a name of a tree structure must be the same ID as an LHS of a production; LHS: %v", lhsText),
Row: param.Pos.Row,
Col: param.Pos.Col,
})
continue LOOP_RHS
}
astAct := make([]*astActionEntry, len(param.Tree.Children))
for i, c := range param.Tree.Children {
// Positions are 1-based indexes into the alternative's elements.
// NOTE(review): the Detail string below misspells "alternative".
if c.Position > len(alt.Elements) {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("a position must be less than or equal to the length of an alternativ (%v)", len(alt.Elements)),
Row: c.Pos.Row,
Col: c.Pos.Col,
})
continue LOOP_RHS
}
if c.Expansion {
offset := c.Position - 1
elem := alt.Elements[offset]
if elem.Pattern != "" {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("the expansion symbol cannot be applied to a pattern ($%v: %v)", c.Position, elem.Pattern),
Row: c.Pos.Row,
Col: c.Pos.Col,
})
continue LOOP_RHS
}
elemSym, ok := symTab.toSymbol(elem.ID)
if !ok {
// If the symbol was not found, it's a bug.
return nil, fmt.Errorf("a symbol corresponding to a position ($%v: %v) was not found", c.Position, elem.ID)
}
if elemSym.isTerminal() {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("the expansion symbol cannot be applied to a terminal symbol ($%v: %v)", c.Position, elem.ID),
Row: c.Pos.Row,
Col: c.Pos.Col,
})
continue LOOP_RHS
}
}
astAct[i] = &astActionEntry{
position: c.Position,
expansion: c.Expansion,
}
}
astActs[p.id] = astAct
case "prec":
if len(dir.Parameters) != 1 || dir.Parameters[0].ID == "" {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("'prec' directive needs an ID parameter"),
Row: dir.Pos.Row,
Col: dir.Pos.Col,
})
continue LOOP_RHS
}
sym, ok := symTab.toSymbol(dir.Parameters[0].ID)
if !ok {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("unknown terminal symbol: %v", dir.Parameters[0].ID),
Row: dir.Pos.Row,
Col: dir.Pos.Col,
})
continue LOOP_RHS
}
if !sym.isTerminal() {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("the symbol must be a terminal: %v", dir.Parameters[0].ID),
Row: dir.Pos.Row,
Col: dir.Pos.Col,
})
continue LOOP_RHS
}
prodPrecs[p.id] = sym
case "recover":
if len(dir.Parameters) > 0 {
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidParam,
Detail: fmt.Sprintf("'recover' directive needs no parameter"),
Row: dir.Pos.Row,
Col: dir.Pos.Col,
})
continue LOOP_RHS
}
recoverProds[p.id] = struct{}{}
default:
b.errs = append(b.errs, &verr.SpecError{
Cause: semErrDirInvalidName,
Detail: fmt.Sprintf("invalid directive name '%v'", dir.Name),
Row: dir.Pos.Row,
Col: dir.Pos.Col,
})
continue LOOP_RHS
}
}
}
}
return &productionsAndActions{
prods: prods,
augStartSym: augStartSym,
astActs: astActs,
prodPrecs: prodPrecs,
recoverProds: recoverProds,
}, nil
}
// genPrecAndAssoc computes precedence and associativity tables from the
// %left / %right meta data entries. Each entry introduces one precedence
// level (starting at precMin, later entries bind tighter). A production
// inherits precedence from its #prec symbol when given, otherwise from the
// right-most terminal of its RHS.
func (b *GrammarBuilder) genPrecAndAssoc(symTab *symbolTable, prods *productionSet, prodPrecs map[productionID]symbol) (*precAndAssoc, error) {
termPrec := map[symbolNum]int{}
termAssoc := map[symbolNum]assocType{}
{
precN := precMin
for _, md := range b.AST.MetaData {
var assocTy assocType
switch md.Name {
case "left":
assocTy = assocTypeLeft
case "right":
assocTy = assocTypeRight
case "name":
// Since `name` is used for a purpose other than priority, we will ignore it here.
continue
default:
return nil, &verr.SpecError{
Cause: semErrMDInvalidName,
Row: md.Pos.Row,
Col: md.Pos.Col,
}
}
if len(md.Parameters) == 0 {
return nil, &verr.SpecError{
Cause: semErrMDInvalidParam,
Detail: "associativity needs at least one symbol",
Row: md.Pos.Row,
Col: md.Pos.Col,
}
}
// All symbols in one entry share the same precedence level and associativity.
for _, p := range md.Parameters {
sym, ok := symTab.toSymbol(p.ID)
if !ok {
return nil, &verr.SpecError{
Cause: semErrMDInvalidParam,
Detail: fmt.Sprintf("'%v' is undefined", p.ID),
Row: p.Pos.Row,
Col: p.Pos.Col,
}
}
if !sym.isTerminal() {
return nil, &verr.SpecError{
Cause: semErrMDInvalidParam,
Detail: fmt.Sprintf("associativity can take only terminal symbol ('%v' is a non-terminal)", p.ID),
Row: p.Pos.Row,
Col: p.Pos.Col,
}
}
termPrec[sym.num()] = precN
termAssoc[sym.num()] = assocTy
}
precN++
}
}
// Derive each production's precedence/associativity from its #prec symbol
// or, failing that, the right-most terminal in its RHS.
prodPrec := map[productionNum]int{}
prodAssoc := map[productionNum]assocType{}
for _, prod := range prods.getAllProductions() {
term, ok := prodPrecs[prod.id]
if !ok {
mostrightTerm := symbolNil
for _, sym := range prod.rhs {
if !sym.isTerminal() {
continue
}
mostrightTerm = sym
}
term = mostrightTerm
}
if term.isNil() {
continue
}
prec, ok := termPrec[term.num()]
if !ok {
continue
}
assoc, ok := termAssoc[term.num()]
if !ok {
continue
}
prodPrec[prod.num] = prec
prodAssoc[prod.num] = assoc
}
return &precAndAssoc{
termPrec: termPrec,
termAssoc: termAssoc,
prodPrec: prodPrec,
prodAssoc: prodAssoc,
}, nil
}
// Class identifies the family of parsing table to generate.
type Class string
const (
ClassSLR Class = "SLR(1)"
ClassLALR Class = "LALR(1)"
)
// compileConfig holds the options applied by Compile; see CompileOption.
type compileConfig struct {
descriptionFileName string
class Class
}
// CompileOption mutates the compile configuration (functional-option pattern).
type CompileOption func(config *compileConfig)
// EnableDescription returns a CompileOption that makes Compile write a JSON
// description of the generated parsing table to fileName.
func EnableDescription(fileName string) CompileOption {
	return func(c *compileConfig) {
		c.descriptionFileName = fileName
	}
}
// SpecifyClass returns a CompileOption that selects the parsing-table class
// (SLR(1) or LALR(1)) generated by Compile.
func SpecifyClass(class Class) CompileOption {
	return func(c *compileConfig) {
		c.class = class
	}
}
// Compile turns an analyzed Grammar into a spec.CompiledGrammar: it compiles
// the maleeni lexer spec, builds kind↔terminal mappings, generates the LR(0)
// automaton and the SLR(1)/LALR(1) parsing table (LALR(1) by default), and
// flattens productions and AST actions into the arrays the runtime expects.
// Options may additionally request a JSON description file.
func Compile(gram *Grammar, opts ...CompileOption) (*spec.CompiledGrammar, error) {
config := &compileConfig{
class: ClassLALR,
}
for _, opt := range opts {
opt(config)
}
lexSpec, err := mlcompiler.Compile(gram.lexSpec, mlcompiler.CompressionLevel(mlcompiler.CompressionLevelMax))
if err != nil {
return nil, err
}
// Build bidirectional mappings between lexer kinds and terminal symbol
// numbers, plus the skip flags per kind.
kind2Term := make([]int, len(lexSpec.KindNames))
term2Kind := make([]int, gram.symbolTable.termNum.Int())
skip := make([]int, len(lexSpec.KindNames))
for i, k := range lexSpec.KindNames {
if k == mlspec.LexKindNameNil {
kind2Term[mlspec.LexKindIDNil] = symbolNil.num().Int()
term2Kind[symbolNil.num()] = mlspec.LexKindIDNil.Int()
continue
}
sym, ok := gram.symbolTable.toSymbol(k.String())
if !ok {
return nil, fmt.Errorf("terminal symbol '%v' was not found in a symbol table", k)
}
kind2Term[i] = sym.num().Int()
term2Kind[sym.num()] = i
for _, sk := range gram.skipLexKinds {
if k != sk {
continue
}
skip[i] = 1
break
}
}
terms, err := gram.symbolTable.terminalTexts()
if err != nil {
return nil, err
}
kindAliases := make([]string, gram.symbolTable.termNum.Int())
for _, sym := range gram.symbolTable.terminalSymbols() {
kindAliases[sym.num().Int()] = gram.kindAliases[sym]
}
nonTerms, err := gram.symbolTable.nonTerminalTexts()
if err != nil {
return nil, err
}
firstSet, err := genFirstSet(gram.productionSet)
if err != nil {
return nil, err
}
lr0, err := genLR0Automaton(gram.productionSet, gram.augmentedStartSymbol, gram.errorSymbol)
if err != nil {
return nil, err
}
// Refine the LR(0) automaton into the requested table class.
var class string
var automaton *lr0Automaton
switch config.class {
case ClassSLR:
class = "slr"
followSet, err := genFollowSet(gram.productionSet, firstSet)
if err != nil {
return nil, err
}
slr1, err := genSLR1Automaton(lr0, gram.productionSet, followSet)
if err != nil {
return nil, err
}
automaton = slr1.lr0Automaton
case ClassLALR:
class = "lalr"
lalr1, err := genLALR1Automaton(lr0, gram.productionSet, firstSet)
if err != nil {
return nil, err
}
automaton = lalr1.lr0Automaton
}
var tab *ParsingTable
{
b := &lrTableBuilder{
class: config.class,
automaton: automaton,
prods: gram.productionSet,
termCount: len(terms),
nonTermCount: len(nonTerms),
symTab: gram.symbolTable,
sym2AnonPat: gram.sym2AnonPat,
precAndAssoc: gram.precAndAssoc,
}
tab, err = b.build()
if err != nil {
return nil, err
}
desc, err := b.genDescription(tab, gram)
if err != nil {
return nil, err
}
if config.descriptionFileName != "" {
f, err := os.OpenFile(config.descriptionFileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
return nil, err
}
defer f.Close()
d, err := json.Marshal(desc)
if err != nil {
return nil, err
}
// NOTE(review): the error (and short-write count) from f.Write is
// ignored — a failed description write goes unnoticed.
f.Write(d)
}
if len(b.conflicts) > 0 {
fmt.Fprintf(os.Stderr, "%v conflicts\n", len(b.conflicts))
}
}
// Flatten the table and per-production metadata into plain int slices for
// the runtime. Index 0 of the per-production slices is unused padding:
// production numbers start at 1.
action := make([]int, len(tab.actionTable))
for i, e := range tab.actionTable {
action[i] = int(e)
}
goTo := make([]int, len(tab.goToTable))
for i, e := range tab.goToTable {
goTo[i] = int(e)
}
lhsSyms := make([]int, len(gram.productionSet.getAllProductions())+1)
altSymCounts := make([]int, len(gram.productionSet.getAllProductions())+1)
recoverProds := make([]int, len(gram.productionSet.getAllProductions())+1)
astActEnties := make([][]int, len(gram.productionSet.getAllProductions())+1)
for _, p := range gram.productionSet.getAllProductions() {
lhsSyms[p.num] = p.lhs.num().Int()
altSymCounts[p.num] = p.rhsLen
if _, ok := gram.recoverProductions[p.id]; ok {
recoverProds[p.num] = 1
}
astAct, ok := gram.astActions[p.id]
if !ok {
continue
}
// Expansion entries are encoded as negative positions.
astActEntry := make([]int, len(astAct))
for i, e := range astAct {
if e.expansion {
astActEntry[i] = e.position * -1
} else {
astActEntry[i] = e.position
}
}
astActEnties[p.num] = astActEntry
}
return &spec.CompiledGrammar{
Name: gram.name,
LexicalSpecification: &spec.LexicalSpecification{
Lexer: "maleeni",
Maleeni: &spec.Maleeni{
Spec: lexSpec,
KindToTerminal: kind2Term,
TerminalToKind: term2Kind,
Skip: skip,
KindAliases: kindAliases,
},
},
ParsingTable: &spec.ParsingTable{
Class: class,
Action: action,
GoTo: goTo,
StateCount: tab.stateCount,
InitialState: tab.InitialState.Int(),
StartProduction: productionNumStart.Int(),
LHSSymbols: lhsSyms,
AlternativeSymbolCounts: altSymCounts,
Terminals: terms,
TerminalCount: tab.terminalCount,
NonTerminals: nonTerms,
NonTerminalCount: tab.nonTerminalCount,
EOFSymbol: symbolEOF.num().Int(),
ErrorSymbol: gram.errorSymbol.num().Int(),
ErrorTrapperStates: tab.errorTrapperStates,
RecoverProductions: recoverProds,
},
ASTAction: &spec.ASTAction{
Entries: astActEnties,
},
}, nil
}
|
#!/bin/zsh
# The @raycast.* lines below are metadata parsed by Raycast — do not reformat.
# @raycast.schemaVersion 1
# @raycast.title Reload CoreAudio
# @raycast.mode silent
# @raycast.author Maxim Krouk
# @raycast.authorURL https://github.com/maximkrouk
# @raycast.description Reloads CoreAudio.
# @raycast.icon 🎧
# @raycast.packageName System
# Restart the CoreAudio daemon. Uses sudo, so this may prompt for an
# administrator password; "Done" is only printed if both steps succeed.
sudo launchctl stop com.apple.audio.coreaudiod && sudo launchctl start com.apple.audio.coreaudiod
echo "Done"
|
<reponame>tipputhynhean/shawerma<filename>__tests__/cors.test.js
/* eslint-env jest */
'use strict'
// Jest tests for lib/cors. NOTE: these tests are order-dependent — they
// mutate process.env (ORIGIN, CORS) and the shared `event` fixture in place,
// and later tests rely on the state left by earlier ones.
const Cors = require('../lib/cors')
const event = require('./event')
process.env.ORIGIN = 'http://localhost:8080, http://127.0.0.1'
test(`Creates an array of allowed origins`, () => {
const expected = ['http://localhost:8080', 'http://127.0.0.1']
expect(Cors.validOrigins()).toEqual(expected)
})
test(`Check if an event has a valid origin`, () => {
expect(Cors.checkOrigin(event)).toEqual(true)
})
test(`Check if an event has a wrong origin`, () => {
// Mutates the shared fixture; subsequent tests see this origin.
event.headers.origin = 'https://wrong.com'
expect(Cors.checkOrigin(event)).toEqual(false)
})
test(`Check if process.env.ORIGIN: '*'`, () => {
// An empty ORIGIN means "allow any origin", so even the wrong origin
// set by the previous test passes.
process.env.ORIGIN = ''
expect(Cors.checkOrigin(event)).toEqual(true)
})
test(`CORS has to be enabled by default`, () => {
expect(Cors.enabled()).toEqual(true)
})
test(`Check if CORS is disabled`, () => {
// CORS: true is set in the env
// that's why it's a string and not a boolean
process.env.CORS = 'false'
expect(Cors.enabled()).toEqual(false)
})
|
<reponame>simonachkar/Personal
import Typography from 'typography'
import kirkhamTheme from 'typography-theme-kirkham'
import { colors, sizes } from './global'
// Site-wide typography: the Kirkham theme customized in place to use
// Fira Sans everywhere, with selection, link, and heading overrides.
kirkhamTheme.googleFonts = [
{
name: 'Fira Sans',
styles: ['200', '300', '400', '500', '800', '900', 'black'],
},
]
kirkhamTheme.headerFontFamily = ['Fira Sans', 'sans-serif']
kirkhamTheme.bodyFontFamily = ['Fira Sans', 'sans-serif']
kirkhamTheme.bodyWeight = '400'
kirkhamTheme.overrideThemeStyles = () => ({
// Brand-colored text selection (standard and Firefox-prefixed).
'::selection': {
background: colors.primary,
color: 'white',
},
'::-moz-selection': {
background: colors.primary,
color: 'white',
},
// Links: underline effect via border; invert colors on hover.
a: {
color: 'black',
textDecoration: 'none',
borderBottom: `2px solid ${colors.primary}`,
'&:hover': {
backgroundColor: colors.primary,
color: 'white',
},
},
'h1, h2, h3': {
paddingTop: '0.8rem',
paddingBottom: '0.2rem'
}
})
const typography = new Typography(kirkhamTheme)
export default typography
|
'use strict';

import * as config from "../config";
import Particle from "./particle";

/**
 * Particle emitter: owns a pool of `config.flowers.count` particles, revives
 * dead ones at `cfg.emitRate` per frame, and applies wind plus the configured
 * per-lifetime animations to living particles.
 *
 * Relies on the globals `THREE` and `_` (lodash/underscore) being loaded.
 *
 * Fixes vs. the original: `var` replaced with `const`/`let` throughout, and
 * the duplicate `var p` declaration inside `_emit`'s single function scope
 * removed.
 */
class ParticleSystem {
  /** @param {THREE.Scene} scene - Scene the particle meshes are added to. */
  constructor(scene) {
    console.log('ParticleSystem()');
    this.scene = scene;
    this.cfg = config.flowers;
    this.animations = this.cfg.animations;
    this.geo = ParticleSystem.createParticleGeometry();
    this.particles = [];
    this._fillParticleArray(this.cfg.count);
    /*
    this._animate('material.opacity', lifeMoment => {
      return lifeMoment > 0.5 ? 1.0 : lifeMoment * 2;
    });
    this._animate('scale', (lifeMoment, p) => {
      var sc = p.__scale;
      if(lifeMoment > 0.9){
        sc *= 1 - (lifeMoment - 0.9) * 10;
      }
      p.scale.set(sc,sc,sc);
    });
    */
    // Pre-seed a third of the pool so the effect doesn't start empty.
    this._emit(this.cfg.count / 3);
  }

  /** Grows the particle pool up to `cnt` entries; never shrinks it. */
  _fillParticleArray(cnt) {
    for (let i = this.particles.length; i < cnt; i++) {
      const particle = new Particle(this.geo, this._createMaterial());
      particle.onDie(); // start dead so _emit can claim the slot
      this.particles.push(particle);
      this.scene.add(particle);
    }
    console.log(`particle array has ${this.particles.length} entries`);
  }

  /**
   * Builds a transparent Lambert material, copying every non-function key
   * from the material config onto it.
   */
  _createMaterial() {
    const m = this.cfg.material;
    const material = new THREE.MeshLambertMaterial();
    _.chain(m)
      .keys()
      .filter((k) => !_.isFunction(m[k]))
      .each((k) => { material[k] = m[k]; });
    material.transparent = true;
    return material;
  }

  /** Per-frame tick: advance living particles, then emit new ones. */
  update() {
    this._updateExisting();
    this._emit(this.cfg.emitRate);
  }

  /** Grows the pool after cfg.count changed at runtime. */
  refreshCount() {
    this._fillParticleArray(this.cfg.count);
  }

  /**
   * Revives up to `cnt` dead particles. A single forward scan over the pool
   * is shared by all spawns; emission stops early when no dead slot remains.
   */
  _emit(cnt) {
    let slotIdx = 0;
    const maxParticles = Math.min(this.cfg.count, this.particles.length);
    for (let i = 0; i < cnt; i++) {
      // Advance to the next dead (free) particle slot.
      while (slotIdx < maxParticles && this.particles[slotIdx].isAlive()) {
        slotIdx++;
      }
      if (slotIdx >= maxParticles) {
        // no free particle slot
        break;
      }
      this.particles[slotIdx].onSpawn(this.cfg);
      ++slotIdx;
    }
  }

  /** Applies wind and aging to living particles; kills or animates them. */
  _updateExisting() {
    const windOpt = this.cfg.wind;
    this.particles
      .filter((particle) => particle.isAlive())
      .forEach((particle) => {
        const wind = windOpt.force.clone().multiplyScalar(windOpt.speed);
        particle.position.add(wind);
        particle.update();
        if (particle.life < 0) {
          particle.onDie();
        } else {
          this._applyAnimations(particle);
        }
      });
  }

  /** Runs every configured animation at the particle's normalized age (life/maxLife). */
  _applyAnimations(p) {
    _.each(this.animations, (anim) => {
      const lifeMoment = p.life / p.maxLife;
      anim.apply(p, lifeMoment);
    });
  }

  /** Builds the shared two-triangle unit quad used by every particle. */
  static createParticleGeometry() {
    const geo = new THREE.Geometry();
    const v = (x, y, z) => { geo.vertices.push(new THREE.Vector3(x, y, z)); };
    const f3 = (a, b, c) => { geo.faces.push(new THREE.Face3(a, b, c)); };
    v(0, 0, 0);
    v(0, 1, 0);
    v(1, 0, 0);
    v(1, 1, 0);
    f3(0, 2, 1);
    f3(3, 1, 2);
    geo.computeFaceNormals();
    geo.dynamic = false;
    return geo;
  }
}

export default ParticleSystem;
|
<filename>include/RvizOutput.h
//
// Created by philippe on 08/11/18.
//
#ifndef PROJECT_RVIZOUTPUT_H
#define PROJECT_RVIZOUTPUT_H
#include "EntityOutput.h"
#include <vector>
#include <ros/ros.h>
#include "PerceivedEntity.h"
// EntityOutput implementation that writes perceived entities out through a
// ROS publisher (presumably as RViz visualization markers, given the name
// and the member below — confirm against RvizOutput.cpp).
class RvizOutput : public EntityOutput {
    ros::Publisher markerPublisher;  // publisher used by the write* methods
public:
    // nh: node handle used to advertise the output topic.
    RvizOutput(ros::NodeHandle &nh);
    ~RvizOutput() override;
    // Publish the given entities.
    void writeEntities(const std::vector<PerceivedEntity> &entities) override;
    // Publish the given raw perceptions.
    void writePerceptions(const std::vector<PerceivedEntity> &entities) override;
};
#endif //PROJECT_RVIZOUTPUT_H
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableList;
import io.trino.plugin.hive.acid.AcidOperation;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HivePrincipal;
import io.trino.plugin.hive.metastore.HivePrivilegeInfo;
import io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege;
import io.trino.plugin.hive.metastore.Partition;
import io.trino.plugin.hive.metastore.PartitionWithStatistics;
import io.trino.plugin.hive.metastore.PrincipalPrivileges;
import io.trino.plugin.hive.metastore.Table;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.TableNotFoundException;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.statistics.ColumnStatisticType;
import io.trino.spi.type.Type;
import org.apache.hadoop.hive.metastore.api.DataOperationType;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.function.Function;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.Maps.immutableEntry;
import static io.trino.plugin.hive.HivePartitionManager.extractPartitionValues;
import static java.util.Objects.requireNonNull;
/**
 * Thin facade over a {@link HiveMetastore}: every operation is forwarded to
 * the delegate, with table/partition lookups resolved first so that callers
 * get {@link TableNotFoundException} / {@link PartitionNotFoundException}
 * instead of empty Optionals where existence is required.
 */
public class HiveMetastoreClosure
{
    private final HiveMetastore delegate;
    /**
     * Do not use this directly. Instead, the closure should be fetched from the current SemiTransactionalHiveMetastore,
     * which can be fetched from the current HiveMetadata.
     */
    public HiveMetastoreClosure(HiveMetastore delegate)
    {
        this.delegate = requireNonNull(delegate, "delegate is null");
    }
    public Optional<Database> getDatabase(String databaseName)
    {
        return delegate.getDatabase(databaseName);
    }
    public List<String> getAllDatabases()
    {
        return delegate.getAllDatabases();
    }
    // Resolves the named table, throwing TableNotFoundException when absent.
    private Table getExistingTable(String databaseName, String tableName)
    {
        return delegate.getTable(databaseName, tableName)
                .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    }
    public Optional<Table> getTable(String databaseName, String tableName)
    {
        return delegate.getTable(databaseName, tableName);
    }
    public Set<ColumnStatisticType> getSupportedColumnStatistics(Type type)
    {
        return delegate.getSupportedColumnStatistics(type);
    }
    public PartitionStatistics getTableStatistics(String databaseName, String tableName)
    {
        return delegate.getTableStatistics(getExistingTable(databaseName, tableName));
    }
    public Map<String, PartitionStatistics> getPartitionStatistics(String databaseName, String tableName, Set<String> partitionNames)
    {
        Table table = getExistingTable(databaseName, tableName);
        List<Partition> partitions = getExistingPartitionsByNames(table, ImmutableList.copyOf(partitionNames));
        return delegate.getPartitionStatistics(table, partitions);
    }
    public void updateTableStatistics(String databaseName,
            String tableName,
            AcidTransaction transaction,
            Function<PartitionStatistics, PartitionStatistics> update)
    {
        delegate.updateTableStatistics(databaseName, tableName, transaction, update);
    }
    public void updatePartitionStatistics(String databaseName,
            String tableName,
            String partitionName,
            Function<PartitionStatistics, PartitionStatistics> update)
    {
        Table table = getExistingTable(databaseName, tableName);
        delegate.updatePartitionStatistics(table, partitionName, update);
    }
    public void updatePartitionStatistics(String databaseName, String tableName, Map<String, Function<PartitionStatistics, PartitionStatistics>> updates)
    {
        Table table = getExistingTable(databaseName, tableName);
        delegate.updatePartitionStatistics(table, updates);
    }
    public List<String> getAllTables(String databaseName)
    {
        return delegate.getAllTables(databaseName);
    }
    public List<String> getTablesWithParameter(String databaseName, String parameterKey, String parameterValue)
    {
        return delegate.getTablesWithParameter(databaseName, parameterKey, parameterValue);
    }
    public List<String> getAllViews(String databaseName)
    {
        return delegate.getAllViews(databaseName);
    }
    public void createDatabase(Database database)
    {
        delegate.createDatabase(database);
    }
    public void dropDatabase(String databaseName, boolean deleteData)
    {
        delegate.dropDatabase(databaseName, deleteData);
    }
    public void renameDatabase(String databaseName, String newDatabaseName)
    {
        delegate.renameDatabase(databaseName, newDatabaseName);
    }
    public void setDatabaseOwner(String databaseName, HivePrincipal principal)
    {
        delegate.setDatabaseOwner(databaseName, principal);
    }
    public void setTableOwner(String databaseName, String tableName, HivePrincipal principal)
    {
        delegate.setTableOwner(databaseName, tableName, principal);
    }
    public void createTable(Table table, PrincipalPrivileges principalPrivileges)
    {
        delegate.createTable(table, principalPrivileges);
    }
    public void dropTable(String databaseName, String tableName, boolean deleteData)
    {
        delegate.dropTable(databaseName, tableName, deleteData);
    }
    public void replaceTable(String databaseName, String tableName, Table newTable, PrincipalPrivileges principalPrivileges)
    {
        delegate.replaceTable(databaseName, tableName, newTable, principalPrivileges);
    }
    public void renameTable(String databaseName, String tableName, String newDatabaseName, String newTableName)
    {
        delegate.renameTable(databaseName, tableName, newDatabaseName, newTableName);
    }
    public void commentTable(String databaseName, String tableName, Optional<String> comment)
    {
        delegate.commentTable(databaseName, tableName, comment);
    }
    public void commentColumn(String databaseName, String tableName, String columnName, Optional<String> comment)
    {
        delegate.commentColumn(databaseName, tableName, columnName, comment);
    }
    public void addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment)
    {
        delegate.addColumn(databaseName, tableName, columnName, columnType, columnComment);
    }
    public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName)
    {
        delegate.renameColumn(databaseName, tableName, oldColumnName, newColumnName);
    }
    public void dropColumn(String databaseName, String tableName, String columnName)
    {
        delegate.dropColumn(databaseName, tableName, columnName);
    }
    // Empty result when either the table or the partition does not exist.
    public Optional<Partition> getPartition(String databaseName, String tableName, List<String> partitionValues)
    {
        return delegate.getTable(databaseName, tableName)
                .flatMap(table -> delegate.getPartition(table, partitionValues));
    }
    public Optional<List<String>> getPartitionNamesByFilter(
            String databaseName,
            String tableName,
            List<String> columnNames,
            TupleDomain<String> partitionKeysFilter)
    {
        return delegate.getPartitionNamesByFilter(databaseName, tableName, columnNames, partitionKeysFilter);
    }
    // Resolves every named partition, throwing PartitionNotFoundException for
    // any missing one; the returned list preserves the order of partitionNames.
    private List<Partition> getExistingPartitionsByNames(Table table, List<String> partitionNames)
    {
        Map<String, Partition> partitions = delegate.getPartitionsByNames(table, partitionNames).entrySet().stream()
                .map(entry -> immutableEntry(entry.getKey(), entry.getValue().orElseThrow(() ->
                        new PartitionNotFoundException(table.getSchemaTableName(), extractPartitionValues(entry.getKey())))))
                .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
        return partitionNames.stream()
                .map(partitions::get)
                .collect(toImmutableList());
    }
    // A missing table maps each requested name to Optional.empty().
    public Map<String, Optional<Partition>> getPartitionsByNames(String databaseName, String tableName, List<String> partitionNames)
    {
        return delegate.getTable(databaseName, tableName)
                .map(table -> delegate.getPartitionsByNames(table, partitionNames))
                .orElseGet(() -> partitionNames.stream()
                        .collect(toImmutableMap(name -> name, name -> Optional.empty())));
    }
    public void addPartitions(String databaseName, String tableName, List<PartitionWithStatistics> partitions)
    {
        delegate.addPartitions(databaseName, tableName, partitions);
    }
    public void dropPartition(String databaseName, String tableName, List<String> parts, boolean deleteData)
    {
        delegate.dropPartition(databaseName, tableName, parts, deleteData);
    }
    public void alterPartition(String databaseName, String tableName, PartitionWithStatistics partition)
    {
        delegate.alterPartition(databaseName, tableName, partition);
    }
    public void createRole(String role, String grantor)
    {
        delegate.createRole(role, grantor);
    }
    public void dropRole(String role)
    {
        delegate.dropRole(role);
    }
    public Set<String> listRoles()
    {
        return delegate.listRoles();
    }
    public void grantRoles(Set<String> roles, Set<HivePrincipal> grantees, boolean adminOption, HivePrincipal grantor)
    {
        delegate.grantRoles(roles, grantees, adminOption, grantor);
    }
    public void revokeRoles(Set<String> roles, Set<HivePrincipal> grantees, boolean adminOption, HivePrincipal grantor)
    {
        delegate.revokeRoles(roles, grantees, adminOption, grantor);
    }
    public Set<RoleGrant> listGrantedPrincipals(String role)
    {
        return delegate.listGrantedPrincipals(role);
    }
    public Set<RoleGrant> listRoleGrants(HivePrincipal principal)
    {
        return delegate.listRoleGrants(principal);
    }
    public void grantTablePrivileges(String databaseName, String tableName, String tableOwner, HivePrincipal grantee, HivePrincipal grantor, Set<HivePrivilege> privileges, boolean grantOption)
    {
        delegate.grantTablePrivileges(databaseName, tableName, tableOwner, grantee, grantor, privileges, grantOption);
    }
    public void revokeTablePrivileges(String databaseName, String tableName, String tableOwner, HivePrincipal grantee, HivePrincipal grantor, Set<HivePrivilege> privileges, boolean grantOption)
    {
        delegate.revokeTablePrivileges(databaseName, tableName, tableOwner, grantee, grantor, privileges, grantOption);
    }
    public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String tableName, Optional<String> tableOwner, Optional<HivePrincipal> principal)
    {
        return delegate.listTablePrivileges(databaseName, tableName, tableOwner, principal);
    }
    // Hive ACID transaction operations — delegated verbatim.
    public long openTransaction()
    {
        return delegate.openTransaction();
    }
    public void commitTransaction(long transactionId)
    {
        delegate.commitTransaction(transactionId);
    }
    public void abortTransaction(long transactionId)
    {
        delegate.abortTransaction(transactionId);
    }
    public void sendTransactionHeartbeat(long transactionId)
    {
        delegate.sendTransactionHeartbeat(transactionId);
    }
    public void acquireSharedReadLock(String queryId, long transactionId, List<SchemaTableName> fullTables, List<HivePartition> partitions)
    {
        delegate.acquireSharedReadLock(queryId, transactionId, fullTables, partitions);
    }
    public String getValidWriteIds(List<SchemaTableName> tables, long currentTransactionId)
    {
        return delegate.getValidWriteIds(tables, currentTransactionId);
    }
    public Optional<String> getConfigValue(String name)
    {
        return delegate.getConfigValue(name);
    }
    public long allocateWriteId(String dbName, String tableName, long transactionId)
    {
        return delegate.allocateWriteId(dbName, tableName, transactionId);
    }
    public void acquireTableWriteLock(String queryId, long transactionId, String dbName, String tableName, DataOperationType operation, boolean isPartitioned)
    {
        delegate.acquireTableWriteLock(queryId, transactionId, dbName, tableName, operation, isPartitioned);
    }
    public void updateTableWriteId(String dbName, String tableName, long transactionId, long writeId, OptionalLong rowCountChange)
    {
        delegate.updateTableWriteId(dbName, tableName, transactionId, writeId, rowCountChange);
    }
    public void alterPartitions(String dbName, String tableName, List<Partition> partitions, long writeId)
    {
        delegate.alterPartitions(dbName, tableName, partitions, writeId);
    }
    public void addDynamicPartitions(String dbName, String tableName, List<String> partitionNames, long transactionId, long writeId, AcidOperation operation)
    {
        delegate.addDynamicPartitions(dbName, tableName, partitionNames, transactionId, writeId, operation);
    }
    public void alterTransactionalTable(Table table, long transactionId, long writeId, PrincipalPrivileges principalPrivileges)
    {
        delegate.alterTransactionalTable(table, transactionId, writeId, principalPrivileges);
    }
}
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# NOTE(review): generated on Windows — the FLUTTER_* values use backslash
# separators and SYMROOT mixes '/' and '\'. Regenerate with `flutter build`
# rather than hand-editing if paths look wrong on another machine.
export "FLUTTER_ROOT=C:\flutter"
export "FLUTTER_APPLICATION_PATH=D:\WORK\_2021\Flutter\practical-flutter\ch_05+06\flutter_book"
export "FLUTTER_TARGET=lib\main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build\ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
export "DART_OBFUSCATION=false"
export "TRACK_WIDGET_CREATION=false"
export "TREE_SHAKE_ICONS=false"
export "PACKAGE_CONFIG=.packages"
|
<reponame>PrideFuu/Beavis<filename>functions/time.js
module.exports = function (bot) {
timeSince = function (timestamp, suppressAgo) {
ago = typeof suppressAgo !== 'undefined' ? suppressAgo : false;
var message = moment.utc(timestamp).fromNow(suppressAgo);
if (moment().isAfter(moment.utc(timestamp).add(24, 'hours'))) {
message += ' (' + moment.utc(timestamp).calendar() + ')';
}
return message;
};
timeUntil = function (timestamp, prefixMessage) {
var message = moment.utc(timestamp).fromNow();
if (prefixMessage !== undefined) {
return '(' + prefixMessage + ' ' + message + ')';
}
else {
return '(' + message + ')';
}
};
secondsSince = function (timestamp) {
var now = moment.utc();
timestamp = moment.utc(timestamp);
return now.diff(timestamp, 'seconds');
};
};
|
/*
* http://suyati.github.io/line-control
* LineControl 1.1.0
* Copyright (C) 2014, Suyati Technologies
* This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
(function( $ ) {
var methods = {
        saveSelection: function() {
            //Function to save the text selection range from the editor
            // Stores the current Range in the widget's 'currentRange' data
            // slot so it can be restored after focus moves to a toolbar
            // button or modal. NOTE(review): `sel` is an implicit global.
            $(this).data('editor').focus();
            if (window.getSelection) {
                sel = window.getSelection();
                if (sel.getRangeAt && sel.rangeCount)
                    $(this).data('currentRange', sel.getRangeAt(0));
            }
            else {
                // Legacy IE (<9) selection API.
                if (document.selection && document.selection.createRange)
                    $(this).data('currentRange',document.selection.createRange());
                else
                    $(this).data('currentRange', null);
            }
        },
        restoreSelection: function(text,mode) {
            //Function to restore the text selection range from the editor
            // Optionally replaces the restored selection with `text`;
            // mode == "html" inserts it as markup, anything else as plain text.
            // NOTE(review): the two bare ternaries below are no-ops (results
            // discarded); the intent was probably
            // `text = typeof text !== 'undefined' ? text : false;` etc.
            // `node` is also declared twice (here and in the var list below).
            var node;
            typeof text !== 'undefined' ? text : false;
            typeof mode !== 'undefined' ? mode : "";
            var range = $(this).data('currentRange');
            if (range) {
                if (window.getSelection) {
                    if (text) {
                        range.deleteContents();
                        if (mode == "html") {
                            // Parse the HTML into a DocumentFragment and
                            // insert it at the saved range.
                            var el = document.createElement("div");
                            el.innerHTML = text;
                            var frag = document.createDocumentFragment(), node, lastNode;
                            while (node = el.firstChild) {
                                lastNode = frag.appendChild(node);
                            }
                            range.insertNode(frag);
                        }
                        else {
                            range.insertNode(document.createTextNode(text));
                        }
                    }
                    // Re-select the saved range.
                    sel = window.getSelection();
                    sel.removeAllRanges();
                    sel.addRange(range);
                }
                else if (document.selection && range.select) {
                    // Legacy IE path.
                    range.select();
                    if (text) {
                        if (mode == "html")
                            range.pasteHTML(text);
                        else
                            range.text = text;
                    }
                }
            }
        },
        restoreIESelection:function() {
            //Function to restore the text selection range from the editor in IE
            // Restores the saved range without inserting any content.
            var range = $(this).data('currentRange');
            if (range) {
                if (window.getSelection) {
                    sel = window.getSelection();
                    sel.removeAllRanges();
                    sel.addRange(range);
                }
                else if (document.selection && range.select) {
                    range.select();
                }
            }
        },
        init: function(options)
        {
            // Build the viewer widget: hide the original element and replace
            // it with a menu bar, a (non-editable) content area, and a
            // status bar showing word/graphic counts.
            // Ensure the element has an id — it namespaces the child div ids.
            if ($(this).attr("id") === undefined || $(this).attr("id") === "") {
                $(this).attr("id", Date.now());
            }
            // Maximum number of words and graphics allowed
            var maxWords = 500, maxGraps = 5;
            // Menu item definitions; only the fullscreen toggle is defined
            // for this viewer variant.
            var menuItems = {
                'screeneffects':true,
                'togglescreen':{ "text": "Ampliar/reducir visualizador",
                                 "icon": "fa fa-arrows-alt",
                                 "tooltip": "Ampliar/reducir visualizador",
                                 "commandname":null,
                                 "custom":function(button, parameters){
                                     // Toggle fullscreen and resize the
                                     // editor to fill the remaining space.
                                     $(this).data("editor").parent().toggleClass('fullscreen');
                                     var statusdBarHeight=0;
                                     if($(this).data("statusBar").length)
                                     {
                                         statusdBarHeight = $(this).data("statusBar").height();
                                     }
                                     if($(this).data("editor").parent().hasClass('fullscreen'))
                                         $(this).data("editor").css({"height":$(this).data("editor").parent().height()-($(this).data("menuBar").height()+statusdBarHeight)-13});
                                     else
                                         $(this).data("editor").css({"height":""});
                                 }},
                'params': {"obj": null},
            };
            var menuGroups = {'screeneffects': ['togglescreen'] };
            var settings = $.extend({'togglescreen':true},options);
            // Widget scaffolding: container, menu bar, content area, status bar.
            var containerDiv = $("<div/>",{ class : "row-fluid Editor-container" });
            var $this = $(this).hide();
            $this.after(containerDiv);
            var menuBar = $( "<div/>",{ id : "menuBarDiv_" + $(this).attr("id"),
                                        class : "row-fluid line-control-menu-bar"
                                      }).prependTo(containerDiv);
            // contenteditable is false: this plugin variant is a read-only viewer.
            var editor = $( "<div/>",{ id: "editorDiv_" + $(this).attr("id"),
                                       class : "Editor-editor",
                                       css : {overflow: "auto"},
                                       contenteditable:"false"
                                     }).appendTo(containerDiv);
            var statusBar = $("<div/>", { id : "statusbar_" + $(this).attr("id"),
                                          class: "row-fluid line-control-status-bar",
                                          unselectable:"on",
                                        }).appendTo(containerDiv);
            $(this).data("menuBar", menuBar);
            $(this).data("editor", editor);
            $(this).data("statusBar", statusBar);
            $(this).data("maxWords", maxWords);
            $(this).data("maxGraps", maxGraps);
            var editor_Content = this;
            // Refresh the word/graphic counters in the status bar, turning a
            // counter red when its limit is exceeded.
            // NOTE(review): `reviewButtons` is an external global defined
            // elsewhere — confirm it is loaded before this plugin.
            function showStatusBar() {
                var maxwrd = $(editor_Content).data("maxWords");
                var maxgrp = $(editor_Content).data("maxGraps");
                var cntwrd = methods.getWordCount.apply(editor_Content);
                var cntgrp = methods.getGrapCount.apply(editor_Content);
                $(editor_Content).data("statusBar").html('<div class="label" style="background:' + (cntgrp > maxgrp ? 'red' : '#bd9e56') + '">Gráficos: ' + cntgrp + '/' + maxgrp + '</div>');
                $(editor_Content).data("statusBar").append('<div class="label" style="background:' + (cntwrd > maxwrd ? 'red' : '#bd9e56') + '">Palabras: ' + cntwrd + '/' + maxwrd + '</div>');
                reviewButtons($(editor_Content).data("name"));
            }
            // Build the menu bar from the enabled items: grouped items share a
            // btn-group; disabled groups also disable all of their members.
            for(var item in menuItems){
                if(!settings[item] ){ //if the display is not set to true for the button in the settings.
                    if(settings[item] in menuGroups){
                        for(var each in menuGroups[item]){
                            settings[each] = false;
                        }
                    }
                    continue;
                }
                if(item in menuGroups){
                    var group = $("<div/>",{class:"btn-group"});
                    for(var index=0;index<menuGroups[item].length;index++){
                        var value = menuGroups[item][index];
                        if(settings[value]){
                            var menuItem = methods.createMenuItem.apply(this,[menuItems[value], settings[value], true]);
                            group.append(menuItem);
                        }
                        settings[value] = false;
                    }
                    menuBar.append(group);
                }
                else{
                    var menuItem = methods.createMenuItem.apply(this,[menuItems[item], settings[item],true]);
                    menuBar.append(menuItem);
                }
            }
            //For contextmenu
            // Right-clicking an image opens a custom context menu
            // (built via createImageContext) positioned at the cursor.
            editor.bind("contextmenu", function(e){
                if($('#context-menu').length)
                    $('#context-menu').remove();
                var cMenu = $('<div/>',{id:"context-menu"
                }).css({position:"absolute", top:e.pageY, left: e.pageX, "z-index":9999
                }).click(function(event){
                    event.stopPropagation();
                });
                var cMenuUl = $('<ul/>',{ class:"dropdown-menu on","role":"menu"});
                e.preventDefault();
                if($(e.target).is('img')) {
                    methods.createImageContext.apply(this,[e,cMenuUl]);
                    cMenuUl.appendTo(cMenu);
                    cMenu.appendTo('body');
                }
            });
        },
        createMenuItem: function(itemSettings, options, returnElement){
            //Function to perform multiple actions.supplied arguments: itemsettings-list of buttons and button options, options: options for select input, returnelement: boolean.
            //1.Create Select Options using Bootstrap Dropdown.
            //2.Create modal dialog using bootstrap options
            //3.Create menubar buttons binded with corresponding event actions
            // NOTE(review): this bare ternary is a no-op (result discarded).
            typeof returnElement !== 'undefined' ? returnElement : false;
            if(itemSettings["select"]){
                // Dropdown (select-style) entry: each option stores its value
                // on the wrapper and fires "change".
                var menuWrapElement = $("<div/>", {class:"btn-group"});
                var menuElement = $("<ul/>", {class:"dropdown-menu"});
                menuWrapElement.append($('<a/>',{
                    class:"btn btn-default dropdown-toggle",
                    "data-toggle":"dropdown",
                    "href":"javascript:void(0)",
                    "title":itemSettings["tooltip"]
                }).html(itemSettings["default"]).append($("<span/>",{class:"caret"})).mousedown(function(e){
                    e.preventDefault();
                }));
                $.each(options,function(i,v){
                    var option = $('<li/>')
                    $("<a/>",{
                        tabindex : "-1",
                        href : "javascript:void(0)"
                    }).html(i).appendTo(option);
                    option.click(function(){
                        $(this).parent().parent().data("value", v);
                        $(this).parent().parent().trigger("change")
                    });
                    menuElement.append(option);
                });
                var action = "change";
            }
            else if(itemSettings["modal"]){
                // Modal-backed entry: mousedown saves the selection before
                // the Bootstrap modal steals focus.
                var menuWrapElement = methods.createModal.apply(this,[itemSettings["modalId"], itemSettings["modalHeader"], itemSettings["modalBody"], itemSettings["onSave"]]);
                var menuElement = $("<i/>");
                if(itemSettings["icon"])
                    menuElement.addClass(itemSettings["icon"]);
                else
                    menuElement.html(itemSettings["text"]);
                menuWrapElement.append(menuElement);
                menuWrapElement.mousedown(function(obj, methods, beforeLoad){
                    return function(e){
                        e.preventDefault();
                        methods.saveSelection.apply(obj);
                        if(beforeLoad){
                            beforeLoad.apply(obj);
                        }
                    }
                }(this, methods,itemSettings["beforeLoad"]));
                menuWrapElement.attr('title', itemSettings['tooltip']);
                return menuWrapElement;
            }
            else{
                // Plain button entry.
                var menuWrapElement = $("<a/>",{href:'javascript:void(0)', class:'btn btn-default'});
                var menuElement = $("<i/>");
                if(itemSettings["icon"])
                    menuElement.addClass(itemSettings["icon"]);
                else
                    menuElement.html(itemSettings["text"]);
                var action = "click";
            }
            if(itemSettings["custom"]){
                // Custom handler: save the selection, then call the
                // item-specific callback with the button and its params.
                menuWrapElement.bind(action, (function(obj, params){
                    return function(){
                        methods.saveSelection.apply(obj);
                        itemSettings["custom"].apply(obj, [$(this), params]);
                    }
                })(this, itemSettings['params']));
            }
            else{
                // Default handler: run the execCommand named by the item.
                menuWrapElement.data("commandName", itemSettings["commandname"]);
                menuWrapElement.data("editor", $(this).data("editor"));
                menuWrapElement.bind(action, function(){ methods.setTextFormat.apply(this) });
            }
            menuWrapElement.attr('title', itemSettings['tooltip']);
            menuWrapElement.css('cursor', 'pointer');
            menuWrapElement.append(menuElement);
            if(returnElement)
                return menuWrapElement;
            $(this).data("menuBar").append(menuWrapElement);
        },
        setTextFormat: function(){
            //Function to run the text formatting options using execCommand.
            // `this` is the menu button; it carries the command name, the
            // optional value, and a reference to the editor in its data().
            methods.setStyleWithCSS.apply(this);
            document.execCommand($(this).data("commandName"), false, $(this).data("value") || null);
            $(this).data("editor").focus();
            return false;
        },
        getSource: function(button, params){
            //Function to show the html source code to the editor and toggle the text display.
            // Toggles between the rendered view and a <pre> containing the
            // raw HTML; sibling buttons are hidden while in source mode.
            // NOTE(review): `preElement` is an implicit global (no var).
            var flag = 0;
            if(button.data('state')){
                flag = 1;
                button.data('state', null);
            }
            else
                button.data('state', 1);
            $(this).data("source-mode", !flag);
            var editor = $(this).data('editor');
            var content;
            if(flag==0){ //Convert text to HTML
                content = document.createTextNode(editor.html());
                editor.empty();
                editor.attr('contenteditable', false);
                preElement = $("<pre/>",{
                    contenteditable: true
                });
                preElement.append(content);
                editor.append(preElement);
                button.parent().siblings().hide();
                button.siblings().hide();
            }
            else{
                var html = editor.children().first().text();
                editor.html(html);
                editor.attr('contenteditable', true);
                button.parent().siblings().show();
                button.siblings().show();
            }
        },
countGraps: function(){
//Function to count the number of graphs recursively as the text grows in the editor.
var texto = $(this).data("editor").html();
var count = (texto.match(/<img src="/g) || []).length;
return count;
},
countWords: function(node){
//Function to count the number of words recursively as the text grows in the editor.
var count = 0;
var textNodes = node.contents().filter(function(){
return (this.nodeType == 3);
});
for (var i=0; i<textNodes.length; i++){
text = textNodes[i].textContent.replace(/[áéíóúüñ]/g,'').replace(/[^-\w\s]/gi, ' ');
count += $.trim(text).split(/\s+/).length;
}
node.children().each(function(){
count += methods.countWords.apply(this, [$(this)]);
});
return count;
},
countChars: function(node){
//Function to count the number of characters recursively as the text grows in the editor.
var count = 0;
var textNodes = node.contents().filter(function(){
return (this.nodeType == 3);
});
for (var i=0; i<textNodes.length; i++){
text = textNodes[i].textContent;
count += text.length;
}
node.children().each(function(){
count += methods.countChars.apply(this, [$(this)]);
});
return count;
},
        getGrapCount: function(){
            //Function to return the graphics (image) count of the text in the editor
            // NOTE(review): the argument passed via apply is ignored —
            // countGraps reads the editor from `this` directly.
            return methods.countGraps.apply(this, [$(this).data("editor")]);
        },
        getWordCount: function(){
            if ($(this).data("editor").text().trim() == '') return 0;
            //Function to return the word count of the text in the editor
            return methods.countWords.apply(this, [$(this).data("editor")]);
        },
        getCharCount: function(){
            //Function to return the character count of the text in the editor
            return methods.countChars.apply(this, [$(this).data("editor")]);
        },
        rgbToHex: function(rgb){
            //Function to convert the rgb color codes into hexadecimal code
            // e.g. "rgb(255, 0, 16)" -> "#ff0010".
            rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/);
            return "#" +
                ("0" + parseInt(rgb[1],10).toString(16)).slice(-2) +
                ("0" + parseInt(rgb[2],10).toString(16)).slice(-2) +
                ("0" + parseInt(rgb[3],10).toString(16)).slice(-2);
        },
        showMessage: function(target,message){
            //Function to show the error message. Supplied arguments:target-div id, message-message text to be displayed.
            // Renders a dismissible Bootstrap alert inside #target and
            // auto-closes every alert after 3 seconds.
            var errorDiv=$('<div/>',{ class:"alert alert-danger"}
            ).append($('<button/>',{
                type:"button",
                class:"close",
                "data-dismiss":"alert",
                html:"x"
            })).append($('<span/>').html(message));
            errorDiv.appendTo($('#'+target));
            setTimeout(function() { $('.alert').alert('close'); }, 3000);
        },
        getText: function(){
            //Function to get the source code.
            // In source mode the markup lives as text inside the <pre> child.
            if(!$(this).data("source-mode"))
                return $(this).data("editor").html();
            else
                return $(this).data("editor").children().first().text();
        },
        setText: function(text){
            //Function to set the source code
            if(!$(this).data("source-mode"))
                $(this).data("editor").html(text);
            else
                $(this).data("editor").children().first().text(text);
        },
        setStyleWithCSS:function(){
            // Force execCommand formatting to emit CSS styles instead of
            // legacy <font> tags, with fallbacks for old IE command names.
            // NOTE(review): `Editor.execCommand` refers to an undefined
            // global (probably meant `document.execCommand`); on IE every
            // attempt throws and is swallowed by the empty catch blocks.
            if(navigator.userAgent.match(/MSIE/i)){ //for IE10
                try {
                    Editor.execCommand("styleWithCSS", 0, false);
                } catch (e) {
                    try {
                        Editor.execCommand("useCSS", 0, true);
                    } catch (e) {
                        try {
                            Editor.execCommand('styleWithCSS', false, false);
                        } catch (e) {
                        }
                    }
                }
            }
            else{
                document.execCommand("styleWithCSS", null, true);
            }
        },
        setName: function(name){
            // Store the widget's logical name (used by the status-bar refresh).
            $(this).data("name",name);
        },
        getName: function(){
            return $(this).data("name");
        },
        setMaxWords: function(maxWords){
            // Override the word limit shown/enforced by the status bar.
            $(this).data("maxWords",maxWords);
        },
        getMaxWords: function(){
            return $(this).data("maxWords");
        },
        setMaxGraps: function(maxGraps){
            // Override the graphics (image) limit shown by the status bar.
            $(this).data("maxGraps",maxGraps);
        },
        getMaxGraps: function(){
            return $(this).data("maxGraps");
        }
}
$.fn.Viewer = function(method){
if ( methods[method] ) {
return methods[method].apply( this, Array.prototype.slice.call( arguments, 1 ));
} else if ( typeof method === 'object' || ! method ) {
return methods.init.apply( this, arguments );
} else {
$.error( 'El método "' + method + '" no existe en el visualizador' );
}
};
})( jQuery );
|
package com.linwei.buriedpointlibrary.template.intent;
import com.linwei.buriedpointlibrary.config.Constant;
import com.linwei.buriedpointlibrary.utils.ProcessorUtils;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeName;
import java.util.List;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
/**
* @Author: WS
* @Time: 2020/4/30
* @Description: Activity跳转起始模板生成
*/
public class ActivityEnterGenerator implements ActivityGenerator {
    /**
     * Generates a "jump" method that builds an Intent, copies every annotated
     * field into its extras, and starts the target Activity.
     *
     * @param clazzType         fully qualified name of the target Activity class
     * @param variableElements  annotated fields passed through Intent extras
     * @param executableElement element used to resolve the destination package
     * @param processorUtils    code-generation helper utilities
     * @param processingEnv     current annotation-processing environment
     */
    @Override
    public void generator(String clazzType,
                          List<VariableElement> variableElements,
                          ExecutableElement executableElement,
                          ProcessorUtils processorUtils,
                          ProcessingEnvironment processingEnv) {
        // Abort early when the target class is missing. Previously the error
        // was reported but generation continued, emitting uncompilable code
        // such as "intent.setClass(context, .class)".
        if (!processorUtils.isNotEmpty(clazzType)) {
            processingEnv.getMessager().printMessage(
                    Diagnostic.Kind.ERROR,
                    "IntentClass注解定义不明确,无法进行界面跳转!"
            );
            return;
        }
        MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder(Constant.METHOD_JUMP_ACTIVITY)
                .addModifiers(Modifier.PUBLIC)
                .returns(void.class);
        methodBuilder.addParameter(Object.class, "context");
        methodBuilder.addStatement("android.content.Intent intent = new android.content.Intent()");
        for (VariableElement element : variableElements) {
            // An Element carries no type information itself; ask for its TypeMirror.
            TypeMirror typeMirror = element.asType();
            TypeName typeName = TypeName.get(typeMirror);
            // Each annotated field becomes both a generated-method parameter
            // and an Intent extra keyed by the field name.
            String fieldName = element.getSimpleName().toString();
            methodBuilder.addParameter(typeName, fieldName);
            methodBuilder.addStatement("intent.putExtra(\"" + fieldName + "\"," + fieldName + ")");
        }
        methodBuilder.addStatement("intent.setClass((android.content.Context)context, " + clazzType + ".class)");
        methodBuilder.addStatement("((android.content.Context)context).startActivity(intent)");
        TypeElement typeElement = (TypeElement)
                executableElement.getEnclosingElement();
        processorUtils.writeToFile(clazzType + Constant.JUMP_SUFFIX,
                processorUtils.getPackageName(typeElement),
                methodBuilder.build(), processingEnv, null);
    }
}
|
import React, { useState } from 'react';
const App = () => {
const [selected, setSelected] = useState([]);
const handleClick = (cell) => {
// toggle cell
const newSelected = [...selected];
const index = newSelected.indexOf(cell);
if (index === -1) {
newSelected.push(cell);
} else {
newSelected.splice(index, 1);
}
setSelected(newSelected);
};
return (
<div className="grid">
<div
className={`grid-cell ${selected.includes(1) && 'selected'}`}
onClick={() => handleClick(1)}
>
1
</div>
<div
className={`grid-cell ${selected.includes(2) && 'selected'}`}
onClick={() => handleClick(2)}
>
2
</div>
<div
className={`grid-cell ${selected.includes(3) && 'selected'}`}
onClick={() => handleClick(3)}
>
3
</div>
<div
className={`grid-cell ${selected.includes(4) && 'selected'}`}
onClick={() => handleClick(4)}
>
4
</div>
<div
className={`grid-cell ${selected.includes(5) && 'selected'}`}
onClick={() => handleClick(5)}
>
5
</div>
<div
className={`grid-cell ${selected.includes(6) && 'selected'}`}
onClick={() => handleClick(6)}
>
6
</div>
<div
className={`grid-cell ${selected.includes(7) && 'selected'}`}
onClick={() => handleClick(7)}
>
7
</div>
<div
className={`grid-cell ${selected.includes(8) && 'selected'}`}
onClick={() => handleClick(8)}
>
8
</div>
<div
className={`grid-cell ${selected.includes(9) && 'selected'}`}
onClick={() => handleClick(9)}
>
9
</div>
</div>
);
};
export default App;
|
<filename>app/scripts/controllers/children.js<gh_stars>0
'use strict';
/**
* @ngdoc function
* @name magosApp.controller:ChildrenCtrl
* @description
* # ChildrenCtrl
* Controller of the magosApp
*/
angular.module('magosApp')
  .controller('ChildrenCtrl', function ($scope, $location) {
    // Static demo data shown on the children overview page.
    $scope.children = [
      { id: 1, name: 'Carolina', assignedCount: 3, pendingCount: 4 },
      { id: 2, name: 'Diego', assignedCount: 6, pendingCount: 1 },
    ];

    // Navigate to the detail route for a single child.
    $scope.showChild = function (id) {
      $location.path('/child/' + id);
    };
  });
|
#!/bin/sh
# CocoaPods-style "embed frameworks" build phase: copies built frameworks into
# the app bundle, strips architectures the target cannot run, and re-signs.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies one framework ($1: path or name) into the app's Frameworks folder,
# resolving symlinks, then strips invalid archs and re-signs it.
# NOTE(review): if none of the three -r probes below match, `source` stays
# unset and the rsync fails with an empty source — confirm whether an explicit
# error message is wanted here.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
# Fall back to a bare dylib layout if the .framework binary is not readable.
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Both configurations currently embed the same framework; kept as separate
# branches to match the CocoaPods-generated script layout.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/componentDyyTest/componentDyyTest.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/componentDyyTest/componentDyyTest.framework"
fi
|
#!/bin/bash
# Cross-compiles release binaries for the most recent git tag, optionally
# signs a checksum file, and publishes a GitHub release with generated notes.
set -e
tag=$(git describe --tags --abbrev=0)
platforms=$(echo "darwin-amd64,linux-386,linux-arm,linux-amd64,linux-arm64,windows-386,windows-amd64" | tr "," "\n")
include="dist/*"
# A repo may override the default go-build matrix with its own build script.
if [ -n "${GH_EXT_BUILD_SCRIPT}" ]; then
echo "invoking build script override ${GH_EXT_BUILD_SCRIPT}"
./${GH_EXT_BUILD_SCRIPT} $tag || exit $?
else
for p in $platforms; do
# "os-arch" -> GOOS / GOARCH pair.
goos=$(echo $p | sed 's/-.*//')
goarch=$(echo $p | sed 's/.*-//')
ext=""
if [[ "${goos}" == "windows" ]]; then
ext=".exe"
fi
GOOS=${goos} GOARCH=${goarch} go build -o "dist/${goos}-${goarch}${ext}"
done
fi
ls -A dist >/dev/null || (echo "no files found in dist/" && exit 1)
# When a GPG key is configured, ship signed SHA-256 checksums alongside.
if [ -n "${GPG_FINGERPRINT}" ]; then
for f in $(ls dist); do
shasum -a 256 dist/$f >> checksums.txt
done
gpg --output checksums.txt.sig --detach-sign checksums.txt
include="dist/* checksums*"
fi
prerelease=""
# A hyphen in the tag (e.g. v1.2.3-rc.1) marks a pre-release.
if [[ "${tag}" =~ .*-.* ]]; then
prerelease="-p"
fi
gh api repos/$GITHUB_REPOSITORY/releases/generate-notes \
-f tag_name="${tag}" -q .body > CHANGELOG.md
gh release create $tag $prerelease --notes-file CHANGELOG.md $include
|
#!/bin/bash
# Container entrypoint for Kopano Meet: configures the requested service,
# applies KCCONF_MEET_* overrides to meet.json, then execs kopano-kwebd.
ADDITIONAL_KOPANO_PACKAGES=${ADDITIONAL_KOPANO_PACKAGES:-""}
set -eu # unset variables are errors & non-zero return values exit the whole script
# Default expansion so an unset DEBUG does not trip `set -u` and abort the
# container (previously: [ "$DEBUG" ] with no fallback).
[ "${DEBUG:-}" ] && set -x
if [ ! -e /kopano/"$SERVICE_TO_START".py ]; then
echo "Invalid service specified: $SERVICE_TO_START" | ts
exit 1
fi
echo "Configure service '$SERVICE_TO_START'" | ts
/usr/bin/python3 /kopano/"$SERVICE_TO_START".py
# allow helper commands given by "docker-compose run"
if [ $# -gt 0 ]; then
exec "$@"
exit
fi
CONFIG_JSON="/usr/share/kopano-kweb/www/config/kopano/meet.json"
echo "Updating $CONFIG_JSON"
# Every KCCONF_MEET_FOO_BAR env var becomes a nested "foo"."bar" key in the
# JSON config; booleans are written unquoted, everything else as strings.
for setting in $(compgen -A variable KCCONF_MEET); do
setting2=${setting#KCCONF_MEET_}
# dots in setting2 need to be escaped to not be handled as separate entities in the json file
case ${!setting} in
true|TRUE|false|FALSE)
jq ".\"${setting2//_/\".\"}\" = ${!setting}" $CONFIG_JSON | sponge $CONFIG_JSON
;;
*)
jq ".\"${setting2//_/\".\"}\" = \"${!setting}\"" $CONFIG_JSON | sponge $CONFIG_JSON
;;
esac
done
# enable Kopano WebApp in the app switcher
jq '.apps += {"enabled": ["kopano-webapp"]}' $CONFIG_JSON | sponge $CONFIG_JSON
#cat $CONFIG_JSON
# Normalise "key = value" to "key=value" so the cfg can be exported below.
sed -i s/\ *=\ */=/g /etc/kopano/kwebd.cfg
export tls=no
# shellcheck disable=SC2046
export $(grep -v '^#' /etc/kopano/kwebd.cfg | xargs -d '\n')
# cleaning up env variables
unset "${!KCCONF_@}"
exec kopano-kwebd serve
|
# training the neural network on the training set and evaluate it on the evaluation set
# simply run this script to train the network, no need to change anything
# the training parameters, which you may tune, are specified in nn_config.yml
# the model structure, which you may optimize, is specified in net_model/alex_net.py
# tune your neural network on the evaluation set, NOT on the test set
# you should only test the neural network on the test set once to evaluate its robustness
# the best performing model will be saved in net_weights/exp_name
# this pretrained model will be used by nn_test.py and nn_detect.py
import time
import torch
from torchvision import transforms, datasets
from torch.utils.data import DataLoader
from nn_config import NNState
class Train:
    """Train the network on the training set and evaluate it each epoch.

    Checkpoints are saved through ``NNState.save_ckpt``; the best model so far
    (by mean evaluation loss) is tracked in ``net_dict.best_acc``.
    """

    def __init__(self, params_dict=None):
        # NNState bundles the network, optimiser, criterion, LR scheduler,
        # device helpers and checkpoint bookkeeping (see nn_config.py).
        self.net_dict = NNState('train', params=params_dict)
        # Data-augmentation pipeline for TRAINING images only.
        train_transforms = transforms.Compose(
            [transforms.RandomRotation((-30, 30)),
             transforms.RandomResizedCrop((64, 64), scale=(0.7, 1.0)),
             transforms.ColorJitter(brightness=0.4, contrast=0.3,
                                    saturation=0.3, hue=0.3),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225])])
        # Deterministic pipeline for EVALUATION. The original code reused the
        # random-augmentation pipeline for the eval set, which makes the eval
        # metric noisy and non-reproducible; only resize + normalise here.
        eval_transforms = transforms.Compose(
            [transforms.Resize((64, 64)),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225])])
        self.train_data = datasets.ImageFolder('./dataset_test/train',
                                               transform=train_transforms)
        print(self.train_data.class_to_idx)
        self.eval_data = datasets.ImageFolder('./dataset_test/eval',
                                              transform=eval_transforms)

    def train(self):
        """Run the epoch loop, evaluating and checkpointing after each epoch."""
        train_loader = DataLoader(dataset=self.train_data,
                                  batch_size=self.net_dict.batch_size,
                                  shuffle=True, num_workers=4,
                                  drop_last=True)
        n_batch = len(train_loader)
        for epoch_idx in range(self.net_dict.last_epoch + 1,
                               self.net_dict.n_epochs):
            # eval() switches the net into inference mode (dropout/batch-norm
            # frozen); without switching back, every epoch after the first
            # would silently train in eval mode.
            self.net_dict.net.train()
            train_loss_buff = torch.Tensor()
            train_loss_buff = self.net_dict.to_device(train_loss_buff)
            print('\nEpoch [%d/%d]:' % (epoch_idx, self.net_dict.n_epochs))
            t_start = time.time()
            # update the network
            for i, batch in enumerate(train_loader):
                self.net_dict.optimiser.zero_grad()
                inputs, labels = batch[0], batch[1]
                inputs = self.net_dict.to_device(inputs)
                labels = self.net_dict.to_device(labels)
                # Forward
                labels_hat = self.net_dict.net.forward(inputs)
                loss = self.net_dict.criterion(labels_hat, labels)
                # Backward
                loss.backward()
                # Optimise
                self.net_dict.optimiser.step()
                train_loss_buff = torch.cat((train_loss_buff,
                                             loss.reshape(1, 1)), 0)
                if (i + 1) % 10 == 0:
                    print('[%d/%d], Itr [%d/%d], Loss: %.4f'
                          % (epoch_idx, self.net_dict.n_epochs, i,
                             n_batch, loss.item()))
            self.net_dict.lr_scheduler.step()
            avg_train_loss = torch.mean(train_loss_buff)
            print('=> Average training loss: %.4f' % avg_train_loss)
            print('Training Duration: %.3fs' % (time.time() - t_start))
            if (epoch_idx+1) % 1 == 0:
                eval_loss_mean = self.eval()
                # Save model, and best model if qualified.
                # NOTE(review): despite the name, best_acc tracks the best
                # (lowest) mean eval LOSS here — confirm against nn_config.
                delta_acc = self.net_dict.best_acc - eval_loss_mean
                if delta_acc > 0:
                    self.net_dict.best_acc = eval_loss_mean
                    self.net_dict.save_ckpt(epoch_idx, delta_acc)

    def eval(self):
        """Evaluate on the eval set; return the mean evaluation loss."""
        print('Evaluating...')
        self.net_dict.net = self.net_dict.net.eval()
        eval_loader = DataLoader(dataset=self.eval_data,
                                 batch_size=self.net_dict.batch_size,
                                 shuffle=False, num_workers=self.net_dict.num_workers,
                                 drop_last=False)
        n_batch = len(eval_loader)
        with torch.no_grad():
            eval_loss_stack = self.net_dict.to_device(torch.Tensor())
            correct = 0
            total = 0
            for i, batch in enumerate(eval_loader):
                # forward propagation
                inputs, labels = batch[0], batch[1]
                inputs = self.net_dict.to_device(inputs)
                labels = self.net_dict.to_device(labels)
                # Forward
                labels_hat = self.net_dict.net.forward(inputs)
                _, predicted = torch.max(labels_hat.data, 1)
                total += labels.size(0)
                correct += (predicted == labels).sum().item()
                loss_batch = self.net_dict.criterion(labels_hat, labels)
                eval_loss_stack = torch.cat(
                    (eval_loss_stack, loss_batch.unsqueeze(0)), 0)
                print('Batch [%d/%d], Eval Loss: %.4f'
                      % (i + 1, n_batch, loss_batch))
            eval_loss = torch.mean(eval_loss_stack)
            print('*********************************')
            print('=> Mean Evaluation Loss: %.3f' % eval_loss)
            print('=> Accuracy of the network: %d %%' % (
                100 * correct / total))
            print('*********************************')
            return eval_loss
|
<reponame>scala-steward/http4s-extend
package http4s.extend.test.types
import http4s.extend.OrBoolean
import http4s.extend.test.Fixtures.MinimalSuite
// NOTE(review): the test name says "AND" and the expected values below match
// conjunction semantics (true =||= false expected to be false), yet the type
// under test is OrBoolean and the operator is =||=. Confirm whether =||= is
// meant to be disjunction; if so, these expectations look copied from an AND
// suite and the last three should expect true/true/false instead.
final class OrBooleanDiscipline extends MinimalSuite {
test("typed AND correct evaluation"){
(OrBoolean(true) =||= OrBoolean(true)) should be(OrBoolean(true))
(OrBoolean(true) =||= OrBoolean(false)) should be(OrBoolean(false))
(OrBoolean(false) =||= OrBoolean(true)) should be(OrBoolean(false))
(OrBoolean(false) =||= OrBoolean(false)) should be(OrBoolean(false))
}
}
|
#!/bin/bash
# Install the hgvs package from PyPI.
readonly PKG_NAME="hgvs"
pip install "${PKG_NAME}"
|
def print_objects(arr, boundary):
    """Print each object in ``arr`` repeated ``boundary`` times on one line.

    Every line is terminated by a carriage return followed by a newline,
    matching the original ``print("\\r")`` behaviour.
    """
    for obj in arr:
        repeated = str(obj) * boundary
        print(repeated, end="\r\n")
print_objects(objects, 10)
|
#!/usr/bin/env bash
# Map NAIS platform variables onto the AppDynamics naming scheme.
export APPD_NAME=${NAIS_APP_NAME}
# The tier is prefixed with the namespace to keep identically-named apps
# distinguishable across namespaces.
export APPD_TIER="${NAIS_NAMESPACE}_${NAIS_APP_NAME}"
|
<reponame>SimonMonecke/go-mines<gh_stars>0
package generate
import (
"math/rand"
"time"
)
// Cell markers used in the generated string map.
const (
noMine = "."
mine = "X"
)
// EasyStringMap returns an 8x8 board with 10 mines (beginner preset).
func EasyStringMap() [][]string {
	return generateStringMap(8, 8, 10)
}

// NormalStringMap returns a 16x16 board with 40 mines (intermediate preset).
func NormalStringMap() [][]string {
	return generateStringMap(16, 16, 40)
}

// HardStringMap returns the classic expert preset: 30 columns x 16 rows with
// 99 mines. generateStringMap takes (width, height, mines) and builds a
// height x width grid, so width is 30 and height is 16; the previous call
// passed (16, 30), producing a transposed 30x16-row board.
func HardStringMap() [][]string {
	return generateStringMap(30, 16, 99)
}
// generateStringMap builds a height x width grid of noMine cells and then
// scatters the requested number of mines over it.
func generateStringMap(width, height, mines int) [][]string {
stringMap := initStringMapWithoutMines(width, height)
return placeMinesRandomly(stringMap, mines)
}
// initStringMapWithoutMines allocates a height x width grid with every cell
// initialised to the noMine marker.
func initStringMapWithoutMines(width, height int) [][]string {
	grid := make([][]string, height)
	for row := 0; row < height; row++ {
		cells := make([]string, width)
		for col := 0; col < width; col++ {
			cells[col] = noMine
		}
		grid[row] = cells
	}
	return grid
}
// placeMinesRandomly mutates stringMap in place, marking `mines` distinct
// random cells with the mine marker, and returns the same slice for
// convenience. Cells already holding a mine are re-rolled.
func placeMinesRandomly(stringMap [][]string, mines int) [][]string {
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	rows := len(stringMap)
	cols := len(stringMap[0])
	placed := 0
	for placed < mines {
		r := rng.Intn(rows)
		c := rng.Intn(cols)
		if stringMap[r][c] != noMine {
			continue
		}
		stringMap[r][c] = mine
		placed++
	}
	return stringMap
}
|
<filename>src/main/java/com/mamuya/datrastocospringbootapi/dto/StockDTO.java<gh_stars>0
package com.mamuya.datrastocospringbootapi.dto;
import com.mamuya.datrastocospringbootapi.entities.Stock;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
@Data
@AllArgsConstructor
@NoArgsConstructor
@ToString
/**
 * Transfer object for {@link Stock}: carries stock fields between the REST
 * layer and the entity, with partial-update support via the DTOValidation
 * contract. Numeric fields default to -1 as a "not provided" sentinel so the
 * hasValid overloads can distinguish unset values.
 */
public class StockDTO implements DTOValidation<Stock> {

    private int id = -1;
    private EntrepriseDTO entreprise;
    private ProductDTO product;
    // -1 sentinels: hasValid(Double) requires >= 0, so these read as "unset".
    private double stckQnty = -1;
    private double stckByprice = -1;
    private double stckSllprice = -1;

    /** Builds a DTO from the persistent entity. */
    public StockDTO(Stock stock) {
        setId(stock.getId());
        setEntreprise(new EntrepriseDTO(stock.getEntreprise()));
        setProduct(new ProductDTO(stock.getProduct()));
        setStckQnty(stock.getQuantity());
        setStckByprice(stock.getBuyingPrice());
        setStckSllprice(stock.getSellingPrice());
    }

    /** True when every mapped field passes validation (full create). */
    @Override
    public boolean hasAllValidMappings() {
        return (
            hasValid(entreprise)
            && hasValid(product)
            && hasValid(stckQnty)
            && hasValid(stckByprice)
            && hasValid(stckSllprice)
        );
    }

    /** True when at least one field passes validation (partial update). */
    @Override
    public boolean hasAnyValidMappings() {
        return (
            hasValid(entreprise)
            || hasValid(product)
            || hasValid(stckQnty)
            || hasValid(stckByprice)
            || hasValid(stckSllprice)
        );
    }

    /**
     * Creates a new entity from this DTO. Callers should check
     * hasAllValidMappings() first: entreprise/product are dereferenced here.
     */
    @Override
    public Stock createEntity() {
        Stock stock = new Stock();
        if(hasValid(id)){
            stock.setId(id);
        }
        stock.setEntreprise(entreprise.createEntity());
        stock.setProduct(product.createEntity());
        stock.setQuantity(stckQnty);
        stock.setBuyingPrice(stckByprice);
        stock.setSellingPrice(stckSllprice);
        return stock;
    }

    /** Applies only the provided (valid) fields onto an existing entity. */
    @Override
    public Stock updateEntity(Stock stock) {
        if(entreprise != null){
            stock.setEntreprise(entreprise.createEntity());
        }
        if(product != null){
            stock.setProduct(product.createEntity());
        }
        if(hasValid(stckQnty)){
            stock.setQuantity(stckQnty);
        }
        if(hasValid(stckByprice)){
            stock.setBuyingPrice(stckByprice);
        }
        if(hasValid(stckSllprice)){
            stock.setSellingPrice(stckSllprice);
        }
        return stock;
    }

    /** A string is valid when non-null and non-blank. */
    @Override
    public boolean hasValid(String item) {
        return ((item != null) && (item.trim().length() > 0));
    }

    /**
     * An id is valid when >= 1. Null guard added: a null Integer previously
     * threw NullPointerException on unboxing.
     */
    @Override
    public boolean hasValid(Integer item) {
        return (item != null) && (item >= 1);
    }

    /**
     * A quantity/price is valid when >= 0 (so the -1 sentinel reads as
     * unset). Null guard added for the same unboxing NPE risk.
     */
    public boolean hasValid(Double item) {
        return (item != null) && (item >= 0);
    }

    /** Any non-null reference is considered valid. */
    public boolean hasValid(Object item) {
        return item != null;
    }
}
|
<reponame>Arnold-Huang/QuantitativeTrading<filename>app/binance-api-library/src/main/java/com/quant/binance/beans/ExchangeSymbol.java
package com.quant.binance.beans;
import java.util.List;
/**
 * Plain data bean mirroring one entry of the Binance /exchangeInfo "symbols"
 * array; fields are populated by JSON deserialisation and exposed through
 * conventional getters/setters only (no behaviour).
 */
public class ExchangeSymbol {
// Trading pair identifier (e.g. "BTCUSDT") and its current trading status.
String symbol;
String status;
// Base/quote assets and the precision (decimal places) Binance reports for each.
String baseAsset;
int baseAssetPrecision;
String quoteAsset;
int quotePrecision;
int quoteAssetPrecision;
int baseCommissionPrecision;
int quoteCommissionPrecision;
// Order types permitted for this symbol and its trading capability flags.
String[] orderTypes;
boolean icebergAllowed;
boolean ocoAllowed;
boolean quoteOrderQtyMarketAllowed;
boolean isSpotTradingAllowed;
boolean isMarginTradingAllowed;
// Exchange-imposed trading filters (price/lot-size limits etc.) and permissions.
List<ExchangeSymbolFilter> filters;
String[] permissions;
public String getSymbol() {
return symbol;
}
public void setSymbol(String symbol) {
this.symbol = symbol;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getBaseAsset() {
return baseAsset;
}
public void setBaseAsset(String baseAsset) {
this.baseAsset = baseAsset;
}
public int getBaseAssetPrecision() {
return baseAssetPrecision;
}
public void setBaseAssetPrecision(int baseAssetPrecision) {
this.baseAssetPrecision = baseAssetPrecision;
}
public String getQuoteAsset() {
return quoteAsset;
}
public void setQuoteAsset(String quoteAsset) {
this.quoteAsset = quoteAsset;
}
public int getQuotePrecision() {
return quotePrecision;
}
public void setQuotePrecision(int quotePrecision) {
this.quotePrecision = quotePrecision;
}
public int getQuoteAssetPrecision() {
return quoteAssetPrecision;
}
public void setQuoteAssetPrecision(int quoteAssetPrecision) {
this.quoteAssetPrecision = quoteAssetPrecision;
}
public int getBaseCommissionPrecision() {
return baseCommissionPrecision;
}
public void setBaseCommissionPrecision(int baseCommissionPrecision) {
this.baseCommissionPrecision = baseCommissionPrecision;
}
public int getQuoteCommissionPrecision() {
return quoteCommissionPrecision;
}
public void setQuoteCommissionPrecision(int quoteCommissionPrecision) {
this.quoteCommissionPrecision = quoteCommissionPrecision;
}
public String[] getOrderTypes() {
return orderTypes;
}
public void setOrderTypes(String[] orderTypes) {
this.orderTypes = orderTypes;
}
public boolean isIcebergAllowed() {
return icebergAllowed;
}
public void setIcebergAllowed(boolean icebergAllowed) {
this.icebergAllowed = icebergAllowed;
}
public boolean isOcoAllowed() {
return ocoAllowed;
}
public void setOcoAllowed(boolean ocoAllowed) {
this.ocoAllowed = ocoAllowed;
}
public boolean isQuoteOrderQtyMarketAllowed() {
return quoteOrderQtyMarketAllowed;
}
public void setQuoteOrderQtyMarketAllowed(boolean quoteOrderQtyMarketAllowed) {
this.quoteOrderQtyMarketAllowed = quoteOrderQtyMarketAllowed;
}
public boolean isSpotTradingAllowed() {
return isSpotTradingAllowed;
}
public void setSpotTradingAllowed(boolean spotTradingAllowed) {
isSpotTradingAllowed = spotTradingAllowed;
}
public boolean isMarginTradingAllowed() {
return isMarginTradingAllowed;
}
public void setMarginTradingAllowed(boolean marginTradingAllowed) {
isMarginTradingAllowed = marginTradingAllowed;
}
public List<ExchangeSymbolFilter> getFilters() {
return filters;
}
public void setFilters(List<ExchangeSymbolFilter> filters) {
this.filters = filters;
}
public String[] getPermissions() {
return permissions;
}
public void setPermissions(String[] permissions) {
this.permissions = permissions;
}
}
|
#include <string>
// Returns the directory portion of a Windows-style path: everything before
// the last '\\'. If the path contains no backslash, the input is returned
// unchanged.
//
// The previous implementation walked the C string to find the last '\\',
// saved the character there, but never wrote a terminating '\0' before
// copying — so it always returned the full input — and it wrote through a
// const_cast into the string's c_str() buffer (undefined behaviour). This
// version implements the evident intent with safe std::string operations.
std::string modifyFilePath(const std::string& filePath) {
    const auto lastSlash = filePath.find_last_of('\\');
    if (lastSlash == std::string::npos) {
        return filePath;  // no separator: nothing to strip
    }
    return filePath.substr(0, lastSlash);
}
|
Create a model that uses an encoder-decoder architecture with attention to take in a sequence of words as input, encode these words into a fixed-dimensional vector representation, and use this representation to decode a sequence of words that has the same meaning as the input.
|
<gh_stars>0
package com.rsodha;
/**
 * Coach implementation whose daily workout is fixed and whose fortune is
 * delegated to an injected FortuneService.
 */
public class TennisCoach implements Coach {
// Collaborator supplying fortunes; set once via constructor injection.
private FortuneService fortuneService;
// NOTE(review): the parameter name "sadFortuneService" may be significant for
// by-name dependency injection (e.g. Spring autowiring by bean name) —
// confirm before renaming it.
public TennisCoach(FortuneService sadFortuneService) {
this.fortuneService = sadFortuneService;
}
// Returns the daily workout description.
@Override
public String getWO() {
return "Practice tennis";
}
// Delegates to the injected fortune service.
@Override
public String getFn() {
return fortuneService.getFn();
}
}
|
#!/bin/bash
# Build the WordCount distribution (including the shadow/fat jar) and unpack
# the resulting tarball in place.
set -e

current=$(pwd)
# "build" was listed twice in the original invocation; Gradle deduplicates
# repeated task names anyway, so a single mention is sufficient.
./gradlew clean build shadowJar
cd ./build/distributions/
tar -xvf WordCount-1.0.tar
# Quote the saved path so directories with spaces don't break the cd.
cd "$current"
|
#!/bin/bash
# Report available updates for Maven properties, plugins and dependencies.
# Display-only: nothing in the pom is modified.
./mvnw versions:display-property-updates
./mvnw versions:display-plugin-updates
./mvnw versions:display-dependency-updates
|
#!/bin/bash
# Builds the uutils/coreutils Rust binaries and prepares the GNU coreutils
# test suite to run against them: substitutes the uutils binaries onto PATH,
# patches individual GNU tests for known behavioural differences, and falls
# back to system utilities where a helper (not the utility under test) would
# otherwise fail.
# spell-checker:ignore (paths) abmon deref discrim eacces getlimits getopt ginstall gnulib inacc infloop inotify reflink ; (misc) INT_OFLOW OFLOW baddecode ; (vars/env) BUILDDIR SRCDIR
set -e
if test ! -d ../gnu; then
echo "Could not find ../gnu"
echo "git clone git@github.com:coreutils/coreutils.git gnu"
exit 1
fi
if test ! -d ../gnulib; then
echo "Could not find ../gnulib"
echo "git clone git@github.com:coreutils/gnulib.git gnulib"
exit 1
fi
pushd $(pwd)
make PROFILE=release
BUILDDIR="$PWD/target/release/"
cp "${BUILDDIR}/install" "${BUILDDIR}/ginstall" # The GNU tests rename this script before running, to avoid confusion with the make target
# Create *sum binaries
for sum in b2sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum
do
sum_path="${BUILDDIR}/${sum}"
test -f "${sum_path}" || cp "${BUILDDIR}/hashsum" "${sum_path}"
done
test -f "${BUILDDIR}/[" || cp "${BUILDDIR}/test" "${BUILDDIR}/["
popd
GNULIB_SRCDIR="$PWD/../gnulib"
pushd ../gnu/
# Any binaries that aren't built become `false` so their tests fail
for binary in $(./build-aux/gen-lists-of-programs.sh --list-progs)
do
bin_path="${BUILDDIR}/${binary}"
test -f "${bin_path}" || { echo "'${binary}' was not built with uutils, using the 'false' program"; cp "${BUILDDIR}/false" "${bin_path}"; }
done
./bootstrap --gnulib-srcdir="$GNULIB_SRCDIR"
./configure --quiet --disable-gcc-warnings
#Add timeout to to protect against hangs
sed -i 's|"\$@|/usr/bin/timeout 600 "\$@|' build-aux/test-driver
# Change the PATH in the Makefile to test the uutils coreutils instead of the GNU coreutils
sed -i "s/^[[:blank:]]*PATH=.*/ PATH='${BUILDDIR//\//\\/}\$(PATH_SEPARATOR)'\"\$\$PATH\" \\\/" Makefile
sed -i 's| tr | /usr/bin/tr |' tests/init.sh
make -j "$(nproc)"
# Generate the factor tests, so they can be fixed
# Used to be 36. Reduced to 20 to decrease the log size
for i in {00..20}
do
make tests/factor/t${i}.sh
done
# strip the long stuff
for i in {21..36}
do
sed -i -e "s/\$(tf)\/t${i}.sh//g" Makefile
done
grep -rl 'path_prepend_' tests/* | xargs sed -i 's| path_prepend_ ./src||'
sed -i -e 's|^seq |/usr/bin/seq |' -e 's|sha1sum |/usr/bin/sha1sum |' tests/factor/t*sh
# Remove tests checking for --version & --help
# Not really interesting for us and logs are too big
sed -i -e '/tests\/misc\/invalid-opt.pl/ D' \
-e '/tests\/misc\/help-version.sh/ D' \
-e '/tests\/misc\/help-version-getopt.sh/ D' \
Makefile
# logs are clotted because of this test
sed -i -e '/tests\/misc\/seq-precision.sh/ D' \
Makefile
# printf doesn't limit the values used in its arg, so this produced ~2GB of output
sed -i '/INT_OFLOW/ D' tests/misc/printf.sh
# Use the system coreutils where the test fails due to error in a util that is not the one being tested
sed -i 's|stat|/usr/bin/stat|' tests/chgrp/basic.sh tests/cp/existing-perm-dir.sh tests/touch/60-seconds.sh tests/misc/sort-compress-proc.sh
sed -i 's|ls -|/usr/bin/ls -|' tests/chgrp/posix-H.sh tests/chown/deref.sh tests/cp/same-file.sh tests/misc/mknod.sh tests/mv/part-symlink.sh tests/du/8gb.sh
sed -i 's|mkdir |/usr/bin/mkdir |' tests/cp/existing-perm-dir.sh tests/rm/empty-inacc.sh
sed -i 's|timeout \([[:digit:]]\)| /usr/bin/timeout \1|' tests/tail-2/inotify-rotate.sh tests/tail-2/inotify-dir-recreate.sh tests/tail-2/inotify-rotate-resources.sh tests/cp/parent-perm-race.sh tests/ls/infloop.sh tests/misc/sort-exit-early.sh tests/misc/sort-NaN-infloop.sh tests/misc/uniq-perf.sh tests/tail-2/inotify-only-regular.sh tests/tail-2/pipe-f2.sh tests/tail-2/retry.sh tests/tail-2/symlink.sh tests/tail-2/wait.sh tests/tail-2/pid.sh tests/dd/stats.sh tests/tail-2/follow-name.sh tests/misc/shuf.sh # Don't break the function called 'grep_timeout'
sed -i 's|chmod |/usr/bin/chmod |' tests/du/inacc-dir.sh tests/mkdir/p-3.sh tests/tail-2/tail-n0f.sh tests/cp/fail-perm.sh tests/du/inaccessible-cwd.sh tests/mv/i-2.sh tests/chgrp/basic.sh tests/misc/shuf.sh
sed -i 's|sort |/usr/bin/sort |' tests/ls/hyperlink.sh tests/misc/test-N.sh
sed -i 's|split |/usr/bin/split |' tests/misc/factor-parallel.sh
sed -i 's|truncate |/usr/bin/truncate |' tests/split/fail.sh
sed -i 's|dd |/usr/bin/dd |' tests/du/8gb.sh tests/tail-2/big-4gb.sh init.cfg
sed -i 's|id -|/usr/bin/id -|' tests/misc/runcon-no-reorder.sh
sed -i 's|touch |/usr/bin/touch |' tests/cp/preserve-link.sh tests/cp/reflink-perm.sh tests/ls/block-size.sh tests/ls/abmon-align.sh tests/ls/rt-1.sh tests/mv/update.sh tests/misc/ls-time.sh tests/misc/stat-nanoseconds.sh tests/misc/time-style.sh tests/misc/test-N.sh
sed -i 's|ln -|/usr/bin/ln -|' tests/cp/link-deref.sh
sed -i 's|printf |/usr/bin/printf |' tests/dd/ascii.sh
sed -i 's|cp |/usr/bin/cp |' tests/mv/hard-2.sh
sed -i 's|paste |/usr/bin/paste |' tests/misc/od-endian.sh
sed -i 's|seq |/usr/bin/seq |' tests/misc/sort-discrim.sh
# Add specific timeout to tests that currently hang to limit time spent waiting
sed -i 's|seq \$|/usr/bin/timeout 0.1 seq \$|' tests/misc/seq-precision.sh tests/misc/seq-long-double.sh
# Remove dup of /usr/bin/ when executed several times
grep -rl '/usr/bin//usr/bin/' tests/* | xargs --no-run-if-empty sed -i 's|/usr/bin//usr/bin/|/usr/bin/|g'
#### Adjust tests to make them work with Rust/coreutils
# in some cases, what we are doing in rust/coreutils is good (or better)
# we should not regress our project just to match what GNU is going.
# So, do some changes on the fly
sed -i -e "s|rm: cannot remove 'e/slink'|rm: cannot remove 'e'|g" tests/rm/fail-eacces.sh
sed -i -e "s|rm: cannot remove 'a/b/file'|rm: cannot remove 'a'|g" tests/rm/cycle.sh
sed -i -e "s|rm: cannot remove directory 'b/a/p'|rm: cannot remove 'b'|g" tests/rm/rm1.sh
sed -i -e "s|rm: cannot remove 'a/1'|rm: cannot remove 'a'|g" tests/rm/rm2.sh
sed -i -e "s|removed directory 'a/'|removed directory 'a'|g" tests/rm/v-slash.sh
test -f "${BUILDDIR}/getlimits" || cp src/getlimits "${BUILDDIR}"
# When decoding an invalid base32/64 string, gnu writes everything it was able to decode until
# it hit the decode error, while we don't write anything if the input is invalid.
sed -i "s/\(baddecode.*OUT=>\"\).*\"/\1\"/g" tests/misc/base64.pl
sed -i "s/\(\(b2[ml]_[69]\|b32h_[56]\|z85_8\|z85_35\).*OUT=>\)[^}]*\(.*\)/\1\"\"\3/g" tests/misc/basenc.pl
# add "error: " to the expected error message
sed -i "s/\$prog: invalid input/\$prog: error: invalid input/g" tests/misc/basenc.pl
# basenc: swap out error message for unexpected arg
sed -i "s/ {ERR=>\"\$prog: foobar\\\\n\" \. \$try_help }/ {ERR=>\"error: Found argument '--foobar' which wasn't expected, or isn't valid in this context\n\nUSAGE:\n basenc [OPTION]... [FILE]\n\nFor more information try --help\n\"}]/" tests/misc/basenc.pl
sed -i "s/ {ERR_SUBST=>\"s\/(unrecognized|unknown) option \[-' \]\*foobar\[' \]\*\/foobar\/\"}],//" tests/misc/basenc.pl
# Remove the check whether a util was built. Otherwise tests against utils like "arch" are not run.
sed -i "s|require_built_ |# require_built_ |g" init.cfg
|
#!/usr/bin/env bash
# DaemonSet helper: raises the user-namespace limit on the host node, then
# sleeps forever so the pod stays in the Running state.
if [[ -z "${NODE_NAME}" ]]; then
echo "NODE_NAME environment variable not set"
exit 1
fi
set -m
set -x
# NOTE(review): `chroot /host` with no command starts an interactive shell;
# the commands below only run AFTER that shell exits, i.e. NOT inside the
# chroot. Confirm whether `chroot /host /bin/sh -c '...'` (or nsenter) was
# intended here.
chroot /host
echo "user.max_user_namespaces = 31477" > /etc/sysctl.d/99-usernamespaces.conf
sysctl --system
cat /proc/sys/user/max_user_namespaces
# Park forever so the DaemonSet pod does not restart-loop.
while true; do
sleep 60
done
|
def get_inclusive_range(start, end):
    """Return the list of integers from ``start`` to ``end``, inclusive.

    An empty list is returned when ``end`` is smaller than ``start``.
    """
    return list(range(start, end + 1))
|
<reponame>glowroot/glowroot-instrumentation
package org.glowroot.instrumentation.executor;
import org.glowroot.instrumentation.api.Descriptor;
// Registers the executor instrumentation with the Glowroot engine. The class
// body is intentionally empty: the @Descriptor metadata is the entire payload.
@Descriptor(
        id = "executor",
        name = "Executor",
        classes = {
                ExecutorInstrumentation.class
        })
public class InstrumentationDescriptor {}
|
#!/bin/sh
# Runs the RdkServicesTest binary under valgrind against a locally installed
# Thunder tree, with a mock DeviceDiagnostic HTTP server in the background.
set -e
THUNDER_ROOT=$(pwd)/thunder
THUNDER_INSTALL_DIR=${THUNDER_ROOT}/install
VALGRINDLOG=$(pwd)/valgrind_log
# Returns success when the given command exists on PATH.
# NOTE(review): the case on $? is redundant — `command -v "$1"` alone carries
# the same status; kept as-is to avoid behaviour changes under `set -e`.
checkInstalled() {
command -v "$1"
case "$?" in
0) true ;;
*) false ;;
esac
}
# Start the mock server and give it a moment to bind its port.
startDummyServerDeviceDiagnostic() {
python Scripts/DeviceDiagnosticMock.py &
sleep 0.1
}
stopDummyServerDeviceDiagnostic() {
pkill -f Scripts/DeviceDiagnosticMock.py
}
if ! checkInstalled "valgrind"; then
echo "valgrind should be installed"
exit 1
fi
startDummyServerDeviceDiagnostic
# Run the tests with the Thunder install tree first on PATH/LD_LIBRARY_PATH so
# the locally built binaries and plugins are picked up.
PATH=${THUNDER_INSTALL_DIR}/usr/bin:${PATH} \
LD_LIBRARY_PATH=${THUNDER_INSTALL_DIR}/usr/lib:${THUNDER_INSTALL_DIR}/usr/lib/wpeframework/plugins:${LD_LIBRARY_PATH} \
valgrind \
--tool=memcheck \
--log-file="${VALGRINDLOG}" \
--leak-check=yes \
--show-reachable=yes \
--track-fds=yes \
--fair-sched=try \
RdkServicesTest
stopDummyServerDeviceDiagnostic
echo "==== DONE ===="
exit 0
|
# Personal macOS bootstrap: tweaks Finder/Chrome/VSCode defaults, adjusts
# groups/permissions, and installs a baseline brew package set.
echo "Install homebrew: https://brew.sh/"
# make it possible to quit the finder
defaults write com.apple.finder QuitMenuItem -bool true
killall Finder
# disable two-finger swipe navigation in Chrome http://apple.stackexchange.com/a/80163/62458
defaults write com.google.Chrome AppleEnableSwipeNavigateWithScrolls -bool FALSE
defaults write com.google.Chrome.canary AppleEnableSwipeNavigateWithScrolls -bool FALSE
defaults write com.google.Chrome AppleEnableMouseSwipeNavigateWithScrolls -bool FALSE
defaults write com.google.Chrome.canary AppleEnableMouseSwipeNavigateWithScrolls -bool FALSE
# todo:
#for x in com.microsoft.VSCode com.github.Atom com.sublimetext.3
#do
#  defaults $x ApplePressAndHoldEnabled -bool false
#done
defaults write com.microsoft.VSCode ApplePressAndHoldEnabled -bool false
# add me to wheel and _www for apache
sudo dseditgroup -o edit -a $USER -t user wheel
#sudo dseditgroup -o edit -a $USER -t user _www
# add _www to _www - wtf is this not already the case?
#sudo dseditgroup -o edit -a _www -t user _www
sudo chmod 664 /etc/hosts
# enable key-repeat for vscode vim extension
defaults write com.microsoft.VSCode ApplePressAndHoldEnabled -bool false
defaults write com.microsoft.VSCodeInsiders ApplePressAndHoldEnabled -bool false
# stop media keys ⏯ from launching itunes? https://www.howtogeek.com/274345/stop-itunes-from-launching-when-you-press-play-on-your-macs-keyboard/
launchctl unload -w /System/Library/LaunchAgents/com.apple.rcd.plist
# hmm.. this seems to have broken media keys for spotify too?
brew install ack wget zsh tree git diff-so-fancy watchman pyenv pyenv-virtualenv jq
brew cask install phoenix flux qlcolorcode qlstephen qlmarkdown quicklook-json qlprettypatch quicklook-csv betterzipql qlimagesize webpquicklook suspicious-package
echo "Install driver for xbox 360 controllers https://github.com/360Controller/360Controller/releases"
echo "Install SF Mono nerd font for terminal usage https://github.com/artofrawr/powerline-fonts/blob/master/fonts/SFMono/SF%20Mono%20Regular%20Nerd%20Font%20Complete.otf"
echo "Try to make text look better on a low-density display:"
echo "defaults -currentHost write -globalDomain AppleFontSmoothing -int 3"
echo "defaults write -g CGFontRenderingFontSmoothingDisabled -bool NO"
echo "Install nvm: https://github.com/nvm-sh/nvm#installing-and-updating"
# https://github.com/junegunn/vim-plug/wiki/tips#automatic-installation
# NOTE(review): the block below is Vim script (vim-plug auto-install), not
# bash — as written it is a syntax error when this file is run by a shell.
# It almost certainly belongs in ~/.vimrc; confirm and relocate.
if empty(glob('~/.vim/autoload/plug.vim'))
silent !curl -fLo ~/.vim/autoload/plug.vim --create-dirs
\ https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
autocmd VimEnter * PlugInstall --sync | source $MYVIMRC
endif
|
<filename>copy_test.go<gh_stars>0
// scuffold project main.go
package filegenerator
import (
"fmt"
"os"
"path/filepath"
"testing"
)
// TestCopyAdd verifies that registering a copy job for an existing source
// file succeeds.
func TestCopyAdd(t *testing.T) {
d := NewCopyGenerator()
err := d.Add("copy.go", "ToDo")
if err != nil {
t.Errorf("adding copy job failed %v", err)
} else {
t.Log("adding copy job is possible")
}
}
// TestCopyGetFiles checks that GetFiles renders a registered job as
// "[source, dest/base]" followed by a newline.
func TestCopyGetFiles(t *testing.T) {
const cfile = "copy.go"
const cdir = "master"
output := fmt.Sprintf("[%s, %s]", cfile, filepath.Join(cdir, filepath.Base(cfile)))
t.Log("separator: " + string(os.PathSeparator))
d := NewCopyGenerator()
err := d.Add("copy.go", "master")
if err != nil {
t.Errorf("adding copy job failed %v", err)
}
txt := d.GetFiles()
if txt != output+"\n" {
t.Errorf("copy job is wrong: %s expected: %s", txt, output)
} else {
t.Log("copy job is right")
}
}
// TestCopyAddFileNotExist expects Add to reject a non-existent source file.
func TestCopyAddFileNotExist(t *testing.T) {
d := NewCopyGenerator()
err := d.Add("abc.tmp", "ToDo")
if err != nil {
t.Logf("adding copy job failed %v", err)
} else {
t.Error("adding copy job is possible")
}
}
// TestCopyRun expects Run to fail by design for the "ToDo" destination.
func TestCopyRun(t *testing.T) {
d := NewCopyGenerator()
err := d.Add("copy.go", "ToDo")
if err != nil {
t.Errorf("adding copy job failed %v", err)
} else {
t.Log("adding copy job is possible")
}
err = d.Run()
if err != nil {
t.Logf("run copy job failed by design %v", err)
} else {
t.Error("run copy job must fail")
}
}
|
import {getLocalStorage, setLocalStorage} from "./storage";
import {getBrowserObject} from "./browser";
import {Options} from "./types";
//
// MousePager options page script
//
// Browser API handle (cross-browser abstraction from ./browser).
let browser = getBrowserObject();
// Factory defaults for every option; the keys double as the element ids
// of the corresponding form controls on the options page.
let defaultOptions: {[k: string]: boolean} = {
    "prim+scroll": true,
    "prim+middle+scroll": true,
    "middle+scroll": true,
    "middle+dblclick": true,
    "shift+scroll": true,
    clearselection: false,
};
// Returns a promise that always fulfills with a valid set of options.
// Reads the persisted options from local storage; on first run (empty
// storage) or on a storage failure, falls back to defaultOptions.
async function readOptions() {
    try {
        let obj = await getLocalStorage(["options"]);
        // getLocalStorage returns an empty object the first time
        if (Object.keys(obj).length === 0) {
            // Write default options if there was nothing stored before
            setLocalStorage({"options": defaultOptions});
            return defaultOptions;
        } else {
            return (obj as Options)["options"];
        }
    } catch (e) {
        // Storage failed: log the cause, and return the defaults so the
        // documented contract (always a valid option set) still holds.
        console.error("Error getting options", e);
        return defaultOptions;
    }
}
// Read one option's checkbox state from the options page; missing
// elements read as false.
function getValueOf(key: string) {
    const el = document.getElementById(key) as HTMLInputElement;
    if (!el) {
        return false;
    }
    return el.checked;
}
// Write one option value into its form control on the options page:
// checkboxes get a boolean, anything else a string.
function setValueOf(key: string, value: boolean | string) {
    const el = document.getElementById(key) as HTMLInputElement;
    if (!el) {
        console.error("couldn't find this key: " + key);
        return;
    }
    const isCheckbox = el.getAttribute("type") === "checkbox";
    if (isCheckbox) {
        el.checked = value as boolean;
    } else {
        el.value = value as string;
    }
}
// Populate every known option control on the page from the given set.
function fillOptions(options: {[k: string]: boolean}) {
    for (const key of Object.keys(defaultOptions)) {
        setValueOf(key, options[key]);
    }
}
// Click handler for the save button: collect every control's state,
// persist it, and flash the confirmation label for 2 seconds.
function onSaveClick() {
    const options: {[k: string]: boolean} = {};
    for (const key of Object.keys(defaultOptions)) {
        options[key] = getValueOf(key);
    }
    setLocalStorage({"options": options});
    const confirmation = document.querySelector("#save-confirmation")! as HTMLElement;
    confirmation.style.visibility = "visible";
    setTimeout(() => (confirmation.style.visibility = "hidden"), 2000);
}
// Options page load handler: read stored options, fill the form, and
// wire up the save button.
window.addEventListener("load", async () => {
    // Set individual values in the options page
    let o = await readOptions();
    fillOptions(o);
    // Optional chaining: silently skip if the save button is absent.
    document.getElementById("save")?.addEventListener("click", onSaveClick);
});
|
# Run the given command; when it exits non-zero, report which command
# failed on stderr and bump the global failure_counter.
# TODO: we can't give a script as input to the function (unless the script is returning something...)
check_failure() {
  "$@"
  local status=$?
  if [ "$status" -ne 0 ]; then
    echo "error with $1" >&2
    failure_counter=$((failure_counter + 1))
    echo "Failure counter: $failure_counter"
  fi
}
# Rewrite the demo's compose YAML files so every known image line points
# at the "<image>:<version>_<tag>" build selected by the environment
# (APPSAWAY_IMAGES / APPSAWAY_VERSIONS / APPSAWAY_TAGS), then return to
# the scripts directory.
function setupEnvironment {
  cd $HOME/teamcode/appsAway/demos/$APPSAWAY_APP_NAME
  # Candidate compose files; keep only the ones that actually exist here.
  yml_files_default=("main.yml" "composeGui.yml")
  yml_files_default_len=${#yml_files_default[@]}
  yml_files=()
  for (( i=0; i<$yml_files_default_len; i++ ))
  do
    if [ -f "${yml_files_default[$i]}" ]
    then
      yml_files+=(${yml_files_default[$i]})
    fi
  done
  for (( i=0; i<${#yml_files[@]}; i++ ))
  do
    # Split the space-separated env lists into parallel arrays
    # (index j pairs image <-> version <-> tag).
    if [[ $APPSAWAY_IMAGES != '' ]]
    then
      list_images=($APPSAWAY_IMAGES)
      list_versions=($APPSAWAY_VERSIONS)
      list_tags=($APPSAWAY_TAGS)
    fi
    for (( j=0; j<${#list_images[@]}; j++ ))
    do
      # In-place: "image: <name>..." -> "image: <name>:<version>_<tag>"
      sed -i 's,image: '"${list_images[$j]}"'.*$,image: '"${list_images[$j]}"':'"${list_versions[$j]}"'_'"${list_tags[$j]}"',g' ${yml_files[$i]}
    done
    # while read -r line || [ -n "$line" ]
    # do
    # done < ${yml_files[$i]}
  done
  # NOTE(review): the commented block below is Python (from a GUI tool that
  # merged variables into the .env file) pasted into this shell script for
  # reference; it is never executed here.
  # # env file is located in iCubApps folder, so we need APPSAWAY_APP_PATH
  # os.chdir(os.environ.get('APPSAWAY_APP_PATH'))
  # env_file = open(".env", "r")
  # env_list = env_file.read().split('\n')
  # env_file.close()
  # # Checking if we already have all the environment variables in the .env; if yes we overwrite them, if not we add them
  # for button in self.button_list:
  #     not_found = True
  #     not_found_path = True
  #     for i in range(len(env_list)):
  #         if button.varType == 'fileInput':
  #             if env_list[i].find(button.varName + "_PATH=") != -1 and os.environ.get(button.varName + "_PATH") != None:
  #                 env_list[i] = button.varName + "_PATH=" + os.environ.get(button.varName + "_PATH")
  #                 not_found_path = False
  #         if env_list[i].find(button.varName + "=") != -1 and os.environ.get(button.varName) != None:
  #             env_list[i] = button.varName + "=" + os.environ.get(button.varName)
  #             not_found = False
  #     if not_found and os.environ.get(button.varName) != None:
  #         env_list.insert(len(env_list), button.varName + "=" + os.environ.get(button.varName))
  #     if not_found_path and os.environ.get(button.varName + "_PATH") != None:
  #         env_list.insert(len(env_list), button.varName + "_PATH=" + os.environ.get(button.varName + "_PATH"))
  # env_file = open(".env", "w")
  # for line in env_list:
  #     env_file.write(line + '\n')
  # env_file.close()
  # os.chdir(os.path.join(os.environ.get('HOME'), "teamcode","appsAway","scripts"))
  # # now we copy all the files to their respective machines
  cd $HOME/teamcode/appsAway/scripts
}
# --- main -------------------------------------------------------------
# $1=app name  $2=console node address  $3=images  $4=versions  $5=tags
failure_counter=0
echo "Passed parameters: $1 $2 $3 $4 $5 $6"
cd $HOME/teamcode/appsAway/scripts
# Generate the local environment file from the CLI parameters ($3-$5 get
# defaults when omitted), then source it into this shell.
echo "#! /bin/bash
export APPSAWAY_APP_NAME=$1
export APPSAWAY_USER_NAME=icub
export APPSAWAY_APP_PATH=\${HOME}/iCubApps/\${APPSAWAY_APP_NAME}
export APPSAWAY_CONSOLENODE_ADDR=$2
export APPSAWAY_CONSOLENODE_USERNAME=icub
export APPSAWAY_IMAGES=${3:-\"icubteamcode/open-face\"}
export APPSAWAY_VERSIONS=${4:-\"master-unstable_master\"}
export APPSAWAY_TAGS=${5:-\"binaries\"}
export APPSAWAY_DEPLOY_YAML_FILE_LIST=main.yml
export APPSAWAY_GUI_YAML_FILE_LIST=composeGui.yml
export APPSAWAY_STACK_NAME=mystack
export APPSAWAY_NODES_NAME_LIST=\"console\"
export APPSAWAY_NODES_ADDR_LIST=\"\${APPSAWAY_GUINODE_ADDR} \${APPSAWAY_ICUBHEADNODE_ADDR} \${APPSAWAY_CONSOLENODE_ADDR} \${APPSAWAY_CUDANODE_ADDR} \${APPSAWAY_WORKERNODE_ADDR}\"
export APPSAWAY_NODES_USERNAME_LIST=\"\${APPSAWAY_GUINODE_USERNAME} \${APPSAWAY_ICUBHEADNODE_USERNAME} \${APPSAWAY_CONSOLENODE_USERNAME} \${APPSAWAY_CUDANODE_USERNAME} \${APPSAWAY_WORKERNODE_USERNAME}\" " > ./appsAway_setEnvironment.local.sh
chmod +x appsAway_setEnvironment.local.sh
source ./appsAway_setEnvironment.local.sh
echo "images: $APPSAWAY_IMAGES"
echo "versions: $APPSAWAY_VERSIONS"
echo "tags: $APPSAWAY_TAGS"
# Bring up the cluster and swarm, patch the app's .env, deploy, test,
# then tear down. Only start/test steps count toward failure_counter.
echo "about to setup the cluster..."
./appsAway_setupCluster.sh
echo "
CUSTOM_PORT=/icub/cam/left" >> $HOME/iCubApps/$APPSAWAY_APP_NAME/.env
./appsAway_setupSwarm.sh
setupEnvironment
./appsAway_copyFiles.sh
check_failure ./appsAway_startApp.sh
sleep 30
check_failure ./testApp.sh $APPSAWAY_APP_NAME $APPSAWAY_STACK_NAME
./appsAway_stopApp.sh
if (( $failure_counter > 0 ))
then
  echo "Some commands failed"
  exit 1
fi
|
package org.kettle.beam.core;
import org.junit.Test;
import java.util.Date;
import static org.junit.Assert.*;
public class KettleRowTest {

  /** Builds a KettleRow directly from the given cell values. */
  private static KettleRow row(Object... cells) {
    return new KettleRow(cells);
  }

  /**
   * equals() must treat rows with identical cell values as equal, including
   * matching nulls, and rows with any differing cell (or null vs value) as unequal.
   */
  @Test
  public void equalsTest() {
    // Identical values in every cell: equal.
    assertTrue(row("AAA", "BBB", Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))
        .equals(row("AAA", "BBB", Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))));

    // One differing string cell: not equal.
    assertFalse(row("AAA", "BBB", Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))
        .equals(row("AAA", "CCC", Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))));

    // Matching nulls in the same position: equal.
    assertTrue(row("AAA", null, Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))
        .equals(row("AAA", null, Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))));

    // Value vs null in the same position: not equal.
    assertFalse(row("AAA", "BBB", Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))
        .equals(row("AAA", null, Long.valueOf(100), Double.valueOf(1.234), new Date(876876868))));
  }

  /** Regression pin: the hash of this exact row must remain stable across releases. */
  @Test
  public void hashCodeTest() {
    assertEquals(-1023250643,
        row("AAA", "BBB", Long.valueOf(100), Double.valueOf(1.234), new Date(876876868)).hashCode());
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.location.cloud;
import java.util.Map;
import brooklyn.config.ConfigKey;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.ConfigKeys;
import brooklyn.entity.basic.EntityInternal;
import brooklyn.util.config.ConfigBag;
import brooklyn.util.text.Strings;
import brooklyn.util.text.TemplateProcessor;
import com.google.common.collect.ImmutableMap;
import com.google.common.reflect.TypeToken;
/**
 * Machine namer that renders a user-supplied Freemarker template
 * (default "${entity.displayName}") instead of the usual derived names.
 */
public class CustomMachineNamer extends CloudMachineNamer {

    /** Freemarker template used to build the machine name. */
    public static final ConfigKey<String> MACHINE_NAME_TEMPLATE = ConfigKeys.newStringConfigKey("custom.machine.namer.machine",
        "Freemarker template format for custom machine name", "${entity.displayName}");

    /** Extra variables merged into the template's substitution map. */
    @SuppressWarnings("serial")
    public static final ConfigKey<Map<String, ?>> EXTRA_SUBSTITUTIONS = ConfigKeys.newConfigKey(new TypeToken<Map<String, ?>>() {},
        "custom.machine.namer.substitutions", "Additional substitutions to be used in the template", ImmutableMap.<String, Object>of());

    public CustomMachineNamer(ConfigBag setup) {
        super(setup);
    }

    /**
     * Renders the configured template against the calling entity (when the
     * caller context is an Entity) plus any extra substitutions, strips a
     * leading "#ftl\n" directive, and sanitizes the result.
     */
    @Override
    public String generateNewMachineUniqueName() {
        Object context = setup.peek(CloudLocationConfig.CALLER_CONTEXT);
        Entity entity = (context instanceof Entity) ? (Entity) context : null;
        String template = this.setup.get(MACHINE_NAME_TEMPLATE);
        Map<String, ?> substitutions = this.setup.get(EXTRA_SUBSTITUTIONS);
        String processed = (entity == null)
            ? TemplateProcessor.processTemplateContents(template, substitutions)
            : TemplateProcessor.processTemplateContents(template, (EntityInternal) entity, substitutions);
        processed = Strings.removeFromStart(processed, "#ftl\n");
        return sanitize(processed);
    }

    /** The group id is ignored; custom names come solely from the template. */
    @Override
    public String generateNewMachineUniqueNameFromGroupId(String groupId) {
        return generateNewMachineUniqueName();
    }
}
|
#!/bin/bash
# Post-mortem triage: for every core file dumped under ./dumps/ in the
# last day, run a scripted gdb session against $binimg and write the
# report next to the core as "<core>-gdb.log".
binimg=build/dnsd
# Today and yesterdays cores. NUL-delimited find | read so that paths
# containing whitespace survive intact (unquoted $(find ...) word-splits).
find ./dumps/ -name '*.core' -mtime -1 -print0 |
while IFS= read -r -d '' core
do
    gdblogfile="$core-gdb.log"
    rm -f "$gdblogfile"
    bininfo=$(ls -l "$binimg")
    coreinfo=$(ls -l "$core")
    gdb -batch \
        -ex "set logging file $gdblogfile" \
        -ex "set logging on" \
        -ex "set pagination off" \
        -ex "printf \"**\n** Process info for $binimg - $core \n** Generated $(date)\n\"" \
        -ex "printf \"**\n** $bininfo \n** $coreinfo\n**\n\"" \
        -ex "file $binimg" \
        -ex "core-file $core" \
        -ex "bt" \
        -ex "info proc" \
        -ex "printf \"*\n* Libraries \n*\n\"" \
        -ex "info sharedlib" \
        -ex "printf \"*\n* Memory map \n*\n\"" \
        -ex "info target" \
        -ex "printf \"*\n* Registers \n*\n\"" \
        -ex "info registers" \
        -ex "printf \"*\n* Current instructions \n*\n\"" -ex "x/16i \$pc" \
        -ex "printf \"*\n* Threads (full) \n*\n\"" \
        -ex "info threads" \
        -ex "bt" \
        -ex "thread apply all bt full" \
        -ex "printf \"*\n* Threads (basic) \n*\n\"" \
        -ex "info threads" \
        -ex "thread apply all bt" \
        -ex "printf \"*\n* Done \n*\n\"" \
        -ex "quit"
done
|
#!/usr/bin/env bash
# Copyright 2018 Expedia Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Launch the anomaly-to-alert Kafka mapper via the Maven exec plugin
# (must be run from the module directory containing its pom.xml).
mvn exec:java -Dexec.mainClass="com.expedia.adaptivealerting.kafka.KafkaAnomalyToAlertMapper"
|
<filename>ebl/context.py
import attr
from falcon_auth.backends import AuthBackend
from ebl.bibliography.application.bibliography import Bibliography
from ebl.bibliography.application.bibliography_repository import BibliographyRepository
from ebl.changelog import Changelog
from ebl.corpus.infrastructure.mongo_text_repository import MongoTextRepository
from ebl.dictionary.application.word_repository import WordRepository
from ebl.ebl_ai_client import EblAiClient
from ebl.files.application.file_repository import FileRepository
from ebl.fragmentarium.application.annotations_repository import AnnotationsRepository
from ebl.fragmentarium.application.fragment_repository import FragmentRepository
from ebl.fragmentarium.application.fragment_updater import FragmentUpdater
from ebl.fragmentarium.application.transliteration_update_factory import (
TransliterationUpdateFactory,
)
from ebl.lemmatization.application.suggestion_finder import LemmaRepository
from ebl.transliteration.application.sign_repository import SignRepository
from ebl.transliteration.application.transliteration_query_factory import (
TransliterationQueryFactory,
)
@attr.s(auto_attribs=True, frozen=True)
class Context:
    """Immutable dependency container for the application.

    Holds the configured repositories, auth backend, and external clients,
    and provides factory helpers that build the services layered on top of
    them. Frozen: attributes cannot be reassigned after construction.
    """

    ebl_ai_client: EblAiClient
    auth_backend: AuthBackend
    word_repository: WordRepository
    sign_repository: SignRepository
    public_file_repository: FileRepository
    photo_repository: FileRepository
    folio_repository: FileRepository
    fragment_repository: FragmentRepository
    changelog: Changelog
    bibliography_repository: BibliographyRepository
    text_repository: MongoTextRepository
    annotations_repository: AnnotationsRepository
    lemma_repository: LemmaRepository

    def get_bibliography(self):
        """Build a Bibliography service over the bibliography repository and changelog."""
        return Bibliography(self.bibliography_repository, self.changelog)

    def get_fragment_updater(self):
        """Build a FragmentUpdater wired to the fragment repository, changelog,
        a fresh Bibliography service, and the photo repository."""
        return FragmentUpdater(
            self.fragment_repository,
            self.changelog,
            self.get_bibliography(),
            self.photo_repository,
        )

    def get_transliteration_update_factory(self):
        """Build a TransliterationUpdateFactory backed by the sign repository."""
        return TransliterationUpdateFactory(self.sign_repository)

    def get_transliteration_query_factory(self):
        """Build a TransliterationQueryFactory backed by the sign repository."""
        return TransliterationQueryFactory(self.sign_repository)
|
package com.mybatis.project.mapper;
import com.mybatis.project.po.User;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Before;
import org.junit.Test;
import java.beans.Transient;
import java.io.InputStream;
/**
* @Project: mybatis
* @description:
* @author: sunkang
* @create: 2018-10-10 14:10
* @ModificationHistory who when What
**/
/**
 * Exercises a custom MyBatis type handler: on insert, the handler is
 * expected to prefix the {@code address} column value with "address:".
 *
 * @Project: mybatis
 * @author: sunkang
 * @create: 2018-10-10 14:10
 * @ModificationHistory who        when        What
 **/
public class ExampleTypeHandlerTest {

    private SqlSessionFactory sqlSessionFactory ;

    @Before
    public void setUp() throws Exception {
        // Build the SqlSessionFactory from the MyBatis configuration file.
        String resource = "SqlMapConfig.xml";
        // Load the configuration file as a stream.
        InputStream inputStream = Resources.getResourceAsStream(resource);
        // Create the session factory from the MyBatis configuration.
        sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream);
    }

    // Tests the custom type handler: when inserting, the address field is
    // expected to be stored with an "address:" prefix added by the handler.
    @Test
    public void testHandler() throws Exception{
        SqlSession sqlSession =sqlSessionFactory.openSession();
        // Obtain a UserMapper proxy generated automatically by MyBatis.
        UserMapper userMapper = sqlSession.getMapper(UserMapper.class);
        User user = new User();
        user.setUsername("wangzhezhi");
        user.setAddress("hangzhoubingjiang");
        // Invoke the mapper method to insert the record.
        userMapper.insertUser(user);
        sqlSession.commit();
        sqlSession.close();
    }
}
|
#!/bin/bash
# Set the "Speaker" mixer volume on ALSA card 1 to a fixed level.

# Set the Speaker control on card 1 to $1 (e.g. a percentage like "50").
# amixer's stdout is discarded; errors still reach stderr.
# Quoted expansions so an empty/whitespace argument cannot word-split.
function set_volume()
{
	amixer -c 1 set Speaker "$1" >/dev/null
}

vol=50
echo "Set volume: $vol"
set_volume "$vol"
|
#!/bin/sh
# Query Google Scholar (via scholar.py) for one result (-c 1) authored by
# "albert einstein" matching the phrase "quantum theory", emitting a
# BibTeX citation (--citation bt); -ddd enables maximum debug output.
python scholar_py/scholar.py -ddd -c 1 --author "albert einstein" --phrase "quantum theory" --citation bt
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.wearable.watchface.CanvasWatchFaceService;
import android.support.wearable.watchface.WatchFaceStyle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.WindowInsets;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataItem;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.Wearable;
import java.lang.ref.WeakReference;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
/**
* Digital watch face with seconds. In ambient mode, the seconds aren't displayed. On devices with
* low-bit ambient mode, the text is drawn without anti-aliasing in ambient mode.
*/
public class MyWatchFace extends CanvasWatchFaceService {
    private static final Typeface NORMAL_TYPEFACE =
            Typeface.create(Typeface.SANS_SERIF, Typeface.NORMAL);

    /**
     * Update rate in milliseconds for interactive mode. We update once a second since seconds are
     * displayed in interactive mode.
     */
    private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.SECONDS.toMillis(1);

    /**
     * Handler message id for updating the time periodically in interactive mode.
     */
    private static final int MSG_UPDATE_TIME = 0;

    // Wearable Data API path and DataMap keys used by the phone app to push
    // weather updates (consumed in Engine.onDataChanged()).
    static final String PATH = "/weather_info";
    static final String WEATHER_ID = "WEATHER_ID";
    static final String MIN_TEMP = "MIN_TEMP";
    static final String MAX_TEMP = "MAX_TEMP";
    // NOTE(review): DATA_CHECK is not referenced anywhere in this class.
    static final String DATA_CHECK = "Checking received data";

    // Current weather art and temperature strings; defaults are set in
    // Engine.onCreate() and replaced when data arrives in onDataChanged().
    private Bitmap Icon;
    private String maxTemp;
    private String minTemp;
    // Reused Date/formatter for drawing the date line each frame.
    Date date;
    DateFormat dateFormat;
    // Client for the Wearable Data API; connected in Engine.onCreate().
    private GoogleApiClient googleApiClient;

    @Override
    public Engine onCreateEngine() {
        return new Engine();
    }

    /**
     * Static handler that holds only a weak reference to the engine so
     * pending MSG_UPDATE_TIME messages cannot leak it.
     */
    private static class EngineHandler extends Handler {
        private final WeakReference<MyWatchFace.Engine> mWeakReference;

        public EngineHandler(MyWatchFace.Engine reference) {
            mWeakReference = new WeakReference<>(reference);
        }

        @Override
        public void handleMessage(Message msg) {
            MyWatchFace.Engine engine = mWeakReference.get();
            if (engine != null) {
                switch (msg.what) {
                    case MSG_UPDATE_TIME:
                        engine.handleUpdateTimeMessage();
                        break;
                }
            }
        }
    }

    // Watch face engine: draws the face and listens for weather data pushed
    // over the Wearable Data API.
    private class Engine extends CanvasWatchFaceService.Engine implements DataApi.DataListener,
            GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {
        final Handler mUpdateTimeHandler = new EngineHandler(this);
        boolean mRegisteredTimeZoneReceiver = false;
        // Paints for each drawn element; created in onCreate(), sized in
        // onApplyWindowInsets().
        Paint backgroundPaint;
        Paint textPaint;
        Paint hourPaint;
        Paint minPaint;
        Paint datePaint;
        Paint maxTempPaint;
        Paint minTempPaint;
        boolean mAmbient;
        Calendar calendar;
        // Re-reads the device time zone whenever it changes while visible.
        final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                calendar.setTimeZone(TimeZone.getDefault());
                invalidate();
            }
        };
        // Layout offsets loaded from dimension resources.
        float XOffset;
        float timeYOffset;
        float dateYOffset;
        float lineSeparatorYOffset;
        float weatherIconYOffset;
        float weatherYOffset;

        /**
         * Whether the display supports fewer bits for each color in ambient mode. When true, we
         * disable anti-aliasing in ambient mode.
         */
        boolean mLowBitAmbient;

        @Override
        public void onCreate(SurfaceHolder holder) {
            super.onCreate(holder);

            // Connect to the Wearable Data API so weather updates arrive.
            googleApiClient = new GoogleApiClient.Builder(MyWatchFace.this).addApi(Wearable.API)
                    .addConnectionCallbacks(this).addOnConnectionFailedListener(this)
                    .build();
            googleApiClient.connect();

            setWatchFaceStyle(new WatchFaceStyle.Builder(MyWatchFace.this)
                    .setCardPeekMode(WatchFaceStyle.PEEK_MODE_VARIABLE)
                    .setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
                    .setShowSystemUiTime(false)
                    .setAcceptsTapEvents(true)
                    .build());

            // Load layout offsets and build the paints.
            Resources resources = MyWatchFace.this.getResources();
            timeYOffset = resources.getDimension(R.dimen.time_y_offset);
            dateYOffset = resources.getDimension(R.dimen.date_offset);
            lineSeparatorYOffset = resources.getDimension(R.dimen.separator_y_offset);
            weatherIconYOffset = resources.getDimension(R.dimen.weather_icon_y_offset);
            weatherYOffset = resources.getDimension(R.dimen.weather_y_offset);
            backgroundPaint = new Paint();
            backgroundPaint.setColor(resources.getColor(R.color.background));
            textPaint = new Paint();
            textPaint = createTextPaint(resources.getColor(R.color.primary_text));
            datePaint = createTextPaint(resources.getColor(R.color.primary_text));
            hourPaint = createTextPaint(resources.getColor(R.color.primary_text));
            minPaint = createTextPaint(resources.getColor(R.color.primary_text));
            maxTempPaint = createTextPaint(resources.getColor(R.color.primary_text));
            minTempPaint = createTextPaint(resources.getColor(R.color.primary_text));

            // Placeholder weather art/temps until real data arrives.
            Icon = BitmapFactory.decodeResource(getResources(),R.drawable.art_clear);
            calendar = Calendar.getInstance();
            date = new Date();
            dateFormat = new SimpleDateFormat("EEE, MMM d yyyy", Locale.getDefault());
            dateFormat.setCalendar(calendar);
            minTemp = getString(R.string.default_temp);
            maxTemp = getString(R.string.default_temp);
        }

        @Override
        public void onDestroy() {
            mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
            super.onDestroy();
        }

        // Anti-aliased paint with the default typeface.
        private Paint createTextPaint(int textColor) {
            return createTextPaint(textColor,NORMAL_TYPEFACE);
        }

        private Paint createTextPaint(int textColor, Typeface typeface) {
            Paint paint = new Paint();
            paint.setColor(textColor);
            paint.setTypeface(typeface);
            paint.setAntiAlias(true);
            return paint;
        }

        @Override
        public void onVisibilityChanged(boolean visible) {
            super.onVisibilityChanged(visible);

            if (visible) {
                registerReceiver();
                // Update time zone in case it changed while we weren't visible.
                calendar.setTimeZone(TimeZone.getDefault());
                invalidate();
            } else {
                unregisterReceiver();
                // Stop listening for data and drop the connection while hidden.
                if (googleApiClient != null && googleApiClient.isConnected()) {
                    Wearable.DataApi.removeListener(googleApiClient, this);
                    googleApiClient.disconnect();
                }
            }

            updateTimer();
        }

        private void registerReceiver() {
            if (mRegisteredTimeZoneReceiver) {
                return;
            }
            mRegisteredTimeZoneReceiver = true;
            IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
            MyWatchFace.this.registerReceiver(mTimeZoneReceiver, filter);
        }

        private void unregisterReceiver() {
            if (!mRegisteredTimeZoneReceiver) {
                return;
            }
            mRegisteredTimeZoneReceiver = false;
            MyWatchFace.this.unregisterReceiver(mTimeZoneReceiver);
        }

        @Override
        public void onApplyWindowInsets(WindowInsets insets) {
            super.onApplyWindowInsets(insets);

            // Load resources that have alternate values for round watches.
            Resources resources = MyWatchFace.this.getResources();
            XOffset = resources.getDimension(R.dimen.x_offset);
            float textSize = resources.getDimension(R.dimen.text_size);
            float dateTextSize = resources.getDimension(R.dimen.date_text_size);
            float tempTextSize = resources.getDimension(R.dimen.temp_text_size);
            hourPaint.setTextSize(textSize);
            minPaint.setTextSize(textSize);
            datePaint.setTextSize(dateTextSize);
            textPaint.setTextSize(textSize);
            maxTempPaint.setTextSize(tempTextSize);
            minTempPaint.setTextSize(tempTextSize);
        }

        @Override
        public void onPropertiesChanged(Bundle properties) {
            super.onPropertiesChanged(properties);
            mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
        }

        @Override
        public void onTimeTick() {
            super.onTimeTick();
            invalidate();
        }

        @Override
        public void onAmbientModeChanged(boolean inAmbientMode) {
            super.onAmbientModeChanged(inAmbientMode);
            if (mAmbient != inAmbientMode) {
                mAmbient = inAmbientMode;
                // On low-bit displays, anti-aliasing is disabled in ambient.
                if (mLowBitAmbient) {
                    textPaint.setAntiAlias(!inAmbientMode);
                    datePaint.setAntiAlias(!inAmbientMode);
                    hourPaint.setAntiAlias(!inAmbientMode);
                    minPaint.setAntiAlias(!inAmbientMode);
                    maxTempPaint.setAntiAlias(!inAmbientMode);
                    minTempPaint.setAntiAlias(!inAmbientMode);
                }
                invalidate();
            }

            // Whether the timer should be running depends on whether we're visible (as well as
            // whether we're in ambient mode), so we may need to start or stop the timer.
            updateTimer();
        }

        /**
         * Captures tap event (and tap type) and toggles the background color if the user finishes
         * a tap.
         */
        @Override
        public void onTapCommand(int tapType, int x, int y, long eventTime) {
            switch (tapType) {
                case TAP_TYPE_TOUCH:
                    // The user has started touching the screen.
                    break;
                case TAP_TYPE_TOUCH_CANCEL:
                    // The user has started a different gesture or otherwise cancelled the tap.
                    break;
                case TAP_TYPE_TAP:
                    // The user has completed the tap gesture.
                    // TODO: Add code to handle the tap gesture.
                    Toast.makeText(getApplicationContext(), R.string.message, Toast.LENGTH_SHORT)
                            .show();
                    break;
            }
            invalidate();
        }

        @Override
        public void onDraw(Canvas canvas, Rect bounds) {
            // Draw the background.
            if (isInAmbientMode()) {
                canvas.drawColor(Color.BLACK);
            } else {
                canvas.drawRect(0, 0, bounds.width(), bounds.height(), backgroundPaint);
            }

            // Time line: hour and minute centered as a pair around centerX.
            long now = System.currentTimeMillis();
            calendar.setTimeInMillis(now);
            date.setTime(now);
            int hour = calendar.get(Calendar.HOUR_OF_DAY);
            int minute = calendar.get(Calendar.MINUTE);
            String hourText = String.format("%02d:", hour);
            String minuteText = String.format("%02d", minute);
            float centerX = bounds.centerX();
            float hourSize = hourPaint.measureText(hourText);
            float minuteSize = minPaint.measureText(minuteText);
            float hourXOffset = centerX - (hourSize + minuteSize)/2;
            float minuteXOffset = centerX + (hourSize - minuteSize)/2;
            canvas.drawText(hourText, hourXOffset, timeYOffset, hourPaint);
            canvas.drawText(minuteText, minuteXOffset, timeYOffset, minPaint);

            // Date line and separator, both centered.
            String dateString = dateFormat.format(date);
            canvas.drawText(dateString, centerX - datePaint.measureText(dateString)/2, dateYOffset, datePaint);
            canvas.drawLine(bounds.centerX() - 25, lineSeparatorYOffset, bounds.centerX() + 25, lineSeparatorYOffset, datePaint);

            // Weather row: icon (interactive mode only), max temp, min temp.
            Bitmap resizedBitmap = Bitmap.createScaledBitmap(Icon, 40, 40, true);
            String maxTempString = maxTemp;
            String minTempString = minTemp;
            Log.v("Logging here", minTemp + " and " + maxTemp);
            float maxTempMeasureText = maxTempPaint.measureText(maxTempString);
            float maxTempXPosition = centerX - maxTempPaint.measureText(maxTempString) / 2;
            float minTempXPosition = maxTempXPosition + maxTempMeasureText + 10;
            if (!isInAmbientMode()) {
                float iconXPosition = maxTempXPosition - (resizedBitmap.getWidth() + 10);
                canvas.drawBitmap(resizedBitmap, iconXPosition, weatherIconYOffset, new Paint());
            }
            canvas.drawText(maxTempString, maxTempXPosition, weatherYOffset, maxTempPaint);
            canvas.drawText(minTempString, minTempXPosition, weatherYOffset, minTempPaint);
        }

        /**
         * Starts the {@link #mUpdateTimeHandler} timer if it should be running and isn't currently
         * or stops it if it shouldn't be running but currently is.
         */
        private void updateTimer() {
            mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
            if (shouldTimerBeRunning()) {
                mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME);
            }
        }

        /**
         * Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer should
         * only run when we're visible and in interactive mode.
         */
        private boolean shouldTimerBeRunning() {
            return isVisible() && !isInAmbientMode();
        }

        /**
         * Handle updating the time periodically in interactive mode.
         */
        private void handleUpdateTimeMessage() {
            invalidate();
            if (shouldTimerBeRunning()) {
                // Schedule the next tick aligned to the update interval.
                long timeMs = System.currentTimeMillis();
                long delayMs = INTERACTIVE_UPDATE_RATE_MS
                        - (timeMs % INTERACTIVE_UPDATE_RATE_MS);
                mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, delayMs);
            }
        }

        @Override
        public void onConnected(@Nullable Bundle bundle) {
            // Start receiving data-layer events once connected.
            Wearable.DataApi.addListener(googleApiClient, Engine.this);
            Log.v("Wearable connection","Connected");
        }

        @Override
        public void onConnectionSuspended(int i) {
            Log.v("Wearable connection","Suspended");
        }

        @Override
        public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
            Log.v("Wearable connection","Failed");
        }

        // Consumes weather updates pushed on PATH: replaces the icon and
        // temperature strings, then redraws.
        @Override
        public void onDataChanged(DataEventBuffer dataEventBuffer) {
            for (DataEvent dataEvent: dataEventBuffer) {
                if(dataEvent.getType() == DataEvent.TYPE_CHANGED) {
                    DataItem dataItem = dataEvent.getDataItem();
                    if(dataItem.getUri().getPath().equals(PATH)) {
                        DataMap dataMap = DataMapItem.fromDataItem(dataItem).getDataMap();
                        int weatherId = dataMap.getInt(WEATHER_ID);
                        Icon = BitmapFactory.decodeResource(getResources(), IconHelper.getArtResourceForWeatherCondition(weatherId));
                        maxTemp = dataMap.getString(MAX_TEMP);
                        minTemp = dataMap.getString(MIN_TEMP);
                        Log.v("Received ", maxTemp + " " + minTemp + " " + weatherId);
                        invalidate();
                    }
                }
            }
        }
    }
}
|
def snapshot(ui, repo, files, node, tmproot):
    '''snapshot files as of some revision

    Copies each requested file from revision ``node`` of ``repo`` into a
    fresh temporary directory under ``tmproot``, preserving relative
    paths, and returns that directory.

    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.

    Raises ValueError if ``node`` is not a revision in ``repo``; any
    failure while copying cleans up the partial snapshot and re-raises.
    '''
    # Check if the specified revision node exists in the repository
    if node not in repo:
        raise ValueError("Invalid revision node specified")
    # Create a temporary directory for the snapshot
    snapshot_dir = tempfile.mkdtemp(dir=tmproot)
    try:
        # Copy each file's revision contents, recreating its relative
        # directory layout inside the snapshot directory.
        for file_path in files:
            file_contents = repo[node][file_path].data()
            snapshot_file_path = os.path.join(snapshot_dir, file_path)
            os.makedirs(os.path.dirname(snapshot_file_path), exist_ok=True)
            with open(snapshot_file_path, 'wb') as snapshot_file:
                snapshot_file.write(file_contents)
        # Return the path to the created snapshot directory
        return snapshot_dir
    except Exception:
        # Clean up the temporary directory, then re-raise with the
        # original traceback (bare `raise` preserves it; `raise e` would
        # rewrite the traceback from this frame).
        shutil.rmtree(snapshot_dir)
        raise
|
#!/bin/bash
# Kiosk startup: refresh the meeting-display image, start the backend,
# launch Chromium full-screen on the attached panel, and keep the session
# alive by cycling tabs every 15 seconds.
# Update the docker image
docker pull mattrayner/meeting-display:latest
# Re-set permissions for backlight
sudo chmod 777 /sys/class/backlight/rpi_backlight/brightness
sudo chmod 777 /sys/class/backlight/rpi_backlight/bl_power
# Launch the backend service
docker-compose -f /home/pi/meeting-display/docker-compose.meeting-display.yml up -d
# Run this script in display 0 - the monitor
export DISPLAY=:0
# Hide the mouse from the display
unclutter &
# If Chrome crashes (usually due to rebooting), clear the crash flag so we don't have the annoying warning bar
sed -i 's/"exited_cleanly":false/"exited_cleanly":true/' /home/pi/.config/chromium/Default/Preferences
sed -i 's/"exit_type":"Crashed"/"exit_type":"Normal"/' /home/pi/.config/chromium/Default/Preferences
# Run Chromium and open tabs
#
# NOTE: --check-fo-update-interval is set as a workaround. The raspbian chromium will often be older than the
# main chromium branch. We update nightly so can we assured we are running the latest available, but sometimes
# a pop up will appear asking you to download a new version.
/usr/bin/chromium-browser --check-for-update-interval=604800 --no-first-run --no-default-browser-check --window-size=800,480 --kiosk --window-position=0,0 http://localhost:4567/index.html &
# Start the kiosk loop. This keystroke changes the Chromium tab
# To have just anti-idle, use this line instead:
# xdotool keydown ctrl; xdotool keyup ctrl;
# Otherwise, the ctrl+Tab is designed to switch tabs in Chrome
# xdotool keydown ctrl+Tab; xdotool keyup ctrl+Tab;
# #
# This loop never exits, so nothing after it is ever executed.
while (true)
do
  xdotool keydown ctrl; xdotool keyup ctrl;
  sleep 15
done
# NOTE(review): stray "EOT" below is unreachable (the loop above never
# exits) — presumably a leftover heredoc terminator from a provisioning
# script this file was extracted from; confirm and remove.
EOT
|
<filename>controllers/powerpod_controller_test.go
package controllers
import (
"context"
"fmt"
"reflect"
"testing"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/resource"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/kubernetes/scheme"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/client/fake"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
powerv1alpha1 "gitlab.devtools.intel.com/OrchSW/CNO/power-operator.git/api/v1alpha1"
"gitlab.devtools.intel.com/OrchSW/CNO/power-operator.git/pkg/podresourcesclient"
"gitlab.devtools.intel.com/OrchSW/CNO/power-operator.git/pkg/podstate"
grpc "google.golang.org/grpc"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
podresourcesapi "k8s.io/kubelet/pkg/apis/podresources/v1"
)
// Shared fixtures for the PowerPod reconciler tests: the pod name and the
// namespace used when seeding objects and building reconcile requests.
const (
	PowerPodName      = "TestPowerPod"
	PowerPodNamespace = "default"
)
// fakePodResourcesClient is a stub for the kubelet pod-resources lister API
// client; its List method always returns the canned listResponse.
type fakePodResourcesClient struct {
	listResponse *podresourcesapi.ListPodResourcesResponse
}
// List satisfies the pod-resources lister client interface by returning the
// pre-configured response; the request and call options are ignored.
func (f *fakePodResourcesClient) List(ctx context.Context, in *podresourcesapi.ListPodResourcesRequest, opts ...grpc.CallOption) (*podresourcesapi.ListPodResourcesResponse, error) {
	return f.listResponse, nil
}
// createPowerPodReconcilerObject builds a PowerPodReconciler backed by a fake
// client pre-populated with objs, a scheme that knows the powerv1alpha1
// types, and a fresh pod state tracker.
func createPowerPodReconcilerObject(objs []runtime.Object) (*PowerPodReconciler, error) {
	// Register the operator's API types so the fake client can serve them.
	s := scheme.Scheme
	if err := powerv1alpha1.AddToScheme(s); err != nil {
		return nil, err
	}
	s.AddKnownTypes(powerv1alpha1.GroupVersion)
	cl := fake.NewFakeClient(objs...)
	state, err := podstate.NewState()
	if err != nil {
		return nil, err
	}
	// Logger name corrected from "PowerWorkload" to "PowerPod": this helper
	// wires up the PowerPod reconciler, not the PowerWorkload one.
	r := &PowerPodReconciler{
		Client: cl,
		Log:    ctrl.Log.WithName("controllers").WithName("PowerPod"),
		Scheme: s,
		State:  *state,
	}
	return r, nil
}
// createFakePodResourcesListerClient wraps the given canned List response in
// a PodResourcesClient backed by the fakePodResourcesClient stub.
func createFakePodResourcesListerClient(listResponse *podresourcesapi.ListPodResourcesResponse) *podresourcesclient.PodResourcesClient {
	stub := &fakePodResourcesClient{listResponse: listResponse}
	return &podresourcesclient.PodResourcesClient{Client: stub}
}
// powerPodTestContainer builds a container spec with guaranteed (limits ==
// requests) resources of 2 CPUs and 200 memory units; when powerResource is
// non-empty, the named power.intel.com extended resource (quantity 2) is
// added to both limits and requests.
func powerPodTestContainer(name string, powerResource string) corev1.Container {
	newResourceList := func() map[corev1.ResourceName]resource.Quantity {
		rl := map[corev1.ResourceName]resource.Quantity{
			corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
			corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
		}
		if powerResource != "" {
			rl[corev1.ResourceName(powerResource)] = *resource.NewQuantity(2, resource.DecimalSI)
		}
		return rl
	}
	return corev1.Container{
		Name: name,
		Resources: corev1.ResourceRequirements{
			// Separate map instances so Limits and Requests do not alias.
			Limits:   newResourceList(),
			Requests: newResourceList(),
		},
	}
}

// powerPodTestPod wraps the given containers and statuses in a Running,
// Guaranteed-QoS pod scheduled on example-node1.
func powerPodTestPod(meta metav1.ObjectMeta, containers []corev1.Container, statuses []corev1.ContainerStatus) corev1.Pod {
	return corev1.Pod{
		ObjectMeta: meta,
		Spec: corev1.PodSpec{
			NodeName:   "example-node1",
			Containers: containers,
		},
		Status: corev1.PodStatus{
			Phase:             corev1.PodRunning,
			QOSClass:          corev1.PodQOSGuaranteed,
			ContainerStatuses: statuses,
		},
	}
}

// powerPodTestNode returns the node all test pods are scheduled on.
func powerPodTestNode() *corev1.Node {
	return &corev1.Node{
		ObjectMeta: metav1.ObjectMeta{Name: "example-node1"},
	}
}

// powerPodTestProfiles returns the node-local "performance-example-node1"
// PowerProfile (Max 3200 / Min 2800) shared by most test cases below.
func powerPodTestProfiles() *powerv1alpha1.PowerProfileList {
	return &powerv1alpha1.PowerProfileList{
		Items: []powerv1alpha1.PowerProfile{
			{
				ObjectMeta: metav1.ObjectMeta{Name: "performance-example-node1", Namespace: PowerPodNamespace},
				Spec: powerv1alpha1.PowerProfileSpec{
					Name: "performance-example-node1",
					Max:  3200,
					Min:  2800,
					Epp:  "performance",
				},
			},
		},
	}
}

// TestPodReconcileNewWorkloadCreated drives the PowerPod reconciler over sets
// of Guaranteed pods that request power.intel.com extended resources and
// verifies that a single PowerWorkload is created with the expected
// containers, exclusive CPU ids and PowerProfile.
//
// Refactor notes: the previously duplicated pod/node/profile literals are
// built via the powerPodTest* helpers above; the unused `resources` struct
// field was removed; t.Fatal(fmt.Sprintf(...)) became t.Fatalf(...).
func TestPodReconcileNewWorkloadCreated(t *testing.T) {
	tcases := []struct {
		testCase                                string
		pods                                    *corev1.PodList
		node                                    *corev1.Node
		powerProfiles                           *powerv1alpha1.PowerProfileList
		podResources                            []podresourcesapi.PodResources
		containerResources                      map[string][]podresourcesapi.ContainerResources
		expectedNumberOfPowerWorkloads          int
		expectedPowerWorkloadName               string
		expectedNumberOfPowerWorkloadContainers int
		expectedPowerWorkloadContainerCpuIds    map[string][]int
		expectedPowerWorkloadCpuIds             []int
		expectedPowerWorkloadPowerProfile       string
	}{
		{
			// Single pod, single container requesting the node-local profile.
			testCase: "Test Case 1",
			pods: &corev1.PodList{Items: []corev1.Pod{
				powerPodTestPod(
					metav1.ObjectMeta{Name: "example-pod", Namespace: PowerPodNamespace, UID: "abcdefg"},
					[]corev1.Container{
						powerPodTestContainer("example-container-1", "power.intel.com/performance-example-node1"),
					},
					[]corev1.ContainerStatus{
						{Name: "example-container-1", ContainerID: "docker://abcdefg"},
					},
				),
			}},
			node:          powerPodTestNode(),
			powerProfiles: powerPodTestProfiles(),
			podResources: []podresourcesapi.PodResources{
				{Name: "example-pod", Containers: []*podresourcesapi.ContainerResources{}},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{Name: "example-container-1", CpuIds: []int64{1, 2}},
				},
			},
			expectedNumberOfPowerWorkloads:          1,
			expectedPowerWorkloadName:               "performance-example-node1-workload",
			expectedNumberOfPowerWorkloadContainers: 1,
			expectedPowerWorkloadContainerCpuIds: map[string][]int{
				"example-container-1": {1, 2},
			},
			expectedPowerWorkloadCpuIds:       []int{1, 2},
			expectedPowerWorkloadPowerProfile: "performance-example-node1",
		},
		{
			// Two containers in one pod; both join the same workload.
			testCase: "Test Case 2",
			pods: &corev1.PodList{Items: []corev1.Pod{
				powerPodTestPod(
					metav1.ObjectMeta{Name: "example-pod", Namespace: PowerPodNamespace, UID: "abcdefg"},
					[]corev1.Container{
						powerPodTestContainer("example-container-1", "power.intel.com/performance-example-node1"),
						powerPodTestContainer("example-container-2", "power.intel.com/performance-example-node1"),
					},
					[]corev1.ContainerStatus{
						{Name: "example-container-1", ContainerID: "docker://abcdefg"},
						{Name: "example-container-2", ContainerID: "docker://hijklmnop"},
					},
				),
			}},
			node:          powerPodTestNode(),
			powerProfiles: powerPodTestProfiles(),
			podResources: []podresourcesapi.PodResources{
				{Name: "example-pod", Containers: []*podresourcesapi.ContainerResources{}},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{Name: "example-container-1", CpuIds: []int64{1, 2}},
					{Name: "example-container-2", CpuIds: []int64{3, 4}},
				},
			},
			expectedNumberOfPowerWorkloads:          1,
			expectedPowerWorkloadName:               "performance-example-node1-workload",
			expectedNumberOfPowerWorkloadContainers: 2,
			expectedPowerWorkloadContainerCpuIds: map[string][]int{
				"example-container-1": {1, 2},
				"example-container-2": {3, 4},
			},
			expectedPowerWorkloadCpuIds:       []int{1, 2, 3, 4},
			expectedPowerWorkloadPowerProfile: "performance-example-node1",
		},
		{
			// Two pods sharing one profile; their containers are merged into
			// a single workload.
			testCase: "Test Case 3",
			pods: &corev1.PodList{Items: []corev1.Pod{
				powerPodTestPod(
					metav1.ObjectMeta{Name: "example-pod", Namespace: PowerPodNamespace, UID: "abcdefg"},
					[]corev1.Container{
						powerPodTestContainer("example-container-1", "power.intel.com/performance-example-node1"),
						powerPodTestContainer("example-container-2", "power.intel.com/performance-example-node1"),
					},
					[]corev1.ContainerStatus{
						{Name: "example-container-1", ContainerID: "docker://abcdefg"},
						{Name: "example-container-2", ContainerID: "docker://hijklmnop"},
					},
				),
				powerPodTestPod(
					metav1.ObjectMeta{Name: "example-pod2", Namespace: PowerPodNamespace, UID: "efghijk"},
					[]corev1.Container{
						powerPodTestContainer("example-container-3", "power.intel.com/performance-example-node1"),
					},
					[]corev1.ContainerStatus{
						{Name: "example-container-3", ContainerID: "docker://defg"},
					},
				),
			}},
			node:          powerPodTestNode(),
			powerProfiles: powerPodTestProfiles(),
			podResources: []podresourcesapi.PodResources{
				{Name: "example-pod", Containers: []*podresourcesapi.ContainerResources{}},
				{Name: "example-pod2", Containers: []*podresourcesapi.ContainerResources{}},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{Name: "example-container-1", CpuIds: []int64{1, 2}},
					{Name: "example-container-2", CpuIds: []int64{3, 4}},
				},
				"example-pod2": {
					{Name: "example-container-3", CpuIds: []int64{5, 6}},
				},
			},
			expectedNumberOfPowerWorkloads:          1,
			expectedPowerWorkloadName:               "performance-example-node1-workload",
			expectedNumberOfPowerWorkloadContainers: 3,
			expectedPowerWorkloadContainerCpuIds: map[string][]int{
				"example-container-1": {1, 2},
				"example-container-2": {3, 4},
				"example-container-3": {5, 6},
			},
			expectedPowerWorkloadCpuIds:       []int{1, 2, 3, 4, 5, 6},
			expectedPowerWorkloadPowerProfile: "performance-example-node1",
		},
		{
			// Second container requests no power resource, so only the first
			// container joins the workload.
			testCase: "Test Case 4",
			pods: &corev1.PodList{Items: []corev1.Pod{
				powerPodTestPod(
					metav1.ObjectMeta{Name: "example-pod", Namespace: PowerPodNamespace, UID: "abcdefg"},
					[]corev1.Container{
						powerPodTestContainer("example-container-1", "power.intel.com/performance-example-node1"),
						powerPodTestContainer("example-container-2", ""),
					},
					[]corev1.ContainerStatus{
						{Name: "example-container-1", ContainerID: "docker://abcdefg"},
						{Name: "example-container-2", ContainerID: "docker://hijklmnop"},
					},
				),
			}},
			node:          powerPodTestNode(),
			powerProfiles: powerPodTestProfiles(),
			podResources: []podresourcesapi.PodResources{
				{Name: "example-pod", Containers: []*podresourcesapi.ContainerResources{}},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{Name: "example-container-1", CpuIds: []int64{1, 2}},
					{Name: "example-container-2", CpuIds: []int64{3, 4}},
				},
			},
			expectedNumberOfPowerWorkloads:          1,
			expectedPowerWorkloadName:               "performance-example-node1-workload",
			expectedNumberOfPowerWorkloadContainers: 1,
			expectedPowerWorkloadContainerCpuIds: map[string][]int{
				"example-container-1": {1, 2},
			},
			expectedPowerWorkloadCpuIds:       []int{1, 2},
			expectedPowerWorkloadPowerProfile: "performance-example-node1",
		},
		{
			// The pod asks for the cluster-wide "performance" resource, yet
			// the expected workload/profile is the node-local variant.
			testCase: "Test Case 5",
			pods: &corev1.PodList{Items: []corev1.Pod{
				powerPodTestPod(
					metav1.ObjectMeta{Name: "example-pod", Namespace: PowerPodNamespace, UID: "abcdefg"},
					[]corev1.Container{
						powerPodTestContainer("example-container-1", "power.intel.com/performance"),
					},
					[]corev1.ContainerStatus{
						{Name: "example-container-1", ContainerID: "docker://abcdefg"},
					},
				),
			}},
			node: powerPodTestNode(),
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{Name: "performance", Namespace: PowerPodNamespace},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance",
							Epp:  "performance",
						},
					},
					{
						ObjectMeta: metav1.ObjectMeta{Name: "performance-example-node1", Namespace: PowerPodNamespace},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
				},
			},
			podResources: []podresourcesapi.PodResources{
				{Name: "example-pod", Containers: []*podresourcesapi.ContainerResources{}},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{Name: "example-container-1", CpuIds: []int64{1, 2}},
				},
			},
			expectedNumberOfPowerWorkloads:          1,
			expectedPowerWorkloadName:               "performance-example-node1-workload",
			expectedNumberOfPowerWorkloadContainers: 1,
			expectedPowerWorkloadContainerCpuIds: map[string][]int{
				"example-container-1": {1, 2},
			},
			expectedPowerWorkloadCpuIds:       []int{1, 2},
			expectedPowerWorkloadPowerProfile: "performance-example-node1",
		},
	}
	for _, tc := range tcases {
		// The reconciler discovers its own node through NODE_NAME.
		t.Setenv("NODE_NAME", tc.node.Name)
		// Seed the fake client with the pods, the node and the profiles.
		objs := make([]runtime.Object, 0)
		for i := range tc.pods.Items {
			objs = append(objs, &tc.pods.Items[i])
		}
		objs = append(objs, tc.node)
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		// Wire the canned kubelet pod-resources response into the reconciler.
		fakePodResources := []*podresourcesapi.PodResources{}
		for i := range tc.podResources {
			fakeContainers := []*podresourcesapi.ContainerResources{}
			for j := range tc.containerResources[tc.podResources[i].Name] {
				fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
			}
			tc.podResources[i].Containers = fakeContainers
			fakePodResources = append(fakePodResources, &tc.podResources[i])
		}
		fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
			PodResources: fakePodResources,
		}
		podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
		r.PodResourcesClient = *podResourcesClient
		// Reconcile every pod in this test case.
		for i := range tc.pods.Items {
			req := reconcile.Request{
				NamespacedName: client.ObjectKey{
					Name:      tc.pods.Items[i].Name,
					Namespace: PowerPodNamespace,
				},
			}
			_, err = r.Reconcile(req)
			if err != nil {
				t.Error(err)
				t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
			}
		}
		// Verify the resulting PowerWorkload objects.
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkload list object", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
		powerWorkload := &powerv1alpha1.PowerWorkload{}
		err = r.Client.Get(context.TODO(), client.ObjectKey{
			Name:      tc.expectedPowerWorkloadName,
			Namespace: PowerPodNamespace,
		}, powerWorkload)
		if err != nil {
			if errors.IsNotFound(err) {
				t.Errorf("%s - Failed: Expected PowerWorkload %s to exist", tc.testCase, tc.expectedPowerWorkloadName)
				t.Fatal("Unable to retrieve PowerWorkload object, cannot continue")
			} else {
				t.Error(err)
				t.Fatalf("%s - error retrieving PowerWorkload object", tc.testCase)
			}
		}
		if len(powerWorkload.Spec.Node.Containers) != tc.expectedNumberOfPowerWorkloadContainers {
			t.Errorf("%s - Failed: Expected number of PowerWorkload Containers to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloadContainers, len(powerWorkload.Spec.Node.Containers))
		}
		// Each expected container must exist and carry its exclusive CPUs.
		for containerName, cpuIds := range tc.expectedPowerWorkloadContainerCpuIds {
			containerFromNodeInfo := powerv1alpha1.Container{}
			for _, container := range powerWorkload.Spec.Node.Containers {
				if container.Name == containerName {
					containerFromNodeInfo = container
				}
			}
			if reflect.DeepEqual(containerFromNodeInfo, powerv1alpha1.Container{}) {
				t.Errorf("%s - Failed: Expected Container '%s' to exist", tc.testCase, containerName)
			} else {
				if !reflect.DeepEqual(containerFromNodeInfo.ExclusiveCPUs, cpuIds) {
					t.Errorf("%s - Failed: Expected PowerWorkload Container '%s' CpuIds to be %v, got %v", tc.testCase, containerName, cpuIds, containerFromNodeInfo.ExclusiveCPUs)
				}
			}
		}
		if !reflect.DeepEqual(powerWorkload.Spec.Node.CpuIds, tc.expectedPowerWorkloadCpuIds) {
			t.Errorf("%s - Failed: Expected PowerWorkload CpuIds to be %v, got %v", tc.testCase, tc.expectedPowerWorkloadCpuIds, powerWorkload.Spec.Node.CpuIds)
		}
		if powerWorkload.Spec.PowerProfile != tc.expectedPowerWorkloadPowerProfile {
			t.Errorf("%s - Failed: Expected PowerWorkload PowerProfile to be %v, got %v", tc.testCase, tc.expectedPowerWorkloadPowerProfile, powerWorkload.Spec.PowerProfile)
		}
	}
}
func TestPodDeletion(t *testing.T) {
tcases := []struct {
testCase string
pods *corev1.PodList
powerWorkloads *powerv1alpha1.PowerWorkloadList
node *corev1.Node
powerProfiles *powerv1alpha1.PowerProfileList
podResources []podresourcesapi.PodResources
containerResources map[string][]podresourcesapi.ContainerResources
powerWorkloadNames []string
expectedNumberOfPowerWorkloads int
expectedPowerWorkloadToNotBeFound map[string]bool
expectedPowerWorkloadCpuIds map[string][]int
}{
{
testCase: "Test Case 1",
pods: &corev1.PodList{
Items: []corev1.Pod{
{
ObjectMeta: metav1.ObjectMeta{
Name: "example-pod",
Namespace: PowerPodNamespace,
UID: "abcdefg",
},
Spec: corev1.PodSpec{
NodeName: "example-node1",
Containers: []corev1.Container{
{
Name: "example-container-1",
Resources: corev1.ResourceRequirements{
Limits: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
Requests: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
},
},
},
},
Status: corev1.PodStatus{
Phase: corev1.PodRunning,
QOSClass: corev1.PodQOSGuaranteed,
ContainerStatuses: []corev1.ContainerStatus{
{
Name: "example-container-1",
ContainerID: "docker://abcdefg",
},
},
},
},
},
},
powerWorkloads: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1-workload",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-example-node1-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node1",
Containers: []powerv1alpha1.Container{
{
Name: "example-container-1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{1, 2},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
},
CpuIds: []int{1, 2},
},
},
},
},
},
node: &corev1.Node{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node1",
},
},
powerProfiles: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node1",
Max: 3700,
Min: 3400,
Epp: "performance",
},
},
},
},
podResources: []podresourcesapi.PodResources{
{
Name: "example-pod",
Containers: []*podresourcesapi.ContainerResources{},
},
},
containerResources: map[string][]podresourcesapi.ContainerResources{
"example-pod": []podresourcesapi.ContainerResources{
{
Name: "example-container-1",
CpuIds: []int64{1, 2},
},
},
},
powerWorkloadNames: []string{
"performance-example-node1-workload",
},
expectedNumberOfPowerWorkloads: 0,
expectedPowerWorkloadToNotBeFound: map[string]bool{
"performance-example-node1-workload": true,
},
},
{
testCase: "Test Case 2",
pods: &corev1.PodList{
Items: []corev1.Pod{
{
ObjectMeta: metav1.ObjectMeta{
Name: "example-pod",
Namespace: PowerPodNamespace,
UID: "abcdefg",
},
Spec: corev1.PodSpec{
NodeName: "example-node1",
Containers: []corev1.Container{
{
Name: "example-container-1",
Resources: corev1.ResourceRequirements{
Limits: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
Requests: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
},
},
{
Name: "example-container-2",
Resources: corev1.ResourceRequirements{
Limits: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
Requests: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
},
},
},
},
Status: corev1.PodStatus{
Phase: corev1.PodRunning,
QOSClass: corev1.PodQOSGuaranteed,
ContainerStatuses: []corev1.ContainerStatus{
{
Name: "example-container-1",
ContainerID: "docker://abcdefg",
},
{
Name: "example-container-2",
ContainerID: "docker://abcdefglasdjkfh",
},
},
},
},
},
},
powerWorkloads: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1-workload",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-example-node1-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node1",
Containers: []powerv1alpha1.Container{
{
Name: "example-container-1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{1, 2},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
{
Name: "example-container-2",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{3, 4},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
},
CpuIds: []int{1, 2, 3, 4},
},
},
},
},
},
node: &corev1.Node{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node1",
},
},
powerProfiles: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node1",
Max: 3700,
Min: 3400,
Epp: "performance",
},
},
},
},
podResources: []podresourcesapi.PodResources{
{
Name: "example-pod",
Containers: []*podresourcesapi.ContainerResources{},
},
},
containerResources: map[string][]podresourcesapi.ContainerResources{
"example-pod": []podresourcesapi.ContainerResources{
{
Name: "example-container-1",
CpuIds: []int64{1, 2},
},
{
Name: "example-container-2",
CpuIds: []int64{3, 4},
},
},
},
powerWorkloadNames: []string{
"performance-example-node1-workload",
},
expectedNumberOfPowerWorkloads: 0,
expectedPowerWorkloadToNotBeFound: map[string]bool{
"performance-example-node1-workload": true,
},
},
{
testCase: "Test Case 3",
pods: &corev1.PodList{
Items: []corev1.Pod{
{
ObjectMeta: metav1.ObjectMeta{
Name: "example-pod",
Namespace: PowerPodNamespace,
UID: "abcdefg",
},
Spec: corev1.PodSpec{
NodeName: "example-node1",
Containers: []corev1.Container{
{
Name: "example-container-1",
Resources: corev1.ResourceRequirements{
Limits: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
Requests: map[corev1.ResourceName]resource.Quantity{
corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
},
},
},
},
},
Status: corev1.PodStatus{
Phase: corev1.PodRunning,
QOSClass: corev1.PodQOSGuaranteed,
ContainerStatuses: []corev1.ContainerStatus{
{
Name: "example-container-1",
ContainerID: "docker://abcdefg",
},
},
},
},
},
},
powerWorkloads: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1-workload",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-example-node1-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node1",
Containers: []powerv1alpha1.Container{
{
Name: "example-container-1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{1, 2},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
{
Name: "example-container-2",
Id: "abcdefgskdfjlha",
Pod: "example-pod2",
ExclusiveCPUs: []int{3, 4},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
},
CpuIds: []int{1, 2, 3, 4},
},
},
},
},
},
node: &corev1.Node{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node1",
},
},
powerProfiles: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1",
Namespace: PowerPodNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node1",
Max: 3700,
Min: 3400,
Epp: "performance",
},
},
},
},
podResources: []podresourcesapi.PodResources{
{
Name: "example-pod",
Containers: []*podresourcesapi.ContainerResources{},
},
{
Name: "example-pod2",
Containers: []*podresourcesapi.ContainerResources{},
},
},
containerResources: map[string][]podresourcesapi.ContainerResources{
"example-pod": []podresourcesapi.ContainerResources{
{
Name: "example-container-1",
CpuIds: []int64{1, 2},
},
},
"example-pod2": []podresourcesapi.ContainerResources{
{
Name: "example-container-2",
CpuIds: []int64{3, 4},
},
},
},
powerWorkloadNames: []string{
"performance-example-node1-workload",
},
expectedNumberOfPowerWorkloads: 1,
expectedPowerWorkloadCpuIds: map[string][]int{
"performance-example-node1-workload": []int{3, 4},
},
},
}
for _, tc := range tcases {
t.Setenv("NODE_NAME", tc.node.Name)
objs := make([]runtime.Object, 0)
for i := range tc.pods.Items {
objs = append(objs, &tc.pods.Items[i])
}
for i := range tc.powerWorkloads.Items {
objs = append(objs, &tc.powerWorkloads.Items[i])
}
objs = append(objs, tc.node)
for i := range tc.powerProfiles.Items {
objs = append(objs, &tc.powerProfiles.Items[i])
}
r, err := createPowerPodReconcilerObject(objs)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error creating reconciler object", tc.testCase))
}
fakePodResources := []*podresourcesapi.PodResources{}
for i := range tc.podResources {
fakeContainers := []*podresourcesapi.ContainerResources{}
for j := range tc.containerResources[tc.podResources[i].Name] {
fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
}
tc.podResources[i].Containers = fakeContainers
fakePodResources = append(fakePodResources, &tc.podResources[i])
}
fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
PodResources: fakePodResources,
}
podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
r.PodResourcesClient = *podResourcesClient
for i := range tc.pods.Items {
req := reconcile.Request{
NamespacedName: client.ObjectKey{
Name: tc.pods.Items[i].Name,
Namespace: PowerPodNamespace,
},
}
_, err = r.Reconcile(req)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error reconciling PowerWorkload object", tc.testCase))
}
}
for i := range tc.pods.Items {
now := metav1.Now()
tc.pods.Items[i].DeletionTimestamp = &now
err = r.Client.Update(context.TODO(), &tc.pods.Items[i])
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error updating Pod '%s' DeletionTimestamp", tc.testCase, tc.pods.Items[i].Name))
}
}
for i := range tc.pods.Items {
req := reconcile.Request{
NamespacedName: client.ObjectKey{
Name: tc.pods.Items[i].Name,
Namespace: PowerPodNamespace,
},
}
_, err = r.Reconcile(req)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error reconciling PowerWorkload object", tc.testCase))
}
}
powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
err = r.Client.List(context.TODO(), powerWorkloads)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error retrieving PowerWorkload list object", tc.testCase))
}
if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
}
for _, powerWorkloadName := range tc.powerWorkloadNames {
powerWorkload := &powerv1alpha1.PowerWorkload{}
err = r.Client.Get(context.TODO(), client.ObjectKey{
Name: powerWorkloadName,
Namespace: PowerPodNamespace,
}, powerWorkload)
if errors.IsNotFound(err) != tc.expectedPowerWorkloadToNotBeFound[powerWorkloadName] {
t.Errorf("%s - Failed: Expected PowerWorkload '%s' to exist to be %v, got %v", tc.testCase, powerWorkloadName, tc.expectedPowerWorkloadToNotBeFound[powerWorkloadName], errors.IsNotFound(err))
}
if !errors.IsNotFound(err) {
if !reflect.DeepEqual(powerWorkload.Spec.Node.CpuIds, tc.expectedPowerWorkloadCpuIds[powerWorkloadName]) {
t.Errorf("%s - Failed: Expected PowerWorkload '%s' CpuIds to be %v, got %v", tc.testCase, powerWorkloadName, tc.expectedPowerWorkloadCpuIds[powerWorkloadName], powerWorkload.Spec.Node.CpuIds)
}
}
}
}
}
// TestIncorrectNode verifies that the reconciler ignores Pods scheduled on a
// node other than the one named in NODE_NAME: no PowerWorkloads are created.
func TestIncorrectNode(t *testing.T) {
	cases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tt := range cases {
		// "incorrect" never matches the Pod's NodeName ("example-node1"),
		// so the reconciler should do nothing for this Pod.
		t.Setenv("NODE_NAME", "incorrect")
		objs := []runtime.Object{tt.pod}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tt.testCase)
		}
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tt.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		if _, err = r.Reconcile(req); err != nil {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tt.testCase)
		}
		workloads := &powerv1alpha1.PowerWorkloadList{}
		if err = r.Client.List(context.TODO(), workloads); err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkload list", tt.testCase)
		}
		if got := len(workloads.Items); got != tt.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tt.testCase, tt.expectedNumberOfPowerWorkloads, got)
		}
	}
}
// TestKubeSystemNamespace verifies that Pods living in the kube-system
// namespace are skipped by the reconciler: no PowerWorkloads are created.
func TestKubeSystemNamespace(t *testing.T) {
	cases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: "kube-system",
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tt := range cases {
		// NODE_NAME matches the Pod's node, so the only reason for the
		// reconciler to skip this Pod is its kube-system namespace.
		t.Setenv("NODE_NAME", "example-node1")
		objs := []runtime.Object{tt.pod}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tt.testCase)
		}
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tt.pod.Name,
				Namespace: "kube-system",
			},
		}
		if _, err = r.Reconcile(req); err != nil {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tt.testCase)
		}
		workloads := &powerv1alpha1.PowerWorkloadList{}
		if err = r.Client.List(context.TODO(), workloads); err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkload list", tt.testCase)
		}
		if got := len(workloads.Items); got != tt.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tt.testCase, tt.expectedNumberOfPowerWorkloads, got)
		}
	}
}
// TestNoContainersRequestingPowerProfile verifies that a Pod whose containers
// request only cpu/memory (no power.intel.com/* resource) produces no
// PowerWorkloads.
func TestNoContainersRequestingPowerProfile(t *testing.T) {
	cases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: "kube-system",
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tt := range cases {
		t.Setenv("NODE_NAME", "example-node1")
		objs := []runtime.Object{tt.pod}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tt.testCase)
		}
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tt.pod.Name,
				Namespace: "kube-system",
			},
		}
		if _, err = r.Reconcile(req); err != nil {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tt.testCase)
		}
		workloads := &powerv1alpha1.PowerWorkloadList{}
		if err = r.Client.List(context.TODO(), workloads); err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkload list", tt.testCase)
		}
		if got := len(workloads.Items); got != tt.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tt.testCase, tt.expectedNumberOfPowerWorkloads, got)
		}
	}
}
// TestMultiplePowerProfilesRequested verifies that a Pod requesting more than
// one PowerProfile (either across containers or within a single container) is
// rejected by the reconciler and no PowerWorkloads are created.
func TestMultiplePowerProfilesRequested(t *testing.T) {
	tcases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		node                           *corev1.Node
		powerProfiles                  *powerv1alpha1.PowerProfileList
		podResources                   []podresourcesapi.PodResources
		containerResources             map[string][]podresourcesapi.ContainerResources
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
						{
							Name: "example-container-2",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
						{
							Name:        "example-container-2",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "balance-performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "balance-performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "balance-performance",
						},
					},
				},
			},
			podResources: []podresourcesapi.PodResources{
				{
					Name:       "example-pod",
					Containers: []*podresourcesapi.ContainerResources{},
				},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": []podresourcesapi.ContainerResources{
					{
						Name:   "example-container-1",
						CpuIds: []int64{1, 2},
					},
					{
						Name:   "example-container-2",
						CpuIds: []int64{3, 4},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
		{
			testCase: "Test Case 2",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"):         *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"):         *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
						{
							Name: "example-container-2",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
						{
							Name:        "example-container-2",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "balance-performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "balance-performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "balance-performance",
						},
					},
				},
			},
			podResources: []podresourcesapi.PodResources{
				{
					Name:       "example-pod",
					Containers: []*podresourcesapi.ContainerResources{},
				},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": []podresourcesapi.ContainerResources{
					{
						Name:   "example-container-1",
						CpuIds: []int64{1, 2},
					},
					{
						Name:   "example-container-2",
						CpuIds: []int64{3, 4},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tc := range tcases {
		t.Setenv("NODE_NAME", tc.node.Name)
		objs := make([]runtime.Object, 0)
		objs = append(objs, tc.pod)
		objs = append(objs, tc.node)
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		// Build the fake pod-resources response; pointers are taken to the
		// slice elements themselves (not a loop variable) so they stay valid.
		fakePodResources := []*podresourcesapi.PodResources{}
		for i := range tc.podResources {
			fakeContainers := []*podresourcesapi.ContainerResources{}
			for j := range tc.containerResources[tc.podResources[i].Name] {
				fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
			}
			tc.podResources[i].Containers = fakeContainers
			fakePodResources = append(fakePodResources, &tc.podResources[i])
		}
		fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
			PodResources: fakePodResources,
		}
		podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
		r.PodResourcesClient = *podResourcesClient
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tc.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		_, err = r.Reconcile(req)
		// BUG FIX: the old code nested `if err == nil` inside `if err != nil`,
		// so the "expected error" branch was unreachable. The reconciler is
		// expected to fail here with a ServiceUnavailable error; any other
		// error (or no error at all) is a test failure.
		if err == nil {
			t.Errorf("%s - Failed: Expected moreThanOneProfileError to have occured", tc.testCase)
		} else if !errors.IsServiceUnavailable(err) {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
		}
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkload list", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
	}
}
// TestPodNotInRunningPhase verifies that a Pod whose Status.Phase is not
// Running (the fixture omits Phase entirely) is rejected by the reconciler
// and no PowerWorkloads are created.
func TestPodNotInRunningPhase(t *testing.T) {
	tcases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		node                           *corev1.Node
		powerProfiles                  *powerv1alpha1.PowerProfileList
		podResources                   []podresourcesapi.PodResources
		containerResources             map[string][]podresourcesapi.ContainerResources
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
						{
							Name: "example-container-2",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				// Note: Phase is deliberately unset, so the Pod is not Running.
				Status: corev1.PodStatus{
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
						{
							Name:        "example-container-2",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "balance-performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "balance-performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "balance-performance",
						},
					},
				},
			},
			podResources: []podresourcesapi.PodResources{
				{
					Name:       "example-pod",
					Containers: []*podresourcesapi.ContainerResources{},
				},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": []podresourcesapi.ContainerResources{
					{
						Name:   "example-container-1",
						CpuIds: []int64{1, 2},
					},
					{
						Name:   "example-container-2",
						CpuIds: []int64{3, 4},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tc := range tcases {
		t.Setenv("NODE_NAME", tc.node.Name)
		objs := make([]runtime.Object, 0)
		objs = append(objs, tc.pod)
		objs = append(objs, tc.node)
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		// Wire up the fake pod-resources Lister for this test case.
		fakePodResources := []*podresourcesapi.PodResources{}
		for i := range tc.podResources {
			fakeContainers := []*podresourcesapi.ContainerResources{}
			for j := range tc.containerResources[tc.podResources[i].Name] {
				fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
			}
			tc.podResources[i].Containers = fakeContainers
			fakePodResources = append(fakePodResources, &tc.podResources[i])
		}
		fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
			PodResources: fakePodResources,
		}
		podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
		r.PodResourcesClient = *podResourcesClient
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tc.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		_, err = r.Reconcile(req)
		// BUG FIX: the old code nested `if err == nil` inside `if err != nil`,
		// so the "expected error" branch was unreachable. A ServiceUnavailable
		// error is the expected outcome for a non-Running Pod.
		if err == nil {
			t.Errorf("%s - Failed: Expected podNotRunningErr to have occured", tc.testCase)
		} else if !errors.IsServiceUnavailable(err) {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
		}
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkloads", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
	}
}
// TestNoUIDForPodFound verifies that a Pod with no UID set (the fixture omits
// ObjectMeta.UID) is rejected by the reconciler and no PowerWorkloads are
// created.
func TestNoUIDForPodFound(t *testing.T) {
	tcases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		node                           *corev1.Node
		powerProfiles                  *powerv1alpha1.PowerProfileList
		podResources                   []podresourcesapi.PodResources
		containerResources             map[string][]podresourcesapi.ContainerResources
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				// Note: UID is deliberately left unset.
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
						{
							Name: "example-container-2",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/balance-performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
						{
							Name:        "example-container-2",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "balance-performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "balance-performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "balance-performance",
						},
					},
				},
			},
			podResources: []podresourcesapi.PodResources{
				{
					Name:       "example-pod",
					Containers: []*podresourcesapi.ContainerResources{},
				},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": []podresourcesapi.ContainerResources{
					{
						Name:   "example-container-1",
						CpuIds: []int64{1, 2},
					},
					{
						Name:   "example-container-2",
						CpuIds: []int64{3, 4},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tc := range tcases {
		t.Setenv("NODE_NAME", tc.node.Name)
		objs := make([]runtime.Object, 0)
		objs = append(objs, tc.pod)
		objs = append(objs, tc.node)
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		// Wire up the fake pod-resources Lister for this test case.
		fakePodResources := []*podresourcesapi.PodResources{}
		for i := range tc.podResources {
			fakeContainers := []*podresourcesapi.ContainerResources{}
			for j := range tc.containerResources[tc.podResources[i].Name] {
				fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
			}
			tc.podResources[i].Containers = fakeContainers
			fakePodResources = append(fakePodResources, &tc.podResources[i])
		}
		fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
			PodResources: fakePodResources,
		}
		podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
		r.PodResourcesClient = *podResourcesClient
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tc.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		_, err = r.Reconcile(req)
		// BUG FIX: the old code nested `if err == nil` inside `if err != nil`,
		// so the "expected error" branch was unreachable. A ServiceUnavailable
		// error is the expected outcome when the Pod's UID cannot be found.
		if err == nil {
			t.Errorf("%s - Failed: Expected podUIDNotFoundError to have occured", tc.testCase)
		} else if !errors.IsServiceUnavailable(err) {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
		}
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkloads", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
	}
}
// TestPowerProfileDoesNotExist verifies that a Pod requesting a PowerProfile
// that has not been created (only "balance-performance-example-node1" exists,
// while the Pod asks for "performance-example-node1") is rejected by the
// reconciler and no PowerWorkloads are created.
func TestPowerProfileDoesNotExist(t *testing.T) {
	tcases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		node                           *corev1.Node
		powerProfiles                  *powerv1alpha1.PowerProfileList
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "balance-performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "balance-performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "balance_performance",
						},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tc := range tcases {
		t.Setenv("NODE_NAME", tc.node.Name)
		objs := make([]runtime.Object, 0)
		objs = append(objs, tc.pod)
		objs = append(objs, tc.node)
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tc.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		_, err = r.Reconcile(req)
		// BUG FIX: the old code nested `if err == nil` inside `if err != nil`,
		// so the "expected error" branch was unreachable. A ServiceUnavailable
		// error is the expected outcome when the requested profile is missing.
		if err == nil {
			t.Errorf("%s - Failed: Expected powerProfileNotFoundError to have occured", tc.testCase)
		} else if !errors.IsServiceUnavailable(err) {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
		}
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkloads", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
	}
}
// TestCpuPowerProfileResourcesMismatch verifies that a Pod whose "cpu"
// request disagrees with the number of power-profile resources it asks for
// results in no PowerWorkload being created.
func TestCpuPowerProfileResourcesMismatch(t *testing.T) {
	tcases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		node                           *corev1.Node
		powerProfiles                  *powerv1alpha1.PowerProfileList
		resources                      map[string]string
		podResources                   []podresourcesapi.PodResources
		containerResources             map[string][]podresourcesapi.ContainerResources
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								// cpu=2 but only 1 performance resource: the mismatch under test.
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(1, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"):    *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("memory"): *resource.NewQuantity(200, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(1, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					Phase:    corev1.PodRunning,
					QOSClass: corev1.PodQOSGuaranteed,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
				},
			},
			resources: map[string]string{
				"cpu":    "2",
				"memory": "200Mi",
				"power.intel.com/performance-example-node1": "2",
			},
			podResources: []podresourcesapi.PodResources{
				{
					Name:       "example-pod",
					Containers: []*podresourcesapi.ContainerResources{},
				},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{
						Name:   "example-container-1",
						CpuIds: []int64{1, 2},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tc := range tcases {
		t.Setenv("NODE_NAME", tc.node.Name)
		objs := make([]runtime.Object, 0)
		objs = append(objs, tc.pod)
		objs = append(objs, tc.node)
		// Index into the slice so we take the element's address, not the
		// loop variable's.
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		// Wire up the fake pod-resources lister with this case's containers.
		fakePodResources := []*podresourcesapi.PodResources{}
		for i := range tc.podResources {
			fakeContainers := []*podresourcesapi.ContainerResources{}
			for j := range tc.containerResources[tc.podResources[i].Name] {
				fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
			}
			tc.podResources[i].Containers = fakeContainers
			fakePodResources = append(fakePodResources, &tc.podResources[i])
		}
		fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
			PodResources: fakePodResources,
		}
		podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
		r.PodResourcesClient = *podResourcesClient
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tc.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		_, err = r.Reconcile(req)
		// NOTE(review): the original nested an `if err == nil` expectation
		// check inside `if err != nil`, making it unreachable. Only the
		// reachable behavior is kept: tolerate ServiceUnavailable, fail on
		// any other error. If the reconcile is *expected* to error here,
		// an `err == nil` assertion should be added — confirm intent.
		if err != nil && !errors.IsServiceUnavailable(err) {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
		}
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkloads", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
	}
}
// TestNoContainersRequestingExclusiveCPUs verifies that a Pod without a
// Guaranteed QOSClass in its status (so no container holds exclusive CPUs)
// results in no PowerWorkload being created.
func TestNoContainersRequestingExclusiveCPUs(t *testing.T) {
	tcases := []struct {
		testCase                       string
		pod                            *corev1.Pod
		node                           *corev1.Node
		powerProfiles                  *powerv1alpha1.PowerProfileList
		podResources                   []podresourcesapi.PodResources
		containerResources             map[string][]podresourcesapi.ContainerResources
		expectedNumberOfPowerWorkloads int
	}{
		{
			testCase: "Test Case 1",
			pod: &corev1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "example-pod",
					Namespace: PowerPodNamespace,
					UID:       "abcdefg",
				},
				Spec: corev1.PodSpec{
					NodeName: "example-node1",
					Containers: []corev1.Container{
						{
							Name: "example-container-1",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
						{
							Name: "example-container-2",
							Resources: corev1.ResourceRequirements{
								Limits: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
								Requests: map[corev1.ResourceName]resource.Quantity{
									corev1.ResourceName("cpu"): *resource.NewQuantity(2, resource.DecimalSI),
									corev1.ResourceName("power.intel.com/performance-example-node1"): *resource.NewQuantity(2, resource.DecimalSI),
								},
							},
						},
					},
				},
				Status: corev1.PodStatus{
					// No QOSClass set here (unlike the other tests): the
					// condition this test exercises.
					Phase: corev1.PodRunning,
					ContainerStatuses: []corev1.ContainerStatus{
						{
							Name:        "example-container-1",
							ContainerID: "docker://abcdefg",
						},
						{
							Name:        "example-container-2",
							ContainerID: "docker://abcdefg",
						},
					},
				},
			},
			node: &corev1.Node{
				ObjectMeta: metav1.ObjectMeta{
					Name: "example-node1",
				},
			},
			powerProfiles: &powerv1alpha1.PowerProfileList{
				Items: []powerv1alpha1.PowerProfile{
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "performance",
						},
					},
					{
						ObjectMeta: metav1.ObjectMeta{
							Name:      "balance-performance-example-node1",
							Namespace: PowerPodNamespace,
						},
						Spec: powerv1alpha1.PowerProfileSpec{
							Name: "balance-performance-example-node1",
							Max:  3200,
							Min:  2800,
							Epp:  "balance-performance",
						},
					},
				},
			},
			podResources: []podresourcesapi.PodResources{
				{
					Name:       "example-pod",
					Containers: []*podresourcesapi.ContainerResources{},
				},
			},
			containerResources: map[string][]podresourcesapi.ContainerResources{
				"example-pod": {
					{
						Name:   "example-container-1",
						CpuIds: []int64{1, 2},
					},
					{
						Name:   "example-container-2",
						CpuIds: []int64{3, 4},
					},
				},
			},
			expectedNumberOfPowerWorkloads: 0,
		},
	}
	for _, tc := range tcases {
		t.Setenv("NODE_NAME", tc.node.Name)
		objs := make([]runtime.Object, 0)
		objs = append(objs, tc.pod)
		objs = append(objs, tc.node)
		// Index into the slice so we take the element's address, not the
		// loop variable's.
		for i := range tc.powerProfiles.Items {
			objs = append(objs, &tc.powerProfiles.Items[i])
		}
		r, err := createPowerPodReconcilerObject(objs)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error creating reconciler object", tc.testCase)
		}
		// Wire up the fake pod-resources lister with this case's containers.
		fakePodResources := []*podresourcesapi.PodResources{}
		for i := range tc.podResources {
			fakeContainers := []*podresourcesapi.ContainerResources{}
			for j := range tc.containerResources[tc.podResources[i].Name] {
				fakeContainers = append(fakeContainers, &tc.containerResources[tc.podResources[i].Name][j])
			}
			tc.podResources[i].Containers = fakeContainers
			fakePodResources = append(fakePodResources, &tc.podResources[i])
		}
		fakeListResponse := &podresourcesapi.ListPodResourcesResponse{
			PodResources: fakePodResources,
		}
		podResourcesClient := createFakePodResourcesListerClient(fakeListResponse)
		r.PodResourcesClient = *podResourcesClient
		req := reconcile.Request{
			NamespacedName: client.ObjectKey{
				Name:      tc.pod.Name,
				Namespace: PowerPodNamespace,
			},
		}
		_, err = r.Reconcile(req)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error reconciling PowerWorkload object", tc.testCase)
		}
		powerWorkloads := &powerv1alpha1.PowerWorkloadList{}
		err = r.Client.List(context.TODO(), powerWorkloads)
		if err != nil {
			t.Error(err)
			t.Fatalf("%s - error retrieving PowerWorkloads", tc.testCase)
		}
		if len(powerWorkloads.Items) != tc.expectedNumberOfPowerWorkloads {
			t.Errorf("%s - Failed: Expected number of PowerWorkloads to be %v, got %v", tc.testCase, tc.expectedNumberOfPowerWorkloads, len(powerWorkloads.Items))
		}
	}
}
|
import { BrowserRouter, Route, Routes } from 'react-router-dom'
import Footer from './components/Footer'
import NavBar from './components/NavBar'
import NewQuotes from './pages/NewQuotes'
import QuotesByTopic from './pages/QuotesByTopic'
import Topics from './pages/Topics'
// Every topic listed here becomes a /topics/<name> route below; the original
// spelled out 21 identical <Route> lines by hand.
const TOPICS = [
  'blessing', 'commandments', 'consolation', 'death', 'faith',
  'forgiveness', 'friendship', 'gratitude', 'health', 'hope',
  'judgment', 'love', 'marriage', 'money', 'motivation',
  'prayer', 'promises', 'purity', 'repentance', 'salvation', 'wisdom',
]

/**
 * Top-level router: the nav bar and footer wrap every routed page.
 *
 * Note: the `exact` prop was dropped from the index route — react-router v6
 * (which this file already uses via <Routes>) matches exactly by default and
 * does not support `exact` on <Route>.
 */
function AppRoutes() {
  return (
    <BrowserRouter>
      <NavBar />
      <Routes>
        <Route path='/' element={<NewQuotes />} />
        <Route path='/topics' element={<Topics />} />
        {TOPICS.map((topic) => (
          <Route
            key={topic}
            path={`/topics/${topic}`}
            element={<QuotesByTopic topic={topic} />}
          />
        ))}
      </Routes>
      <Footer />
    </BrowserRouter>
  )
}
export default AppRoutes
|
#!/bin/bash
set -e
# Refresh package lists first — on a fresh image the install below fails
# against stale/empty apt metadata.
apt-get update
# build-essential supplies make and a C++ compiler, which `make install`
# needs to build git-crypt from source (libssl-dev alone is not enough).
apt-get install -y wget unzip build-essential libssl-dev
wget https://github.com/AGWA/git-crypt/archive/master.zip && unzip master.zip && cd git-crypt-master && make install
|
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayDeque;
import java.util.Deque;
/**
*
* @author minchoba
* 백준 10826번: 피보나치 수 4
*
* @see https://www.acmicpc.net/problem/10826/
*
*/
/**
 * Baekjoon 10826: Fibonacci number 4.
 * Computes F(N) as a decimal string, using long arithmetic while the values
 * fit and falling back to manual digit-by-digit string addition afterwards.
 */
public class Boj10826 {
	// Long.MAX_VALUE has 19 digits: once the previous term reaches this many
	// digits the next sum might overflow a long, so switch to string math.
	private static final int INF = 19;

	public static void main(String[] args) throws Exception {
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		int n = Integer.parseInt(br.readLine());
		System.out.println(fibonacci(n)); // print the result
	}

	/**
	 * Bottom-up Fibonacci; dp[i] holds F(i) as a decimal string.
	 * (Renamed from the original's typo "finbonacci".)
	 */
	private static String fibonacci(int N) {
		String[] dp = new String[N + 1];
		dp[0] = "0";
		if (N > 0) dp[1] = "1";
		for (int i = 2; i < N + 1; i++) {
			if (dp[i - 1].length() < INF) {
				// Both operands still fit comfortably in a long.
				dp[i] = String.valueOf(Long.parseLong(dp[i - 2]) + Long.parseLong(dp[i - 1]));
			} else {
				dp[i] = getBigNumSum(dp[i - 2], dp[i - 1]); // beyond long range
			}
		}
		return dp[N];
	}

	/** Adds two non-negative decimal strings, zero-padding the shorter one. */
	private static String getBigNumSum(String n1, String n2) {
		int leng1 = n1.length();
		int leng2 = n2.length();
		int leng = 0;
		char[] num1 = null;
		char[] num2 = null;
		if (leng1 > leng2) {
			num1 = n1.toCharArray();
			leng = leng1;
			num2 = stringToCharArr(n2, leng2, leng1);
		} else {
			num2 = n2.toCharArray();
			leng = leng2;
			num1 = stringToCharArr(n1, leng1, leng2);
		}
		return makeStringNumber(leng, num1, num2);
	}

	/** Left-pads A (length leng1) with '0' characters up to length leng2. */
	private static char[] stringToCharArr(String A, int leng1, int leng2) {
		char[] arr = new char[leng2];
		int idx = 0;
		for (int i = 0; i < leng2; i++) {
			if (i < leng2 - leng1) {
				arr[i] = '0';
			} else {
				arr[i] = A.charAt(idx++);
			}
		}
		return arr;
	}

	/** Schoolbook addition right-to-left with carry; the stack restores digit order. */
	private static String makeStringNumber(int length, char[] num1, char[] num2) {
		Deque<Integer> stack = new ArrayDeque<>();
		int carry = 0;
		for (int i = length - 1; i >= 0; i--) {
			int tmp = (num1[i] - '0') + (num2[i] - '0') + carry;
			if (tmp > 9) {
				tmp -= 10;
				stack.push(tmp);
				carry = 1;
			} else {
				stack.push(tmp);
				carry = 0;
			}
		}
		if (carry == 1) stack.push(1);
		StringBuilder sb = new StringBuilder();
		while (!stack.isEmpty()) sb.append(stack.pop());
		return sb.toString();
	}
}
|
package databasex
import (
"database/sql"
"fmt"
"github.com/changebooks/database"
"github.com/changebooks/log"
"time"
)
// Exec runs a write statement through the driver, timing it and logging the
// outcome (affected-row count on success, the error otherwise).
// The returned result/err are whatever database.Exec produced.
func (x *Database) Exec(idRegister *log.IdRegister,
	driver *database.Driver, query string, args ...interface{}) (result sql.Result, err error) {
	tag := "Exec"
	start := time.Now()
	result, err = database.Exec(driver, query, args...)
	done := time.Now()
	remark := NewRemark(driver, start, done, query, args...)
	if err != nil {
		x.logger.E(tag, Failure, remark, err, "", idRegister)
		return
	}
	affectedRows, affectedRowsErr := result.RowsAffected()
	if affectedRowsErr != nil {
		// Log the failure only; the original also emitted the info line with
		// a misleading zero-valued affected-rows count in this case.
		x.logger.E(tag, AffectedRowsFailure, remark, affectedRowsErr, "", idRegister)
		return
	}
	x.logger.I(tag, fmt.Sprintf("affected's rows: %d", affectedRows), remark, idRegister)
	return
}
|
#!/usr/bin/env bash
# Consider running these two commands separately
# Do a reboot before continuing.
# To set environmental variables add to the end of this file.
# nano .zshrc

# The original fenced the sections below with Python-style triple quotes
# ("""), which is not valid bash — it opens an unterminated quoted word and
# mangles the commands that follow. A no-op heredoc keeps each section
# inert, as the author apparently intended.
: <<'DISABLED_SECTION'
apt update
apt upgrade -y
apt install zsh
sh -c "$(curl -fsSL https://raw.github.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
# Install some OS dependencies:
sudo apt-get install -y -q build-essential git unzip zip nload tree
sudo apt-get install -y -q python3-pip python3-dev python3-venv
sudo apt-get install -y -q nginx
# for gzip support in uwsgi
sudo apt-get install --no-install-recommends -y -q libpcre3-dev libz-dev
# Stop the hackers
sudo apt install fail2ban -y
ufw allow 22
ufw allow 80
ufw allow 443
ufw enable
# Basic git setup
git config --global credential.helper cache
git config --global credential.helper 'cache --timeout=720000'
# Be sure to put your info here:
git config --global user.email "aambrioso1@gmail.com"
git config --global user.name "aambrioso"
# Web app file structure
mkdir /apps
chmod 777 /apps
mkdir /apps/logs
mkdir /apps/logs/my_blog
mkdir /apps/logs/my_blog/app_log
cd /apps
# Create a virtual env for the app.
cd /apps
python3 -m venv venv
source /apps/venv/bin/activate
pip install --upgrade pip setuptools
pip install --upgrade httpie glances
pip install --upgrade uwsgi
# clone the repo:
cd /apps
git clone https://github.com/aambrioso1/pypi-flask-app
DISABLED_SECTION

# Setup the web app:
# (fixed: the original read "cd cd /apps/my_blog/")
cd /apps/my_blog/
pip install -r requirements.txt
# Copy and enable the daemon
cp /apps/my_blog/server/my_blog.service /etc/systemd/system/my_blog.service
systemctl start my_blog
systemctl status my_blog
systemctl enable my_blog
# Setup the public facing server (NGINX)
apt install nginx
# CAREFUL HERE. If you are using default, maybe skip this
rm /etc/nginx/sites-enabled/default
cp /apps/my_blog/server/my_blog.nginx /etc/nginx/sites-enabled/my_blog.nginx
update-rc.d nginx enable
service nginx restart

: <<'DISABLED_SECTION_SSL'
# Optionally add SSL support via Let's Encrypt:
# https://www.digitalocean.com/community/tutorials/how-to-secure-nginx-with-let-s-encrypt-on-ubuntu-18-04
add-apt-repository ppa:certbot/certbot
apt install python-certbot-nginx
certbot --nginx -d fakepypi.talkpython.com
DISABLED_SECTION_SSL
|
#!/usr/bin/env bash
# Upload C++ (keyvi) and Python-binding (pykeyvi) coverage to Coveralls,
# then merge both reports into a single submission.
set -ev
# C++ core coverage: exclude vendored 3rdparty code, tests and binaries.
coveralls -r . -b build/ -i keyvi \
--gcov /usr/bin/gcov-4.8 --gcov-options '\-lp' \
-E '.*/keyvi/3rdparty/.*' \
-e pykeyvi \
-E '.*/keyvi/tests/.*' \
-E '.*/keyvi/bin/.*' \
--dump keyvi.cov_report > /dev/null
# workaround for coverage measurement: symlink keyvi
cd pykeyvi/
ln -s ../keyvi keyvi
cd ..
# Python-binding coverage: exclude generated/wrapper sources.
coveralls -r . -b pykeyvi/ -i pykeyvi \
--gcov /usr/bin/gcov-4.8 --gcov-options '\-lp' \
-e pykeyvi/keyvi/3rdparty -e build \
-E '.*/autowrap_includes/autowrap_tools.hpp' \
-E '.*/src/extra/attributes_converter.h' \
-E '.*/pykeyvi.cpp' \
--dump pykeyvi.cov_report_tmp > /dev/null
# workaround: remove 'pykeyvi' from source path before merge
sed s/"pykeyvi\/keyvi"/"keyvi"/g pykeyvi.cov_report_tmp > pykeyvi.cov_report
# Re-exporting marks the token as exported for the child process below.
export COVERALLS_REPO_TOKEN=${COVERALLS_REPO_TOKEN}
coveralls-merge keyvi.cov_report pykeyvi.cov_report
|
# ----- General ---------------------------------------------------------------
# colorize grep and ls
alias grep &> /dev/null || alias grep="grep --color=auto"
alias ls='ls -p --color=auto -w $(($COLUMNS<120?$COLUMNS:120))'
alias l="ls"
# 'r' in zsh is set up to repeat the last command (!!)
# (aliasing it to true disables that behavior here)
alias r="true"
# manipulate files verbosely (print log of what happened)
alias cp="cp -v"
alias mv="mv -v"
alias rm="rm -v"
# so much easier to type than `cd ..`
alias cdd="cd .."
alias cddd="cd ../.."
alias cdddd="cd ../../.."
alias cddddd="cd ../../../.."
alias cdddddd="cd ../../../../.."
alias cddddddd="cd ../../../../../.."
# use popd to navigate directory stack (like "Back" in a browser)
alias b="popd"
alias bex="bundle exec"
# I'm pretty proud of these ones
alias :q="clear"
alias :qall!="clear"
alias :tabe="vim"
alias :Vs="vimv"
alias :vs="vimv"
# Redirect stderr and stdout when using GRC
which grc &> /dev/null && alias grc="grc -es"
# look up LaTeX documentation
which texdef &> /dev/null && alias latexdef="texdef --tex latex"
# Easily download an MP3 from youtube on the command line
alias youtube-mp3="noglob youtube-dl --extract-audio --audio-format mp3"
which doctoc &> /dev/null && alias doctoc='doctoc --title="## Table of Contents"'
# Pretend that tmux is XDG Base Directory conformant
which tmux &> /dev/null && alias tmux='tmux -f "$XDG_CONFIG_HOME"/tmux/tmux.conf'
# Use --no-bold for Solarized colors
alias icdiff="icdiff --no-bold"
# dump syntax highlighted file to screen
alias hicat='highlight -O truecolor --style=solarized-$SOLARIZED'
# Node module for lorem-ipsum
alias words="lorem-ipsum --units words --count"
alias paras="lorem-ipsum --units paragraphs --count"
# Show a summary of my jrnl. 'viewjrnl' is defined in https://github.com/jez/bin
alias jrnlsum="viewjrnl -from '10 days ago'"
# CMake
alias cmg="cmake -S . -B build -G Ninja -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=Debug"
alias cm="cmake --build build"
alias cmr="cmake -S . -B build-release -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo"
alias cmrb="cmake --build build-release"
# ----- aliases that are actually full-blown commands -------------------------
# list disk usage statistics for the current folder
alias duls="du -h -d1 | sort -hr"
# print my IP
alias ip="curl ifconfig.co"
# resolve a symlink in the PWD to a fully qualified directory
alias resolve='cd "`pwd -P`"'
# simple python webserver
alias py2serv="python -m SimpleHTTPServer"
alias py3serv="python3 -m http.server"
alias pyserv="py3serv"
# How much memory is Chrome using right now?
alias chromemem="ps -ev | grep -i chrome | awk '{print \$12}' | awk '{for(i=1;i<=NF;i++)s+=\$i}END{print s}'"
# Re-export SSH_AUTH_SOCK using value from outside tmux
alias reauthsock='eval "export $(tmux showenv SSH_AUTH_SOCK)"'
# Remove garbage files
alias purgeswp="find . -regex '.*.swp$' | xargs rm"
# NOTE(review): the line break inside these quotes is part of the -name
# pattern — macOS "Icon" files end in a control character; confirm the
# intended byte survived copy/paste.
alias purgedrive='find ~/GoogleDrive/ -name Icon
 -exec rm -f {} \; -print'
alias purgeicon='find . -name Icon
 -exec rm -f {} \; -print'
# I don't care if Homebrew thinks this is bad, it's super convenient
alias sudo-htop='sudo chown root:wheel $(which htop) && sudo chmod u+s $(which htop)'
# ag color schemes for dark/light Solarized backgrounds
AG_DARK="ag --hidden --color-path '0;35' --color-match '1;37' --color-line-number '0;34'"
AG_LIGHT="ag --hidden --color-path '0;35' --color-match '1;30' --color-line-number '0;34'"
if [ "$SOLARIZED" = "dark" ]; then
alias ag="$AG_DARK"
elif [ "$SOLARIZED" = "light" ]; then
alias ag="$AG_LIGHT"
else
alias ag="$AG_DARK"
fi
alias agt="ag --ignore='*test*'"
alias ago="ag --nobreak --noheading --nofilename --nogroup --only-matching"
# rg color schemes for dark/light Solarized backgrounds
RG_DARK="rg -S --hidden --colors 'match:fg:white' --colors 'match:style:bold' --colors 'line:fg:blue'"
RG_LIGHT="rg -S --hidden --colors 'match:fg:black' --colors 'match:style:bold' --colors 'line:fg:blue'"
if [ "$SOLARIZED" = "dark" ]; then
alias rg="$RG_DARK"
elif [ "$SOLARIZED" = "light" ]; then
alias rg="$RG_LIGHT"
else
alias rg="$RG_DARK"
fi
alias rgt=$'rg --glob=\'!test\''
alias rgo="rg --no-heading --no-filename --no-line-number --only-matching"
alias gg="git grep"
alias payweb-time="overtime show Europe/Berlin Europe/London America/New_York America/Denver America/Los_Angeles"
# Takes output like 'foo.txt:12: ...' (i.e., output from git grep --line)
# and keeps only the foo.txt:12 part
alias fileline="cut -d : -f 1-2"
alias onlyloclines="sed -e '/^ /d; /^$/d; /^Errors:/d'"
alias onlylocs="onlyloclines | fileline"
# Given input like foo.txt:12 on their own lines, +1 / -1 to all the line numbers
alias nextline="awk 'BEGIN { FS = \":\"} {print \$1 \":\" (\$2 + 1)}'"
alias prevline="awk 'BEGIN { FS = \":\"} {print \$1 \":\" (\$2 - 1)}'"
# ----- Git aliases -----------------------------------------------------------
# hub is a command line wrapper for using Git with GitHub
eval "$(hub alias -s 2> /dev/null)"
# We want to use '#' as a markdown character, so let's use '%' for comments
alias hubmdpr="hub -c core.commentChar='%' pull-request"
alias hubmd="hub -c core.commentChar='%'"
alias hubci="hub ci-status --verbose"
alias ptal='hub issue update "$(hub pr show --format="%I")" -a'
alias git-skip-dirty-check="export PROMPT_PURE_SKIP_DIRTY_CHECK='1'"
alias git-check-dirty="unset PROMPT_PURE_SKIP_DIRTY_CHECK"
alias git-personal-ssh="git config core.sshCommand 'ssh -i ~/.ssh/id_rsa -F /dev/null'"
alias gco="git checkout"
alias gob="git checkout -b"
alias goB="git checkout -B"
alias ga="git add"
alias gap="git add --patch"
alias gc="git commit -v"
alias gca="gc -a"
alias gcmd="git -c core.commentChar='%' commit -v --template=$HOME/.util/gitmessage.md"
alias gcm="git commit -m"
alias gcam="git commit -am"
alias gcan="git commit -a --amend --no-edit"
alias gcne="git commit --amend --no-edit"
alias gs="git status"
alias gss="git status --short"
alias gd="git diff"
alias gds="git diff --staged"
alias gdw="git diff --color-words"
alias gdr="git diff-review"
alias gbc="git by-commit"
alias gicd="git icdiff"
alias gf="git fetch"
alias gfp="git fetch --prune"
alias gpf="git pull --ff-only"
alias grbc="git rebase --continue"
alias gri="git rebase -i"
alias grim="git rebase -i master"
alias gr="git review"
alias gro="git reviewone"
alias grf="git reviewf"
alias gitprune='git checkout -q master && git for-each-ref refs/heads/ "--format=%(refname:short)" | while read branch; do mergeBase=$(git merge-base master $branch) && [[ $(git cherry master $(git commit-tree $(git rev-parse $branch\^{tree}) -p $mergeBase -m _)) == "-"* ]] && git branch -D $branch; done'
# reusable format strings
GIT_PRETTY_FORMAT_AUTHOR="--pretty=\"%C(bold green)%h%Creset %C(yellow)%an%Creset%C(auto)%d%Creset %s\""
GIT_PRETTY_FORMAT_ALIGN="--pretty=\"%C(bold green)%h%Creset %C(yellow)%an%Creset %s%C(auto)%d%Creset\""
# only branches with 'jez' in them, including their remote counter parts
# (especially useful when in a repo with lots of other people)
ONLY_JEZ="--branches='*jez*' --remotes='*jez*' master origin/master"
# exclude tags (Sorbet tags are super annoying)
EXCLUDE_TAGS="--decorate-refs-exclude='tags/*'"
# pretty Git log, show authors
alias glat="git log --graph $GIT_PRETTY_FORMAT_AUTHOR"
# pretty Git log, all references, show authors
alias gllat='glat --all'
# pretty Git log, show authors, align messages
alias glalat="git log --graph $GIT_PRETTY_FORMAT_ALIGN"
# pretty Git log, all references, show authors, align messages
alias glalalt="glala --all"
# It doesn't make sense to combine $ONLY_JEZ with --all
alias glajt="glat $ONLY_JEZ"
alias glalajt="glalat $ONLY_JEZ"
# non-tag versions of the above
alias gla="glat $EXCLUDE_TAGS"
alias glla="gllat $EXCLUDE_TAGS"
alias glala="glalat $EXCLUDE_TAGS"
alias glalal="glalalt $EXCLUDE_TAGS"
alias glaj="glajt $EXCLUDE_TAGS"
alias glalaj="glalajt $EXCLUDE_TAGS"
# ----- Docker aliases --------------------------------------------------------
# docker-compose is far too long to type
alias fig="docker-compose"
alias clean-containers='docker rm -v $(docker ps -a -q -f status=exited)'
alias clean-images='docker rmi $(docker images -q -f dangling=true)'
# TODO: alias clean-volumes='...'
# ----- Linux specific --------------------------------------------------------
# it doesn't make sense to repeat this for each specific host;
# it's Linux specific
if [ "$(uname)" = "Linux" ]; then
which tree &> /dev/null && alias tree="tree -C -F --dirsfirst"
else
which tree &> /dev/null && alias tree="tree -F --dirsfirst"
fi
# if tree doesn't exist, the return condition will be false when we exit
true
|
#!/bin/zsh
# Clear out bundles from previous runs so -resultBundlePath never collides.
rm -rf test-results
# Stop any app instance left running by an earlier test session.
# (killall exits non-zero when nothing matches; there is no `set -e`, so the
# script continues either way.)
killall SpartaConnect
# Build once up front; pipefail keeps xcodebuild's exit status despite the
# xcpretty pipe.
set -o pipefail && xcodebuild -workspace SpartaConnect.xcworkspace -scheme SpartaConnect clean build | xcpretty
# Soak loop for flaky-test hunting: rerun the UI tests (each run writes a
# timestamped result bundle) until a run fails; `time` reports the total.
time while { set -o pipefail && xcodebuild -workspace SpartaConnect.xcworkspace -scheme UITests test -resultBundlePath test-results/ui-tests-$(date +%s) | xcpretty } do;done
|
// Source: moesoha/frc-commandbased-cpp
#include "OI.h"
// Wires up operator-input devices; the driver joystick is on USB port 0.
OI::OI() {
joystickDriver.reset(new Joystick(0));
}
// Accessor for the driver joystick; ownership stays shared with OI.
std::shared_ptr<Joystick> OI::getJoystickDriver(){
return joystickDriver;
}
// Convenience passthrough to the driver joystick's raw axis value.
double OI::getDriverRawAxis(int axis){
return joystickDriver->GetRawAxis(axis);
}
|
import CoreData
import UIKit
/// Form for entering a new expense (name, cost, category) and saving it to
/// Core Data.
class NewExpenseFormViewController: UIViewController {

    // MARK: - Properties

    /// Core Data context backing new Expense records.
    let context = (UIApplication.shared.delegate as! AppDelegate).persistentContainer.viewContext

    // MARK: - Views

    let nameTextField = UITextField()
    let costTextField = UITextField()
    let categoryTextField = UITextField()
    let addExpenseButton = UIButton()

    // MARK: - View Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        configureView()
        configureConstraints()
    }

    // MARK: - Actions

    /// Validates the text fields, persists a new Expense, and dismisses.
    @objc private func addExpenseTapped() {
        guard
            let name = nameTextField.text,
            let category = categoryTextField.text,
            let rawCost = costTextField.text,
            let cost = Double(rawCost)
        else { return }

        // Build and populate the new managed object.
        let newExpense = Expense(context: context)
        newExpense.name = name
        newExpense.cost = cost
        newExpense.category = category

        // Persist, then close the form.
        (UIApplication.shared.delegate as! AppDelegate).saveContext()
        dismiss(animated: true, completion: nil)
    }
}
/// Lists stored expenses with a running total.
class OverviewViewController: UIViewController {
    /// Core Data context used by fetchExpenses(). The original referenced an
    /// undefined `context` here (it was declared only on
    /// NewExpenseFormViewController), which does not compile.
    let context = (UIApplication.shared.delegate as! AppDelegate).persistentContainer.viewContext

    /// Expenses loaded from the store, sorted ascending by name.
    var expenses: [Expense] = []

    // MARK: - Views
    let tableView = UITableView()
    let totalLabel = UILabel()

    // MARK: - View Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()
        // Load data before the view is configured so it can render.
        fetchExpenses()
        configureView()
        configureConstraints()
    }

    // MARK: - Fetch Data

    /// Fetches all Expense records, sorted by name, into `expenses`.
    private func fetchExpenses() {
        let fetchRequest: NSFetchRequest<Expense> = Expense.fetchRequest()
        fetchRequest.sortDescriptors = [NSSortDescriptor(key: "name", ascending: true)]
        do {
            expenses = try context.fetch(fetchRequest)
        } catch let error {
            // Best-effort, matching the original: log and keep whatever was
            // loaded previously.
            print(error)
        }
    }
}
|
// Source: marhas/ios-sdk-examples
//
// ExamplesTableViewController.h
// Examples
//
// Created by <NAME> on 1/26/16.
// Copyright © 2016 Mapbox. All rights reserved.
//
#import <UIKit/UIKit.h>
// Table view controller listing the SDK examples; implementation elsewhere.
@interface ExamplesTableViewController : UITableViewController
@end
|
def sum_of_multiples(limit=1000, divisors=(3, 5)):
    """Sum the natural numbers below ``limit`` divisible by any of ``divisors``.

    Generalizes the original hard-coded script (Project Euler #1: multiples
    of 3 or 5 below 1000) while keeping its default behavior.
    """
    return sum(i for i in range(limit) if any(i % d == 0 for d in divisors))


# Preserve the original script behavior: print the default answer.
total = sum_of_multiples()
print(total)
|
import math
def calculateDistances(points):
distances = []
for point in points:
x, y = point
distance = math.sqrt(x**2 + y**2)
distances.append([round(distance, 15), round(distance, 15)]) # Rounding to 15 decimal places
return distances
# Test the function with the given example
# (a 5x4 grid of points; each output entry repeats the same origin distance)
input_points = [[0, 1], [1, 1], [2, 1], [3, 1], [4, 1],
[0, 2], [1, 2], [2, 2], [3, 2], [4, 2],
[0, 3], [1, 3], [2, 3], [3, 3], [4, 3],
[0, 4], [1, 4], [2, 4], [3, 4], [4, 4]]
output_distances = calculateDistances(input_points)
print(output_distances)
|
# Launch 2 seeds per environment in the background. The original repeated
# this loop three times verbatim, differing only in --gym-id; iterate over
# the environment ids instead.
for gym_id in MicrortsAttackHRL-v1 MicrortsProduceCombatUnitHRL-v1 MicrortsRandomEnemyHRL3-v1
do
    for seed in {1..2}
    do
        # sleep staggers the launches; nohup + & detaches each run.
        (sleep 0.3 && nohup xvfb-run -a python ppo_ac_positive_reward.py \
            --gym-id $gym_id \
            --total-timesteps 10000000 \
            --wandb-project-name action-guidance \
            --prod-mode \
            --cuda True --shift 2000000 --adaptation 2000000 --end-e 0.5 --positive-likelihood 1.0 \
            --capture-video \
            --seed $seed
        ) >& /dev/null &
    done
done
|
import { Router } from "express";

const app = Router();

// Present Value: discounts cash flows CF1..CFn at `interest` per period.
// Query params: numberCF, interest, CF1, CF2, ...
app.get("/PV", (req, res) => {
  const numberCF = Number.parseInt(req.query.numberCF, 10); // number of cash flows
  // Rates are fractional (e.g. "0.05"); parseInt would truncate them to 0.
  const interest = Number.parseFloat(req.query.interest);
  let presentValue = 0;
  for (let i = 1; i <= numberCF; i++) {
    const cashFlow = Number.parseFloat(req.query["CF" + i]);
    // `^` is bitwise XOR in JavaScript — the original used it where
    // exponentiation was intended. The first cash flow (i = 1) is
    // discounted by (1+r)^0, matching the original's 0-based exponent.
    presentValue += cashFlow / Math.pow(1 + interest, i - 1);
  }
  res.send(String(presentValue));
});

// Future Value with `m` compounding periods per year (default 1).
app.get("/FV", (req, res) => {
  const CF = Number.parseFloat(req.query.CF);
  const time = Number.parseFloat(req.query.time);
  const interest = Number.parseFloat(req.query.interest);
  // The original defaulted m to 1 and then unconditionally re-parsed it,
  // overwriting the default with NaN whenever the parameter was missing.
  const m = req.query.m === undefined ? 1 : Number.parseInt(req.query.m, 10);
  const FV = CF * Math.pow(1 + interest / m, time * m);
  res.send(String(FV));
});

// Continuous Compounding Interest: FV = CF * e^(r*t).
app.get("/CCI", (req, res) => {
  const CF = Number.parseFloat(req.query.CF);
  const time = Number.parseFloat(req.query.time);
  const interest = Number.parseFloat(req.query.interest);
  // Math.E ^ x was XOR in the original; Math.exp computes e^x.
  const FV = CF * Math.exp(interest * time);
  res.send(String(FV));
});

export default app;
|
/**
* @name: mutations
* @author: yiyun <<EMAIL>>
* @profile: https://github.com/yiyungent
* @date: 7/4/2020 4:37 PM
* @description:mutations
* @update: 7/4/2020 4:37 PM
*/
import types from "./types";
export default {
// Commit with: this.$store.commit('<mutation name>', payload)
// Store the currently signed-in account on state.user.
[types.GET_USER](state, user) {
state.user = user;
},
// Sign out: clear the user and drop the cached auth token/expiry.
[types.LOGOUT](state) {
state.user = null;
localStorage.removeItem("token");
localStorage.removeItem("token_expire");
}
};
|
import React from 'react';
import useGlobalGame from 'hooks/useGlobalGame';
/**
 * Mounts the global game hook for the subtree it wraps and renders the
 * children unchanged. Named (rather than an anonymous default export) so the
 * component shows up properly in React DevTools and stack traces.
 */
const GlobalGameProvider = ({ children }) => {
  useGlobalGame();
  return <>{children}</>;
};

export default GlobalGameProvider;
|
'use strict';
const cp = require('child_process');
const electron = require('electron');
const { promisify } = require('util');
const os = require('os');
module.exports = function buildNative() {
const exec = promisify(cp.exec);
return Promise.all([
exec(`${electron} --version`, { encoding: 'utf8' }),
exec(`${electron} --abi`, { encoding: 'utf8' }),
])
.then(([version, abi]) => {
const electronVersion = version.match(/v(\d+\.\d+\.\d+)/)[1];
const electronABI = abi.match(/(\d+)/)[1];
return new Promise((resolve, reject) => {
let cmd = 'npm';
if (os.platform() === 'win32') {
cmd = 'npm.cmd';
}
const build = cp.spawn(cmd, [
'rebuild',
'--build-from-source',
'leveldown',
'--depth=0',
'--runtime=electron',
`--target=${electronVersion}-beta.3`,
'--disturl=https://atom.io/download/atom-shell',
`--abi=${electronABI}`,
]);
build.stdout.pipe(process.stdout);
build.stderr.pipe(process.stderr);
build.on('close', (code) => {
if (code) {
reject(new Error(`Rebuild exited with code ${code}`));
} else {
resolve();
}
});
});
})
.then(() => {
console.log('Done building native modules'); // eslint-disable-line no-console
})
.catch((error) => {
console.log('There was a problem building', error); // eslint-disable-line no-console
});
};
|
import {Customer} from "./customer";
import {Message} from "./message";
/**
 * Model of a customer complaint as exchanged with the backend.
 *
 * NOTE(review): all date fields are plain strings -- presumably in a
 * backend-defined (ISO?) format; confirm against the API before parsing.
 */
export class Complaint {
// Unique identifier of the complaint.
complaintId: number;
// Message thread attached to this complaint.
complaintMessages: Array<Message>;
// When the complaint was submitted.
submitDate: string;
// When the complaint was / is to be considered.
considerDate: string;
// Current processing status of the complaint.
status: string;
// NOTE(review): by its name, the customer a response is addressed to -- confirm.
customerResponse: Customer;
// Description of the product the complaint concerns.
productDescription: string;
// Invoice number of the purchase.
invoiceNumber: string;
// Date of the purchase.
purchaseDate: string;
// Purchase price of the product.
price: number;
// Bank account number (e.g. for a refund).
iban: string;
}
|
import numpy as np
import gym
from gym import wrappers
# The original script used `keras` and `Dense` without ever importing them,
# which raised NameError before anything could run.
from tensorflow import keras
from tensorflow.keras.layers import Dense

env = gym.make('Connect4-v0')

# Define the reinforcement learning model: a small MLP scoring a 7x6 board.
model = keras.Sequential()
model.add(Dense(64, input_dim=7 * 6, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train the model on random play.
for i in range(1000):
    observation = env.reset()
    done = False
    t = 0  # step counter; the original printed `t` without ever defining it
    while not done:
        env.render()
        action = env.action_space.sample()
        observation, reward, done, info = env.step(action)
        # NOTE(review): assumes `observation` is a flat length-42 array
        # (matching input_dim=7*6) and `reward` a scalar label -- confirm
        # the environment's observation/reward spaces before relying on this.
        model.fit(observation, reward, epochs=1, verbose=0)
        t += 1
        if done:
            print("Episode " + str(i) + " Finished after {} timesteps".format(t))
            break

# Save the trained model
model.save("AI-Agent.h5")
|
package com.saucedemo.appmanager;
import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxOptions;
import org.openqa.selenium.opera.OperaDriver;
import org.openqa.selenium.opera.OperaOptions;
import org.openqa.selenium.remote.BrowserType;
import java.util.concurrent.TimeUnit;
import static org.testng.Assert.fail;
public class ApplicationManager {
protected WebDriver wd;
private SessionHelper sessionHelper;
private NavigationHelper navigationHelper;
private OrderHelper orderHelper;
private String baseUrl;
private String browserType = BrowserType.CHROME;
private StringBuffer verificationErrors = new StringBuffer();
public void init() {
switch (browserType) {
case "OPERA":
OperaOptions oo = new OperaOptions();
oo.addArguments("--headless");
wd = new OperaDriver();
break;
case "FIREFOX":
FirefoxOptions fo = new FirefoxOptions();
fo.addArguments("--headless");
wd = new FirefoxDriver();
break;
default:
ChromeOptions co = new ChromeOptions();
co.addArguments("--headless");
wd = new ChromeDriver();
break;
}
baseUrl = "https://www.google.com/";
wd.manage().timeouts().implicitlyWait(1, TimeUnit.SECONDS);
orderHelper = new OrderHelper(wd);
navigationHelper = new NavigationHelper(wd);
sessionHelper = new SessionHelper(wd);
sessionHelper.login("standard_user", "secret_sauce");
}
public void stop() {
wd.quit();
String verificationErrorString = verificationErrors.toString();
if (!"".equals(verificationErrorString)) {
fail(verificationErrorString);
}
}
public OrderHelper getOrderHelper() {
return orderHelper;
}
public NavigationHelper getNavigationHelper() {
return navigationHelper;
}
public SessionHelper getSessionHelper() {
return sessionHelper;
}
}
|
#!/usr/bin/env bash
# Fail fast: exit on errors, on unset variables and on pipeline failures.
set -e
set -u
set -o pipefail
# Absolute directory of this script (symlink-safe via `pwd -P`).
SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
# Shared helper library providing run/run_bg/print_*/test_case_* functions.
SOURCEPATH="${SCRIPTPATH}/../../.lib/conf.sh"
# The pwncat binary under test.
BINARY="${SCRIPTPATH}/../../../bin/pwncat"
# shellcheck disable=SC1090
source "${SOURCEPATH}"
# -------------------------------------------------------------------------------------------------
# GLOBALS
# -------------------------------------------------------------------------------------------------
# Positional parameters (all optional):
#   $1 remote host, $2 remote port, $3 seconds to wait after starting an
#   instance, $4 number of test rounds, $5 python version suffix (e.g. "3").
RHOST="${1:-localhost}"
RPORT="${2:-4444}"
STARTUP_WAIT="${3:-4}"
RUNS="${4:-1}"
PYTHON="python${5:-}"
# Human-readable Python version string (best effort; `|| true` keeps a
# missing interpreter from aborting the script under `set -e`).
PYVER="$( "${PYTHON}" -V 2>&1 | head -1 || true )"
# -------------------------------------------------------------------------------------------------
# TEST FUNCTIONS
# -------------------------------------------------------------------------------------------------
print_test_case "${PYVER}"
# Run one end-to-end round: start a pwncat server, connect a pwncat client,
# pipe a fixed payload from client to server and verify delivery plus
# error-free operation on both sides.
# Args:
#   $1 server options   $2 client options
#   $3 current mutation $4 total mutations
#   $5 current round    $6 total rounds
run_test() {
local srv_opts="${1// / }"
local cli_opts="${2// / }"
local curr_mutation="${3}"
local total_mutation="${4}"
local curr_round="${5}"
local total_round="${6}"
local data=
print_h1 "[ROUND: ${curr_round}/${total_round}] (mutation: ${curr_mutation}/${total_mutation}) Starting Test Round (srv '${srv_opts}' vs cli '${cli_opts}')"
run "sleep 1"
###
### Create data and files
###
# Payload ends in \r; with '--crlf lf' it must arrive as \n (see 'expect').
data="$(tmp_file)"
printf "abcdefghijklmnopqrstuvwxyz1234567890\\r" > "${data}"
expect="abcdefghijklmnopqrstuvwxyz1234567890\\n"
srv_stdout="$(tmp_file)"
srv_stderr="$(tmp_file)"
cli_stdout="$(tmp_file)"
cli_stderr="$(tmp_file)"
# --------------------------------------------------------------------------------
# START: SERVER
# --------------------------------------------------------------------------------
print_h2 "(1/4) Start: Server"
# Start Server
print_info "Start Server"
# shellcheck disable=SC2086
# The no-op `printf ""` keeps a failed background start from aborting the
# whole script under `set -e`; the test_case_* checks below do the real
# verification.
if ! srv_pid="$( run_bg "" "${PYTHON}" "${BINARY}" ${srv_opts} "${srv_stdout}" "${srv_stderr}" )"; then
printf ""
fi
# Wait until Server is up
run "sleep ${STARTUP_WAIT}"
# [SERVER] Ensure Server is running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [SERVER] Ensure Server has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# --------------------------------------------------------------------------------
# START: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(2/4) Start: Client"
# Start Client
print_info "Start Client"
# shellcheck disable=SC2086
if ! cli_pid="$( run_bg "cat ${data}" "${PYTHON}" "${BINARY}" ${cli_opts} "${cli_stdout}" "${cli_stderr}" )"; then
printf ""
fi
# Wait until Client is up
run "sleep ${STARTUP_WAIT}"
# [CLIENT] Ensure Client is running
test_case_instance_is_running "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [SERVER] Ensure Server is still is running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}"
# [SERVER] Ensure Server still has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}"
# --------------------------------------------------------------------------------
# DATA TRANSFER
# --------------------------------------------------------------------------------
print_h2 "(3/4) Transfer: Client -> Server"
# [CLIENT -> SERVER]
wait_for_data_transferred "" "${expect}" "" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}"
# --------------------------------------------------------------------------------
# TEST: Errors
# --------------------------------------------------------------------------------
print_h2 "(4/4) Test: Errors"
# [SERVER] Ensure Server has has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}"
# [CLIENT] Ensure Client has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli_stdout}" "${cli_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# Tear down: the client via the helper, the server force-killed.
# NOTE(review): the server runs with --no-shutdown, which presumably is why
# it gets a hard `kill -9` -- confirm kill_pid would not suffice.
kill_pid "${cli_pid}"
kill -9 "${srv_pid}" >/dev/null 2>/dev/null || true
}
# -------------------------------------------------------------------------------------------------
# MAIN ENTRYPOINT
# -------------------------------------------------------------------------------------------------
# Mutations vary the verbosity level on each side; only mutation 1 (both
# sides at -vvvv) is currently enabled, mutations 2-13 are commented out.
for curr_round in $(seq "${RUNS}"); do
echo
# server opts client opts
run_test "-l ${RPORT} --no-shutdown --crlf lf -vvvv" "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvvv" "1" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vvv " "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvvv" "2" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vv " "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvvv" "3" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -v " "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvvv" "4" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf " "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvvv" "5" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vvvv" "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvv " "6" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vvvv" "${RHOST} ${RPORT} --no-shutdown --crlf lf -vv " "7" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vvvv" "${RHOST} ${RPORT} --no-shutdown --crlf lf -v " "8" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vvvv" "${RHOST} ${RPORT} --no-shutdown --crlf lf " "9" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vvv " "${RHOST} ${RPORT} --no-shutdown --crlf lf -vvv " "10" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -vv " "${RHOST} ${RPORT} --no-shutdown --crlf lf -vv " "11" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf -v " "${RHOST} ${RPORT} --no-shutdown --crlf lf -v " "12" "13" "${curr_round}" "${RUNS}"
#run_test "-l ${RPORT} --no-shutdown --crlf lf " "${RHOST} ${RPORT} --no-shutdown --crlf lf " "13" "13" "${curr_round}" "${RUNS}"
done
|
<filename>postcss.config.js
// PurgeCSS pass: strips CSS classes not referenced by the source templates.
const purgecss = require('@fullhuman/postcss-purgecss')({
content: ['./src/**/*.html', './src/**/*.jsx'],
// regular expression for css classes: NOTE: Node.js 9.11.2 and above only
// (the lookbehind `(?<!:)` requires a modern V8); the `(?<!:)` keeps a
// trailing colon (e.g. Tailwind variant prefixes) out of the match.
defaultExtractor: content => content.match(/[\w-/:]+(?<!:)/g) || [],
})
// PostCSS pipeline: Tailwind first, then preset-env transforms; PurgeCSS
// is appended only for production builds so dev keeps all classes.
module.exports = () => ({
plugins: [
require('tailwindcss'),
require('postcss-preset-env'),
...(process.env.NODE_ENV === 'production' ? [purgecss] : []),
],
})
|
#!/bin/sh
##############################################################################
# Script aimed at OCRing a single page of a PDF file or a single image
#
# Copyright (c) 2013-14: fritz-hh from Github (https://github.com/fritz-hh)
# Copyright (c) 2014 Daniel Berthereau (https://github.com/Daniel-KM)
##############################################################################
. "./src/config.sh"
# Initialization of variables passed by arguments
# (positional contract defined by the calling driver script; all 16
# arguments are expected to be present)
FILE_INPUT="$1" # Image file or PDF file containing the page to be OCRed
PAGE_INFO="$2" # Various characteristics of the page to be OCRed
TOTAL_PAGES="$3" # Total number of page of the PDF file (required for logging)
TMP_FLD="$4" # Folder where the temporary files should be placed
VERBOSITY="$5" # Requested verbosity
LANGUAGE="$6" # Language of the file to be OCRed
KEEP_TMP="$7" # Keep the temporary files after processing (helpful for debugging)
PREPROCESS_DESKEW="$8" # Deskew the page to be OCRed
PREPROCESS_CLEAN="$9" # Clean the page to be OCRed
PREPROCESS_CLEANTOPDF="${10}" # Put the cleaned paged in the OCRed PDF
OVERSAMPLING_DPI="${11}" # Oversampling resolution in dpi
PDF_NOIMG="${12}" # Request to generate also a PDF page containing only the OCRed text but no image (helpful for debugging)
TESS_CFG_FILES="${13}" # Specific configuration files to be used by Tesseract during OCRing
FORCE_OCR="${14}" # Force to OCR, even if the page already contains fonts
SKIP_TEXT="${15}" # Skip OCR on pages that contain fonts and include the page anyway
EXTRACT_HOCR_ONLY="${16}" # Extract content only (hocr file)
##################################
# Detect the characteristics of an image or an embedded image of a PDF file, for
# the page number provided as parameter
#
# Param 1: page number (used when processing a PDF file)
# Param 2: image or PDF page width in pt
# Param 3: image or PDF page height in pt
# Param 4: temporary file path (Path of the file in which the output should be written)
# Param 5: type of file being processed ("Image" or "Page")
# Output: A file containing the characteristics of the embedded image. File structure:
# DPI=<dpi>
# COLOR_SPACE=<colorspace>
# DEPTH=<colordepth>
# Returns:
# - 0: if no error occurs
# - 1: in case the page already contains fonts (which should be the case for PDF generated from scanned pages)
# - 2: in case the page contains more than one image
#
# NOTE(review): also reads the globals FILE_INPUT and curOrigImg (the latter
# is defined further down in this script, before this function is called).
##################################
getImgInfo() {
local page widthFile heightFile curImgInfo nbImg curImg propCurImg widthCurImg heightCurImg colorspaceCurImg depthCurImg dpi typeFile
# page number
page="$1"
# width / height of image or PDF page (in pt)
widthFile="$2"
heightFile="$3"
# path of the file in which the output should be written
curImgInfo="$4"
# Image or Page of a PDF?
typeFile="$5"
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Size ${heightFile}x${widthFile} (h*w in pt)"
# If the file is a PDF, the page should be extracted.
if [ "$typeFile" = "Page" ]; then
# check if the page already contains fonts (which should not be the case for PDF based on scanned files
# (pdffonts always prints a 2-line header; more than 2 lines means fonts exist)
[ `pdffonts -f $page -l $page "${FILE_INPUT}" | wc -l` -gt 2 ] && echo "Page $page: Page already contains font data !!!" && return 1
# extract raw image from pdf file to compute resolution
# unfortunately this image can have another orientation than in the pdf...
# so we will have to extract it again later using pdftoppm
pdfimages -f $page -l $page -j "$FILE_INPUT" "$curOrigImg" 1>&2
# count number of extracted images
nbImg=$((`ls -1 "$curOrigImg"* 2>/dev/null | wc -l`))
if [ "$nbImg" -ne 1 ]; then
[ $VERBOSITY -ge $LOG_WARN ] && echo "Page $page: Expecting exactly 1 image covering the whole page (found $nbImg). Cannot compute dpi value."
return 2
fi
else
# Link image into temp folder.
ln -s "$FILE_INPUT" "$curOrigImg"
fi
# Get characteristics of the extracted image
curImg=`ls -1 "$curOrigImg"* 2>/dev/null`
propCurImg=`identify -format "%[width] %[height] %[colorspace] %[depth] %[resolution.x] %[resolution.y]" "$curImg"`
widthCurImg=`echo "$propCurImg" | cut -f1 -d" "`
heightCurImg=`echo "$propCurImg" | cut -f2 -d" "`
colorspaceCurImg=`echo "$propCurImg" | cut -f3 -d" "`
depthCurImg=`echo "$propCurImg" | cut -f4 -d" "`
dpi=`echo "$propCurImg" | cut -f5 -d" "`
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Size ${heightCurImg}x${widthCurImg} (in pixel)"
# Get resolution (dpi), assuming it is the same for x & y.
# NOTE(review): `[ -r "$dpi" ]` tests whether a *file* named after the dpi
# value is readable -- `[ -n "$dpi" ]` (non-empty) was probably intended.
# As written, PDF pages effectively always take the computed-dpi branch.
if [ -r "$dpi" ] || [ "$typeFile" = "Image" ]; then
# PNG format is by dot per centimeter, other formats dot per inch.
mime=`file --mime-type --brief "${curImg}" | cut --characters=7-9`
if [ "$mime" = "png" ]; then
dpi=`echo "scale=10;($dpi*2.54)+0.5" | bc`
fi
dpi=`echo "scale=0;$dpi/1" | bc`
else
# compute the resolution of the image (making the assumption that x & y resolution are equal)
# and round it to the nearest integer
dpi=`echo "scale=5;sqrt($widthCurImg*72*$heightCurImg*72/$widthFile/$heightFile)+0.5" | bc`
dpi=`echo "scale=0;$dpi/1" | bc`
fi
# save the image characteristics
echo "DPI=$dpi" > "$curImgInfo"
echo "COLOR_SPACE=$colorspaceCurImg" >> "$curImgInfo"
echo "DEPTH=$depthCurImg" >> "$curImgInfo"
return 0
}
# ------------------------------------------------------------------------
# MAIN: classify the input, gather page/image characteristics, optionally
# deskew/clean, OCR with tesseract, and emit a per-page OCRed PDF (or only
# the hocr file when EXTRACT_HOCR_ONLY is set).
# ------------------------------------------------------------------------
# Get the type of file (image or PDF)
typeFile=`file --mime-type --brief "${FILE_INPUT}" | cut -d"/" -f1`
if [ "$typeFile" = "image" ]; then
typeFile="Image"
else
typeFile="Page"
fi
# PAGE_INFO is a space-separated record: "<page> <width> <height> [hocr path]"
page=`echo $PAGE_INFO | cut -f1 -d" "`
[ $VERBOSITY -ge $LOG_INFO ] && echo "Processing $typeFile $page / $TOTAL_PAGES"
# get width / height of PDF page or image file (in pt)
widthFile=`echo $PAGE_INFO | cut -f2 -d" "`
heightFile=`echo $PAGE_INFO | cut -f3 -d" "`
hocrFile="`echo $PAGE_INFO | cut -f4- -d" "`"
# create the name of the required temporary files
curOrigImg="$TMP_FLD/${page}.orig-img" # original image available in the current PDF page
# (the image file may have a different orientation than in the pdf file)
curHocr="$TMP_FLD/${page}.hocr" # hocr file to be generated by the OCR SW for the current page
curOCRedPDF="$TMP_FLD/${page}.ocred.pdf" # PDF file containing the image + the OCRed text for the current page
curOCRedPDFDebug="$TMP_FLD/${page}.ocred.todebug.pdf" # PDF file containing data required to find out if OCR worked correctly
curImgInfo="$TMP_FLD/${page}.orig-img-info.txt" # Detected characteristics of the embedded image
# Detect the characteristics of the embedded page or the image.
dpi=`echo "scale=0;$DEFAULT_DPI/1" | bc` # default resolution
colorspaceCurImg="sRGB" # default color space
depthCurImg="8" # default color depth
getImgInfo "$page" "$widthFile" "$heightFile" "$curImgInfo" "$typeFile"
ret_code="$?"
[ $VERBOSITY -ge $LOG_DEBUG ] && [ -z "$hocrFile" ] && echo "$typeFile $page: width $widthFile, height $heightFile"
[ $VERBOSITY -ge $LOG_DEBUG ] && [ ! -z "$hocrFile" ] && echo "$typeFile $page: width $widthFile, height $heightFile (hocr file $hocrFile)"
# Dispatch on getImgInfo's return code (0 = ok, 1 = page has fonts,
# 2 = more than one embedded image) and on whether an hocr file was given.
# In case there is an hocr file, do not OCR.
if [ ! -z "$hocrFile" ]; then
[ $VERBOSITY -ge $LOG_WARN ] && echo "$typeFile $page: Using hocr file, assuming a default resolution of $dpi dpi"
# Handle pages that already contain a text layer
elif [ "$ret_code" -eq 1 ] && [ "$SKIP_TEXT" = "1" ]; then
echo "Page $page: Skipping processing because page contains text..."
pdfseparate -f $page -l $page "${FILE_INPUT}" "$curOCRedPDF"
exit 0
# In case the page contains text, do not OCR, unless the FORCE_OCR flag is set.
elif [ "$ret_code" -eq 1 ] && [ "$FORCE_OCR" = "0" ]; then
echo "Page $page: Exiting... (Use the -f option to force OCRing, even though fonts are available in the input file)" && exit $EXIT_BAD_INPUT_FILE
elif [ "$ret_code" -eq 1 ] && [ "$FORCE_OCR" = "1" ]; then
[ $VERBOSITY -ge $LOG_WARN ] && echo "Page $page: OCRing anyway, assuming a default resolution of $dpi dpi"
# in case the page contains more than one image, warn the user but go on with default parameters
elif [ "$ret_code" -eq 2 ]; then
[ $VERBOSITY -ge $LOG_WARN ] && echo "Page $page: Continuing anyway, assuming a default resolution of $dpi dpi"
# Else, this is a normal PDF without any OCR, or a single image file, with or without hocr.
else
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Continuing process with a normal page."
# read the image characteristics from the file
dpi=`cat "$curImgInfo" | grep "^DPI=" | cut -f2 -d"="`
colorspaceCurImg=`cat "$curImgInfo" | grep "^COLOR_SPACE=" | cut -f2 -d"="`
depthCurImg=`cat "$curImgInfo" | grep "^DEPTH=" | cut -f2 -d"="`
fi
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: $dpi dpi, colorspace $colorspaceCurImg, depthCurImg $depthCurImg"
# perform oversampling if the resolution is not sufficient to get good OCR results
if [ "$dpi" -lt "$OVERSAMPLING_DPI" ]; then
[ $VERBOSITY -ge $LOG_WARN ] && echo "$typeFile $page: Low image resolution detected ($dpi dpi). Performing oversampling ($OVERSAMPLING_DPI dpi) to try to get better OCR results."
dpi="$OVERSAMPLING_DPI"
elif [ "$dpi" -lt 200 ]; then
[ $VERBOSITY -ge $LOG_WARN ] && echo "$typeFile $page: Low image resolution detected ($dpi dpi). If needed, please use the \"-o\" to try to get better OCR results."
fi
# Identify if page image should be saved as ppm (color), pgm (gray) or pbm (b&w)
ext="ppm" # by default (color image) the extension of the extracted image is ppm
opt="" # by default (color image) no option as to be passed to pdftoppm
if [ "$colorspaceCurImg" = "Gray" ] && [ "$depthCurImg" = "1" ]; then # if monochrome (b&w)
ext="pbm"
opt="-mono"
elif [ "$colorspaceCurImg" = "Gray" ]; then # if gray
ext="pgm"
opt="-gray"
fi
curImgPixmap="$TMP_FLD/$page.$ext"
curImgPixmapDeskewed="$TMP_FLD/$page.deskewed.$ext"
curImgPixmapClean="$TMP_FLD/$page.cleaned.$ext"
# extract current page as image with correct orientation and resolution
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Extracting image as $ext file (${dpi} dpi)"
if [ "$typeFile" = "Page" ]; then
! pdftoppm -f $page -l $page -r $dpi $opt "$FILE_INPUT" > "$curImgPixmap" \
&& echo "Could not extract $typeFile $page as $ext from \"$FILE_INPUT\". Exiting..." && exit $EXIT_OTHER_ERROR
widthCurImg=$(($dpi*$widthFile/72))
heightCurImg=$(($dpi*$heightFile/72))
else
# Avoid a convert process if possible,
if [ "$PREPROCESS_DESKEW" = "1" ] && [ "$DESKEW_TOOL" != "Leptonica" ]; then
ln -s "$FILE_INPUT" "$curImgPixmap"
else
! convert "$FILE_INPUT" "$curImgPixmap" \
&& echo "Could not extract $typeFile $page as $ext from \"$FILE_INPUT\". Exiting..." && exit $EXIT_OTHER_ERROR
fi
widthCurImg=$widthFile
heightCurImg=$heightFile
fi
# if requested deskew image (without changing its size in pixel)
if [ "$PREPROCESS_DESKEW" = "1" ]; then
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Deskewing image"
if [ "$DESKEW_TOOL" = "Leptonica" ]; then
! python2 $SRC/leptonica.py deskew -r $dpi "$curImgPixmap" "$curImgPixmapDeskewed" && echo "Problem file: $curImgPixmap" && exit $?
else
! convert "$curImgPixmap" -deskew 40% -gravity center -extent ${widthCurImg}x${heightCurImg} "$curImgPixmapDeskewed" \
&& echo "Could not deskew \"$curImgPixmap\". Exiting..." && exit $EXIT_OTHER_ERROR
fi
# Check result of deskew.
if [ ! -s "$curImgPixmapDeskewed" ]; then
echo "Fail when deskew \"$curImgPixmap\" (size: ${widthCurImg}x${heightCurImg}). Exiting..." && exit $EXIT_OTHER_ERROR
fi
else
# No deskew requested: symlink so later steps always read *.deskewed.*
ln -s `basename "$curImgPixmap"` "$curImgPixmapDeskewed"
fi
# if requested clean image with unpaper to get better OCR results
if [ "$PREPROCESS_CLEAN" = "1" ]; then
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Cleaning image with unpaper"
! unpaper --dpi $dpi --mask-scan-size 100 \
--no-deskew --no-grayfilter --no-blackfilter --no-mask-center --no-border-align \
"$curImgPixmapDeskewed" "$curImgPixmapClean" 1> /dev/null \
&& echo "Could not clean \"$curImgPixmapDeskewed\". Exiting..." && exit $EXIT_OTHER_ERROR
else
# No cleaning requested: symlink so later steps always read *.cleaned.*
ln -s `basename "$curImgPixmapDeskewed"` "$curImgPixmapClean"
fi
# perform OCR if needed (without hocr file).
if [ -z "$hocrFile" ]; then
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Performing OCR"
! tesseract -l "$LANGUAGE" "$curImgPixmapClean" "$curHocr" hocr $TESS_CFG_FILES 1> /dev/null 2> /dev/null \
&& echo "Could not OCR file \"$curImgPixmapClean\". Exiting..." && exit $EXIT_OTHER_ERROR
# Tesseract names the output files differently in some distributions.
if [ -e "$curHocr.html" ]; then
mv "$curHocr.html" "$curHocr"
elif [ -e "$curHocr.hocr" ]; then
mv "$curHocr.hocr" "$curHocr"
elif [ ! -e "$curHocr" ]; then
echo "\"$curHocr[.html|.hocr]\" not found. Exiting..." && exit $EXIT_OTHER_ERROR
fi
else
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Using existing HOCR file"
ln -s "$hocrFile" "$curHocr"
fi
# Keep hocr file as xml files.
if [ "$EXTRACT_HOCR_ONLY" = "1" ]; then
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Saving text into \"$curHocr.xml\""
mv "$curHocr" "$curHocr.xml"
# Keep original path to simplify debugging.
ln -s "$curHocr.xml" "$curHocr"
# embed text and image to new pdf file
else
if [ "$PREPROCESS_CLEANTOPDF" = "1" ]; then
image4finalPDF="$curImgPixmapClean"
else
image4finalPDF="$curImgPixmapDeskewed"
fi
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Embedding text in PDF"
! python2 $SRC/hocrTransform.py -r $dpi -i "$image4finalPDF" "$curHocr" "$curOCRedPDF" \
&& echo "Could not create PDF file from \"$curHocr\". Exiting..." && exit $EXIT_OTHER_ERROR
# if requested generate special debug PDF page with visible OCR text
if [ "$PDF_NOIMG" = "1" ] ; then
[ $VERBOSITY -ge $LOG_DEBUG ] && echo "$typeFile $page: Embedding text in PDF (debug page)"
! python2 $SRC/hocrTransform.py -b -r $dpi "$curHocr" "$curOCRedPDFDebug" \
&& echo "Could not create PDF file from \"$curHocr\". Exiting..." && exit $EXIT_OTHER_ERROR
fi
fi
# delete temporary files created for the current page
# to avoid using to much disk space in case of PDF files having many pages
if [ "$KEEP_TMP" = "0" ]; then
rm -f "$curOrigImg"*
rm -f "$curHocr"
rm -f "$curImgPixmap"
rm -f "$curImgPixmapDeskewed"
rm -f "$curImgPixmapClean"
rm -f "$curImgInfo"
fi
exit 0
|
import React, { useContext } from "react";
import FontAwesome from 'react-fontawesome';
import {
ToolTipForeignObject,
ToolTipButtonGroupDiv,
ToolTipInnerButton,
} from "./tooltip-styles";
import axios from "axios";
import { D3Context } from "../../contexts/D3Context";
import { teamSearchResults } from "../../data/SearchResultData/teamSearchResults";
/**
* A functional component that renders a tooltip at a given x,y position
*/
function Tooltip() {
// Get the d3 state and action dispatcher
const { d3State, dispatch } = useContext(D3Context);
if (d3State.tooltipCoordinates) {
// TODO: could display metadata from the hoveredNode
// console.log("hovered node is ", hoveredNode);
var { xScale, yScale, hovered} = d3State.tooltipCoordinates;
}
/**
* Sets fake results to the "See the team" section
* @param {Object} e
* @param {Object} hoveredNode
*/
const onTooltipClick = (e, hoveredNode) => {
const orgID = hoveredNode.data.org_id;
axios
.get(
`http://127.0.0.1:5000/api/v1/employees/${orgID}?lang=en`
)
.then(({ data })=> {
console.log("response is ", data)
dispatch({
type: "SET_SEE_TEAM_RESULTS",
seeTeamTitle: hoveredNode.data.name,
seeTeamSearchResults: data,
})
})
// dispatch({
// type: "SET_SEE_TEAM_RESULTS",
// seeTeamTitle: hoveredNode.data.name,
// seeTeamSearchResults: teamSearchResults,
// })
}
// The <foreignObject> SVG element includes elements from a different XML namespace. In the context of a browser, it is most likely (X)HTML.
return (
<ToolTipForeignObject
id="TooltipID"
xScale={xScale}
yScale={yScale}
>
<ToolTipInnerButton
onClick={(e) => {
onTooltipClick(e, d3State.tooltipHoveredNode);
}}
>
<FontAwesome
style={{
paddingRight: "5px",
}}
name="fas fa-users" />
See the team
</ToolTipInnerButton>
</ToolTipForeignObject>
)
}
export default Tooltip
|
<!DOCTYPE html>
<html>
<head>
<title>Current Time</title>
</head>
<body>
<h1>The current date and time is:
<span id="current-time"></span>
</h1>
<script>
// textContent instead of document.write: document.write blocks parsing and
// is ignored in some contexts (e.g. deferred/async scripts); output is the
// same locale-formatted timestamp.
document.getElementById('current-time').textContent = new Date().toLocaleString();
</script>
</body>
</html>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.