text stringlengths 1 1.05M |
|---|
<reponame>NosCoreLegend/Launcher
const path = require('path')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const HtmlWebpackHotPlugin = require('html-webpack-hot-plugin')
let htmlHotPlugin = new HtmlWebpackHotPlugin({ hot: true });
let mode = process.argv[process.argv.indexOf('--mode') + 1];
console.log(`webpack mode is ${process.argv[process.argv.indexOf('--mode') + 1]}`)
if (mode === 'development') {
htmlHotPlugin = new HtmlWebpackHotPlugin({ hot: true });
}
const commonConfig = {
mode: mode,
devtool: mode === 'production' ? "" : "source-map",
node: {
__dirname: false
},
output: {
path: path.resolve(__dirname, 'dist'),
filename: '[name].js'
},
devServer: {
writeToDisk: true,
before(app, server) {
if (mode === 'development') {
htmlHotPlugin.setDevServer(server)
}
}
},
module: {
rules: [
{
test: /\.ts$/,
enforce: 'pre',
loader: 'tslint-loader',
options: {
typeCheck: true,
emitErrors: true
}
},
{
test: /\.tsx?$/,
loader: ['babel-loader', 'ts-loader']
},
{
test: /\.js$/,
enforce: 'pre',
loader: 'standard-loader',
options: {
typeCheck: true,
emitErrors: true
}
},
{
test: /\.jsx?$/,
loader: ['babel-loader']
},
{
test: /\.(png|jpe?g|gif|svg|woff2?|eot|ttf|otf)(\?.*)?$/,
loader: 'url-loader',
},
{
test: /\.css$/,
use: ['style-loader', 'css-loader']
},
{
test: /\.less$/,
use: [
'style-loader',
'@teamsupercell/typings-for-css-modules-loader',
{ loader: 'css-loader', options: { modules:true, sourceMap: true } },
"less-loader"
]
},
]
},
resolve: {
extensions: ['.js', '.ts', '.tsx', '.jsx', '.json', '.gif', '.png', '.jpg', '.jpeg', '.svg', '.less', '.css']
}
}
module.exports = [
Object.assign(
{
target: 'electron-main',
entry: { main: './src/main.ts' },
plugins: [
mode === 'production' ? new CleanWebpackPlugin() : false,
].filter(Boolean)
},
commonConfig),
Object.assign(
{
target: 'electron-renderer',
entry: { gui: './src/gui.tsx' },
plugins: [
new HtmlWebpackPlugin({
hash: true,
filename: 'index.html',
title: 'NosCoreLegend',
}),
mode === 'development' ? htmlHotPlugin : false].filter(Boolean)
},
commonConfig)
]; |
/*
* Copyright (c) 2009, <NAME>, Inc.
* Copyright (c) 2012-2015, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <NAME>, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "sightedturtlesim/TurtleFrame.hpp"
#include "sightedturtlesim/VisionTurtle.hpp"
#include "sightedturtlesim/AbstractImageServer.hpp"
#include <cstdlib>
#include <ctime>
// Construct the simulator frame: store the node handle and image server,
// start handing out robot IDs from 1, and advertise the "reset" service.
TurtleFrame::TurtleFrame(ros::NodeHandle& nh,
    AbstractImageServer* server) : nh_(nh),
    freeRobotID(1), imageServer(server), turtlesMutex() {
  reset_srv_ = nh_.advertiseService("reset", &TurtleFrame::resetCallback, this);
};

// Destroy the frame; reset() deletes every remaining turtle.
TurtleFrame::~TurtleFrame() {
  reset();
};
bool TurtleFrame::deleteTurtle(const std::string& name) {
bool deleted = false;
turtlesMutex.lock();
M_Turtle::iterator itTurtles = turtles_.begin();
M_Turtle::iterator itTurtlesEnd = turtles_.end();
for (; itTurtles != itTurtlesEnd; itTurtles++) {
if (itTurtles->first == name) {
delete itTurtles->second;
turtles_.erase(itTurtles);
deleted = true;
break;
}
}
turtlesMutex.unlock();
return deleted;
};
// Report whether a turtle with the given name is currently registered.
bool TurtleFrame::hasTurtle(const std::string& name) {
  turtlesMutex.lock();
  const bool found = (turtles_.count(name) > 0);
  turtlesMutex.unlock();
  return found;
}
// Spawn a camera-equipped turtle at pose (x, y, z, theta).
// hfovDeg/aspectRatio and camW/camH/fps parameterize the simulated camera;
// scale sets the turtle's spatial scale. The new turtle uses velocity
// control mode. Returns the generated unique name ("turtleN").
std::string TurtleFrame::spawnVisionTurtle(double x, double y, double z, double theta,
    double hfovDeg, double aspectRatio,
    unsigned int camW, unsigned int camH, double fps, double scale) {
  std::string real_name;
  // Pick the next free "turtleN" name; freeRobotID only ever advances, so
  // IDs stay unique even when a name is already taken.
  do {
    real_name = "turtle" + boost::lexical_cast<std::string>(freeRobotID++);
  } while (hasTurtle(real_name));
  sightedturtlesim::PoseXYZ initPose;
  initPose.x = x; initPose.y = y; initPose.z = z; initPose.theta = theta;
  sightedturtlesim::TurtleParams params;
  params.spatial_scale = scale; params.ctrl_mode = sightedturtlesim::TurtleParams::VELOCITY_CTRL_MODE;
  // The turtle gets its own ROS namespace (its name) and the ID that was
  // just consumed above (freeRobotID - 1).
  Turtle* t = new VisionTurtle(ros::NodeHandle(real_name), initPose, params,
      imageServer, freeRobotID - 1, hfovDeg, aspectRatio, camW, camH, fps);
  turtlesMutex.lock();
  turtles_[real_name] = t;
  turtlesMutex.unlock();
  ROS_INFO_STREAM("Spawning turtle [" << real_name << "] at x=[" << x <<
      "], y=[" << y << "], z=[" << z << "], theta=[" << theta << "]");
  return real_name;
};
// Delete every turtle and restart ID assignment from 1.
void TurtleFrame::reset() {
  turtlesMutex.lock();
  M_Turtle::iterator itTurtles = turtles_.begin();
  M_Turtle::iterator itTurtlesEnd = turtles_.end();
  for (; itTurtles != itTurtlesEnd; itTurtles++) {
    delete itTurtles->second;
  }
  turtles_.clear();
  turtlesMutex.unlock();
  freeRobotID = 1;
};
// Advance every turtle by the wall-clock time elapsed since the previous
// call. The very first call only records a timestamp; without an image
// server this is a no-op.
void TurtleFrame::updateTurtles() {
  if (imageServer == NULL) return;
  if (last_turtle_update_.isZero()) {
    last_turtle_update_ = ros::WallTime::now();
    return;
  }
  ros::WallTime now = ros::WallTime::now();
  ros::WallDuration td = now - last_turtle_update_;
  last_turtle_update_ = now;
  turtlesMutex.lock();
  M_Turtle::iterator itTurtles = turtles_.begin();
  M_Turtle::iterator itTurtlesEnd = turtles_.end();
  for (; itTurtles != itTurtlesEnd; itTurtles++) {
    // World bounds are the image dimensions converted from pixels to meters.
    itTurtles->second->update(td.toSec(),
        imageServer->width() / imageServer->pixelsPerMeter(),
        imageServer->height() / imageServer->pixelsPerMeter());
  }
  turtlesMutex.unlock();
};
// Handler for the "reset" ROS service: removes all turtles.
bool TurtleFrame::resetCallback(std_srvs::Empty::Request&,
    std_srvs::Empty::Response&) {
  ROS_INFO("Resetting turtlesim.");
  reset();
  return true;
};
|
#SBATCH -t 00:20:00
#SBATCH --nodes=1
#SBATCH --tasks-per-node=1
#SBATCH --cpus-per-task=24
#SBATCH -A p_readex
#SBATCH --mem-per-cpu=2500M

# Resolve the working directory: the PBS submission directory when running
# under PBS, otherwise the current directory.
# NOTE(review): the batch directives above are SLURM (#SBATCH) while this
# guard checks PBS variables — confirm which scheduler is actually targeted.
if [ "$PBS_ENVIRONMENT" == "PBS_BATCH" ]; then
    export FM_DIR=$PBS_O_WORKDIR
else
    export FM_DIR=$(pwd)
fi

# Load the READEX/PTF tool chain and the machine-specific environment.
source readex_env/set_env_ptf.source
source scripts_$READEX_MACHINE/environment.sh

cd $FM_DIR/OpenFOAM-v1612+/
#edit make rules - wmake/rules/linux64Gcc/c++
# Wrap the compiler with Score-P instrumentation (MPI, single-threaded).
export FOAM_CC="scorep --online-access --user --mpp=mpi --thread=none --nomemory --nocompiler g++ -std=c++11 -m64 -I$MERIC_ROOT/include -DUSE_SCOREP"
source $FM_DIR/OpenFOAM-v1612+/etc/bashrc
export WM_NCOMPPROCS=24
export BOOST_ARCH_PATH=$BOOST_ROOT

# Rebuild the simpleFoam solver with the instrumented compiler.
cd applications/solvers/incompressible/simpleFoam/
wclean
wmake

# Run the motorBike benchmark case.
cd $FM_DIR/../motorBike96ACP/
./Allclean
./Allrun
|
# Install docker-compose 1.24.1 system-wide: download the binary for the
# current OS/arch, make it executable, and link it into /usr/bin so it is
# on the default PATH.
install-docker-compose() {
    sudo /bin/bash -c 'curl -fL "https://github.com/docker/compose/releases/download/1.24.1/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
# -f on ln: replace an existing link so re-running the installer works;
# -f on curl above: fail on HTTP errors instead of saving an error page
# as the "binary".
ln -sf /usr/local/bin/docker-compose /usr/bin/docker-compose
'
}
|
import { Component } from '@angular/core';
import { NotificationsService } from 'angular2-notifications';
import { NotificationConfig } from '../../../../config/notification.config';
@Component({
selector: 'notification',
templateUrl: './view/simple-notification.html',
providers: [NotificationsService, NotificationConfig]
})
export class SimpleNotificationComponent {
constructor(private _notificationsService: NotificationsService, public options: NotificationConfig) { }
//initial values
public title: string = 'Title';
public content: string = 'Some Content';
//method to display the alert based on the alert type requested by the user
public showAlert(type: string): void {
switch (type) {
case 'success':
this._notificationsService.success(this.title, this.content, this.options); break;
case 'info':
this._notificationsService.info(this.title, this.content, this.options); break;
case 'alert':
this._notificationsService.alert(this.title, this.content, this.options); break;
case 'error':
this._notificationsService.error(this.title, this.content, this.options); break;
case 'bare':
this._notificationsService.bare(this.title, this.content, this.options); break;
case 'html':
this._notificationsService.html(this.title, this.content, this.options); break;
default: break;
}
}
//method to display the alert
public clearAlert(): void {
this._notificationsService.remove();
}
} |
<reponame>equal-l2/cvpn
package subcmd
import (
"encoding/json"
"errors"
"fmt"
"log"
"os"
"path"
"github.com/Shizuoka-Univ-dev/cvpn/api"
"github.com/Shizuoka-Univ-dev/cvpn/pkg/config"
"github.com/Shizuoka-Univ-dev/cvpn/pkg/util"
"github.com/spf13/cobra"
)
// NewLoginCmd returns the "login" sub-command: it prompts for credentials,
// logs in to the VPN service (or reuses cached cookies), and optionally
// persists the credentials to the user's config file.
func NewLoginCmd() *cobra.Command {
	return &cobra.Command{
		Use:   "login",
		Short: "login to vpn service",
		Run: func(cmd *cobra.Command, args []string) {
			cmd.SetOut(os.Stderr)
			// Login flow
			var username, password string
			// Resolve the per-user configuration directory
			configDir, err := os.UserConfigDir()
			if err != nil {
				log.Fatal(err)
			}
			// Path the config file will be written to
			configFilePath := path.Join(configDir, "cvpn/config.json")
			// Prompt for credentials
			fmt.Print("username >> ")
			fmt.Scan(&username)
			fmt.Print("password >> ")
			fmt.Scan(&password)
			// Create the API client
			client := api.NewClient()
			// Log in (or reuse previously stored cookies)
			if err := client.LoadCookiesOrLogin(username, password); err != nil {
				fmt.Println("Either the username or password is invalid.")
				log.Fatal(err)
			}
			// Ask before (re)creating the config file
			if flag, err := util.InputYN("Creating configFile? [Y/n] >> "); err == nil && flag {
				if err := os.MkdirAll(path.Dir(configFilePath), 0700); err != nil {
					log.Fatal(err)
				}
				fp, err := os.OpenFile(configFilePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
				if err != nil {
					log.Fatal(err)
				}
				defer fp.Close()
				// JSON payload (note: credentials are stored in plain text)
				data := config.Config{
					Username: username,
					Password: password,
				}
				// NOTE(review): the Marshal error is discarded; a struct of two
				// strings cannot fail to marshal, but consider handling it.
				bytes, _ := json.Marshal(&data)
				if _, err = fp.WriteString(string(bytes)); err != nil {
					log.Fatal(err)
				}
				// Log that the file was created/updated
				log.Printf("Created configFile into %q.\n", configFilePath)
			} else {
				// Log that file creation was skipped
				log.Printf("Not created configFile.\n")
			}
		},
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) > 0 {
				return errors.New("too many args")
			}
			return nil
		},
	}
}
|
// Scripted "arrival" cut-scene: sends a series of timed, bilingual (EN/ES)
// messages in the channel, then waits for the user to pick a language.
exports.run = (client, message, Discord) => {
    message.channel.send("huh?...")
    setTimeout(() => {
        message.channel.send("IT'S YOU?")
        message.channel.send("¿Tú, ahora?")
    }, 1000);
    setTimeout(() => {
        message.channel.send("Leon forgot to tell me you'll arrive... Let me give him a **friendly** call")
        message.channel.send("Lionel olvidó decirme que venías... que novedad... Le haré una llamada **amistosa** por este imprevisto")
    }, 2000);
    setTimeout(() => {
        message.channel.send("...")
    }, 3000);
    setTimeout(() => {
        message.channel.send("... huh...")
    }, 4000);
    setTimeout(() => {
        message.channel.send("It's here. Yeah... I was waiting for your call to be ready... you know, use slippers and pijamas all day is comfy but not socially acceptable , you know?")
        message.channel.send("Está aquí... si... Esperaba tu llamada para estar lista... usar pantuflas y pijamas todo el día es bastante cómodo, pero no muy aceptado socialmente...")
    }, 5000);
    setTimeout(() => {
        message.channel.send("Leon can't remember your language... galarian is kinda hard to understand to outsiders... He says it was spanish, but since he remembers you liked fish and chips and use a lot of internet slang... so... **english** or **spanish**?")
        message.channel.send("Lionel no recuerda tu lengua... No lo malinterpretes. El galarience es un poco difícil de entender para los extranjeros... Él dice que hablabas español, pero también recalcó que amabas el pescado con papas fritas y usar siglas del internet para expresarte... así que... **Inglés** o **español**?")
        .then(() => {
            // Wait up to one minute for the first reply in the channel.
            // NOTE(review): the filter `response.content` is falsy for empty
            // messages (attachments only) — confirm that is intended.
            message.channel.awaitMessages(response => response.content, {
                max: 1,
                time: 60000,
                errors: ['time'],
            })
            .then((collected) => {
                // Accept several spellings/abbreviations for each language.
                switch (collected.first().content) {
                    case "esp":
                    case "Español":
                    case "español":
                    case "spa":
                    case "Spa":
                    case "spanish":
                    case "Spanish":
                        message.channel.send(`¡Muchas muchas gracias!`);
                        break;
                    case "Ing":
                    case "ing":
                    case "Eng":
                    case "eng":
                    case "inglés":
                    case "Inglés":
                    case "Ingles":
                        message.channel.send(`Thank you so much! `);
                        break;
                    default:
                        message.channel.send(`Restart. Can't read that.`);
                }})
            .catch(() => {
                // awaitMessages rejected: the 60 s window elapsed.
                message.channel.send("Out of time. Try again.");
            });
        });
    }, 7000)
}
/*
var start = new Discord.RichEmbed()
.setColor('#ffa420')
.setTitle('Hey there!')
.setDescription("Heard you wanted to start a new Pokémon Journey... That's soooo cute!")
.setThumbnail('https://i.ibb.co/KDYybj5/soniapokemon.jpg')
.addField('I could be useful In that case...', 'Somehow...', false)
.addField('Greetings, fellow!', "My name is Sonia, and I'm professor Magnolia's assistant. You have the face of someone who isn't from around here...", false)
.addField('So... Tell me about you...', "Where are you from?", false)
.setImage("https://i.ibb.co/j4yp5nW/giphy.gif")
.setFooter("Answer tip: Kanto, Jotho, Hoenn, Sinnoh, Unova, Kalos, Alola or Galar. Or simply wherever you live", 'https://i.ibb.co/KDYybj5/soniapokemon.jpg');
message.channel.send(start);
};
*/ |
package testapp
import (
"testing"
"time"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/store"
sdk "github.com/cosmos/cosmos-sdk/types"
authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper"
paramstypes "github.com/cosmos/cosmos-sdk/x/params/types"
bep3 "github.com/e-money/bep3/module"
bep3types "github.com/e-money/bep3/module/types"
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/libs/log"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
dbm "github.com/tendermint/tm-db"
)
// CreateTestComponents wires up an in-memory multistore, a context and the
// auth/bank/params/bep3 keepers needed to unit-test the bep3 module.
func CreateTestComponents(t *testing.T) (
	sdk.Context,
	codec.JSONMarshaler,
	bep3.Keeper,
	bep3types.AccountKeeper,
	bep3types.BankKeeper,
	bep3.AppModule) {
	encoding := bep3.MakeProtoEncodingConfig()

	// Back every store with a throwaway in-memory DB.
	db := dbm.NewMemDB()
	ms := store.NewCommitMultiStore(db)
	keys := sdk.NewKVStoreKeys(bep3.StoreKey, authtypes.StoreKey, banktypes.StoreKey, paramstypes.StoreKey)
	for _, k := range keys {
		ms.MountStoreWithDB(k, sdk.StoreTypeIAVL, db)
	}
	tkeys := sdk.NewTransientStoreKeys(paramstypes.TStoreKey)
	for _, k := range tkeys {
		ms.MountStoreWithDB(k, sdk.StoreTypeTransient, db)
	}
	err := ms.LoadLatestVersion()
	require.NoError(t, err)

	ctx := sdk.NewContext(ms, tmproto.Header{
		ChainID: "test-chain",
	}, true, log.NewNopLogger())
	ctx = ctx.WithBlockTime(time.Now())

	// Only the bep3 module account may mint and burn coins.
	mAccPerms := map[string][]string{
		bep3.ModuleName: {authtypes.Minter, authtypes.Burner},
	}
	paramsKeeper := paramskeeper.NewKeeper(encoding.Marshaller, encoding.Amino, keys[paramstypes.StoreKey], tkeys[paramstypes.TStoreKey])
	var (
		authSubspace = paramsKeeper.Subspace(authtypes.ModuleName)
		bankSubspace = paramsKeeper.Subspace(banktypes.ModuleName)
		bep3Subspace = paramsKeeper.Subspace(bep3.DefaultParamspace)
	)
	var (
		accountKeeper = authkeeper.NewAccountKeeper(encoding.Marshaller, keys[authtypes.StoreKey], authSubspace, authtypes.ProtoBaseAccount, mAccPerms)
		bankKeeper    = bankkeeper.NewBaseKeeper(encoding.Marshaller, keys[banktypes.ModuleName], accountKeeper, bankSubspace, make(map[string]bool))
		bep3Keeper    = bep3.NewKeeper(encoding.Marshaller, keys[bep3.StoreKey], bankKeeper, accountKeeper, bep3Subspace, make(map[string]bool))
	)
	bankKeeper.SetSupply(ctx, banktypes.NewSupply(sdk.NewCoins()))
	return ctx,
		// NOTE(review): a fresh ProtoCodec with a nil interface registry is
		// returned instead of encoding.Marshaller — confirm this is intended.
		codec.NewProtoCodec(nil),
		bep3Keeper,
		accountKeeper,
		bankKeeper,
		bep3.NewAppModule(bep3Keeper, accountKeeper, bankKeeper)
}
|
<reponame>osteele/skillz<gh_stars>0
import request from 'supertest';
import { app, client, getRealAddress } from './server';
// Open the shared DB connection once before the suite and close it after.
beforeAll(() => client.connect());
afterAll(() => client.end());

// HTTP round-trip tests against the express app (driven via supertest).
describe('app', () => {
    test('GET /home', async () => {
        await request(app).get('/')
            .expect(200);
    });
    test('POST /person/:id/skill/:id', async () => {
        await request(app).post('/person/1/skill/1')
            .send({ experience: 3 })
            .expect(200)
            .expect({ experience: 3 });
    });
    // NOTE(review): the two cases below are skipped — presumably the app does
    // not yet validate ids/payloads; enable them once it does.
    test.skip('POST /person/:id/skill/:id with invalid id', async () => {
        await request(app).post('/person/12341234/skill/1')
            .send({ experience: 3 })
            .expect(400);
    });
    test.skip('POST /person/:id/skill/:id with invalid data', async () => {
        await request(app).post('/person/1/skill/1')
            .send({ invalid_field: 3 })
            .expect(400);
    });
});

// Unit tests for the proxy-aware client-address helper.
describe(getRealAddress, () => {
    test('reads x-real-ip', () => {
        expect(getRealAddress({ headers: { 'x-real-ip': '8.8.8.8' } })).toBe('8.8.8.8');
    });
    test('reads x-forwarded-for', () => {
        expect(getRealAddress({ headers: { 'x-forwarded-for': '8.8.8.8' } })).toBe('8.8.8.8');
    });
    test('returns the last value from a list of IPs', () => {
        expect(getRealAddress({ headers: { 'x-forwarded-for': '4.4.4.4, 8.8.8.8' } })).toBe('8.8.8.8');
    });
    test('return connection address when no headers are present', () => {
        expect(getRealAddress({ headers: {}, connection: { remoteAddress: '8.8.8.8' } })).toBe('8.8.8.8');
    });
    test('prefers header address over the connection address', () => {
        expect(getRealAddress({ headers: { 'x-real-ip': '8.8.8.8' }, connection: { remoteAddress: '4.4.4.4' } })).toBe('8.8.8.8');
    });
});
|
import AbstractCommand from "./AbstractCommand";
/**
 * Interactive command that asks the user for a column and row count and
 * inserts a grid-table snippet (with tab-stop placeholders per cell) at
 * the current cursor line.
 */
export default class InsertTableCommand
    extends AbstractCommand
{
    async execute(): Promise<void>
    {
        // get column count (the input box resolves to undefined on cancel)
        const columnsInput = await this
            .showInputBox("Number of columns", "2");
        if (columnsInput === undefined)
        {
            return;
        }
        // `|| 0` maps NaN (non-numeric input) to 0, which fails the check below
        const columns = parseInt(columnsInput) || 0;
        if (columns < 1)
        {
            this.warning(`Invalid number of columns: ${columnsInput}`);
            return;
        }
        // get row count
        const rowsInput = await this
            .showInputBox("Number of rows", "2");
        if (rowsInput === undefined)
        {
            return;
        }
        const rows = parseInt(rowsInput) || 0;
        if (rows < 1)
        {
            this.warning(`Invalid number of rows: ${rowsInput}`);
            return;
        }
        // build snippet: one "+---+---+..." separator per row boundary
        const separator =
            "+" +
            "---+".repeat(columns) +
            this.eol();
        let snippet = separator;
        for (let i = 0; i < rows; i++)
        {
            snippet += "| ";
            if (i === 0)
            {
                // add anchor to return to after the placeholders have been looped
                snippet += "$0";
            }
            for (let j = 0; j < columns; j++)
            {
                if (j > 0)
                {
                    snippet += " ";
                }
                // numbered tab-stop placeholder for each cell, left to right,
                // top to bottom
                snippet += `\${${i * columns + j + 1}: } |`;
            }
            snippet += this.eol();
            // add separator
            snippet += i === 0 ?
                separator.replace(/-/g, "=") : // header separator
                separator;
        }
        // determine line the cursor is on and insert the snippet there
        const line = this
            .position()
            .line;
        this.insertSnippet(
            snippet,
            line);
    }
}
import json
def serialize_dag(dag):
    """Return a deep, JSON-serializable copy of *dag*.

    Round-tripping through the ``json`` codec coerces values to plain
    JSON types (e.g. tuples become lists, mapping keys become strings)
    and raises ``TypeError`` for anything JSON cannot represent.
    """
    return json.loads(json.dumps(dag))
#!/bin/bash
# Configure helper: selects compilers (optionally clang-analyzer or ccache),
# collects cmake options from the command line, and runs cmake in a
# per-platform build directory. Extra args after "--" go straight to cmake.
SYS_NAME="$(uname -s)";
SYS_NAME="$(basename $SYS_NAME)";
CC=gcc;
CXX=g++;
CCACHE="$(which ccache)";
CMAKE_OPTIONS="";
CMAKE_CLANG_TIDY="";
CMAKE_CLANG_ANALYZER=0;
CMAKE_CLANG_ANALYZER_PATH="";
BUILD_DIR=$(echo "build_jobs_$SYS_NAME" | tr '[:upper:]' '[:lower:]');
CUSTOM_BUILD_DIR=;
CMAKE_BUILD_TYPE=Debug;
# Detect an MSYS2/MinGW shell from $MSYSTEM (e.g. "MINGW64" -> "mingw").
if [ ! -z "$MSYSTEM" ]; then
    CHECK_MSYS=$(echo "${MSYSTEM:0:5}" | tr '[:upper:]' '[:lower:]');
else
    CHECK_MSYS="";
fi
while getopts "ab:c:d:e:hlr:tus-" OPTION; do
    case $OPTION in
        a)
            # Use the clang static-analyzer compiler wrappers.
            echo "Ready to check ccc-analyzer and c++-analyzer, please do not use -c to change the compiler when using clang-analyzer.";
            CC=$(which ccc-analyzer);
            CXX=$(which c++-analyzer);
            if [ 0 -ne $? ]; then
                # check mingw path
                if [ "mingw" == "$CHECK_MSYS" ]; then
                    if [ ! -z "$MINGW_MOUNT_POINT" ] && [ -e "$MINGW_MOUNT_POINT/libexec/ccc-analyzer.bat" ] && [ -e "$MINGW_MOUNT_POINT/libexec/ccc-analyzer.bat" ]; then
                        echo "clang-analyzer found in $MINGW_MOUNT_POINT";
                        export PATH=$PATH:$MINGW_MOUNT_POINT/libexec ;
                        CC="$MINGW_MOUNT_POINT/libexec/ccc-analyzer.bat";
                        CXX="$MINGW_MOUNT_POINT/libexec/ccc-analyzer.bat";
                        CMAKE_CLANG_ANALYZER_PATH="$MINGW_MOUNT_POINT/libexec";
                    elif [ ! -z "$MINGW_PREFIX" ] && [ -e "$MINGW_PREFIX/libexec/ccc-analyzer.bat" ] && [ -e "$MINGW_PREFIX/libexec/c++-analyzer.bat" ]; then
                        echo "clang-analyzer found in $MINGW_PREFIX";
                        export PATH=$PATH:$MINGW_PREFIX/libexec ;
                        CC="$MINGW_PREFIX/libexec/ccc-analyzer.bat";
                        CXX="$MINGW_PREFIX/libexec/ccc-analyzer.bat";
                        CMAKE_CLANG_ANALYZER_PATH="$MINGW_PREFIX/libexec";
                    fi
                fi
            fi
            if [ -z "$CC" ] || [ -z "$CXX" ]; then
                echo "ccc-analyzer=$CC";
                echo "c++-analyzer=$CXX";
                echo "clang-analyzer not found, failed.";
                exit 1;
            fi
            echo "ccc-analyzer=$CC";
            echo "c++-analyzer=$CXX";
            echo "clang-analyzer setup completed.";
            CMAKE_CLANG_ANALYZER=1;
            BUILD_DIR="${BUILD_DIR}_analyzer";
            ;;
        b)
            CMAKE_BUILD_TYPE="$OPTARG";
            ;;
        c)
            # Select the C compiler and derive the C++ driver from its name
            # (clang -> clang++, gcc -> g++).
            if [[ $CMAKE_CLANG_ANALYZER -ne 0 ]]; then
                CCC_CC="$OPTARG";
                CCC_CXX="${CCC_CC/%clang/clang++}";
                CCC_CXX="${CCC_CXX/%gcc/g++}";
                export CCC_CC;
                export CCC_CXX;
            else
                CC="$OPTARG";
                CXX="$(echo "$CC" | sed 's/\(.*\)clang/\1clang++/')";
                CXX="$(echo "$CXX" | sed 's/\(.*\)gcc/\1g++/')";
            fi
            ;;
        d)
            if [ ! -z "$OPTARG" ]; then
                CMAKE_OPTIONS="$CMAKE_OPTIONS -DLIBSODIUM_ROOT=$OPTARG";
            fi
            ;;
        e)
            CCACHE="$OPTARG";
            ;;
        h)
            # NOTE(review): the -r <dir> (custom build directory) option is
            # accepted by getopts but missing from this help text.
            echo "usage: $0 [options] [-- [cmake options...] ]";
            echo "options:";
            echo "-a using clang-analyzer.";
            echo "-b <build type> set build type(Debug, RelWithDebINfo, Release, MinSizeRel).";
            echo "-c <compiler> compiler toolchains(gcc, clang or others).";
            echo "-d [libsodium root] set root of libsodium.";
            echo "-e <ccache path> try to use specify ccache to speed up building.";
            echo "-h help message.";
            echo "-l enable tools of submodules.";
            echo "-t enable clang-tidy.";
            echo "-u enable unit test.";
            echo "-s enable sample.";
            exit 0;
            ;;
        l)
            CMAKE_OPTIONS="$CMAKE_OPTIONS -DPROJECT_ENABLE_TOOLS=YES";
            ;;
        r)
            CUSTOM_BUILD_DIR="$OPTARG";
            ;;
        t)
            CMAKE_CLANG_TIDY="-D -checks=* --";
            ;;
        u)
            CMAKE_OPTIONS="$CMAKE_OPTIONS -DPROJECT_ENABLE_UNITTEST=YES -DPROJECT_HIREDIS_HAPP_ENABLE_UNITTEST=YES";
            ;;
        s)
            CMAKE_OPTIONS="$CMAKE_OPTIONS -DPROJECT_ENABLE_SAMPLE=YES -DPROJECT_HIREDIS_HAPP_ENABLE_SAMPLE=YES";
            ;;
        -)
            break;
            ;;
        ?)
            # NOTE(review): typo in this message ("unkonw" -> "unknown").
            echo "unkonw argument detected";
            exit 1;
            ;;
    esac
done
shift $(($OPTIND - 1));
SCRIPT_DIR="$(cd $(dirname $0) && pwd)";
if [[ "x$CUSTOM_BUILD_DIR" != "x" ]]; then
    BUILD_DIR="$CUSTOM_BUILD_DIR";
fi
mkdir -p "$SCRIPT_DIR/$BUILD_DIR";
cd "$SCRIPT_DIR/$BUILD_DIR";
# Prefer ccache as a compiler launcher unless it was explicitly disabled
# via -e disable/disabled/no/false (or does not exist).
if [ ! -z "$CCACHE" ] && [ "$CCACHE" != "disable" ] && [ "$CCACHE" != "disabled" ] && [ "$CCACHE" != "no" ] && [ "$CCACHE" != "false" ] && [ -e "$CCACHE" ]; then
    #CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_C_COMPILER=$CCACHE -DCMAKE_CXX_COMPILER=$CCACHE -DCMAKE_C_COMPILER_ARG1=$CC -DCMAKE_CXX_COMPILER_ARG1=$CXX";
    CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_C_COMPILER_LAUNCHER=$CCACHE -DCMAKE_CXX_COMPILER_LAUNCHER=$CCACHE -DCMAKE_C_COMPILER=$CC -DCMAKE_CXX_COMPILER=$CXX";
else
    CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_C_COMPILER=$CC -DCMAKE_CXX_COMPILER=$CXX";
fi
if [ "$CHECK_MSYS" == "mingw" ]; then
    cmake .. -G "MSYS Makefiles" -DCMAKE_BUILD_TYPE=$CMAKE_BUILD_TYPE $CMAKE_OPTIONS "$@";
else
    cmake .. -DCMAKE_BUILD_TYPE=$CMAKE_BUILD_TYPE $CMAKE_OPTIONS "$@";
fi
# When analyzing, print the scan-build command line the user should run,
# honoring optional .scan-build.enable/.scan-build.disable checker lists.
if [ 1 -eq $CMAKE_CLANG_ANALYZER ]; then
    echo "=========================================================================================================";
    CMAKE_CLANG_ANALYZER_OPTIONS="";
    if [ -e "$SCRIPT_DIR/.scan-build.enable" ]; then
        for OPT in $(cat "$SCRIPT_DIR/.scan-build.enable"); do
            CMAKE_CLANG_ANALYZER_OPTIONS="$CMAKE_CLANG_ANALYZER_OPTIONS -enable-checker $OPT";
        done
    fi
    if [ -e "$SCRIPT_DIR/.scan-build.disable" ]; then
        for OPT in $(cat "$SCRIPT_DIR/.scan-build.disable"); do
            CMAKE_CLANG_ANALYZER_OPTIONS="$CMAKE_CLANG_ANALYZER_OPTIONS -disable-checker $OPT";
        done
    fi
    if [ -z "$CMAKE_CLANG_ANALYZER_PATH" ]; then
        echo "cd '$SCRIPT_DIR/$BUILD_DIR' && scan-build -o report --html-title='atframe_utils static analysis' $CMAKE_CLANG_ANALYZER_OPTIONS make -j4";
    else
        echo "cd '$SCRIPT_DIR/$BUILD_DIR' && env PATH=\"\$PATH:$CMAKE_CLANG_ANALYZER_PATH\" scan-build -o report --html-title='libmt_core static analysis' $CMAKE_CLANG_ANALYZER_OPTIONS make -j4";
    fi
    echo "Now, you can run those code above to get a static analysis report";
    echo "You can get help and binary of clang-analyzer and scan-build at http://clang-analyzer.llvm.org/scan-build.html"
fi
|
<reponame>arkav/open-oryx
package dev.arkav.openoryx.impl;
import dev.arkav.openoryx.game.StatusParser;
import dev.arkav.openoryx.game.appspot.Endpoints;
import dev.arkav.openoryx.game.models.*;
import dev.arkav.openoryx.net.PacketIO;
import dev.arkav.openoryx.net.crypto.RSA;
import dev.arkav.openoryx.net.data.MoveRecord;
import dev.arkav.openoryx.net.data.ObjectData;
import dev.arkav.openoryx.net.data.ObjectStatusData;
import dev.arkav.openoryx.net.data.WorldPosData;
import dev.arkav.openoryx.net.listeners.ListenerStore;
import dev.arkav.openoryx.net.listeners.ListenerType;
import dev.arkav.openoryx.net.packets.PacketType;
import dev.arkav.openoryx.net.packets.c2s.*;
import dev.arkav.openoryx.net.packets.s2c.*;
import dev.arkav.openoryx.util.Http;
import dev.arkav.openoryx.util.XML;
import dev.arkav.openoryx.util.logging.Logger;
import org.w3c.dom.NodeList;
import java.io.IOException;
import java.net.Proxy;
@SuppressWarnings("WeakerAccess")
public class Client {
// Basic information from the constructor
protected Account account;
// client information
protected int objectId;
protected MapInfo mapInfo;
protected WorldPosData pos;
protected ObjectStatus data;
protected Server currentServer;
protected GameState gameState;
protected Proxy proxy;
private long time = System.currentTimeMillis();
protected int getTime() {
return (int)(System.currentTimeMillis() - this.time);
}
// various garbage
protected boolean sentLoad;
// Packet io
protected PacketIO io;
protected ListenerStore ls;
public boolean isConnected() {
return connected;
}
protected boolean connected;
protected boolean destroyed;
public Client(Account account) {
this.account = account;
this.ls = new ListenerStore();
this.destroyed = false;
this.connected = false;
this.loadDefaultHooks();
}
private void loadDefaultHooks() {
this.ls.hook(PacketType.AOE, (AoePacket aoe) -> {
AoeAckPacket ack = new AoeAckPacket();
ack.pos = this.pos;
ack.time = this.getTime();
this.io.send(ack);
});
this.ls.hook(PacketType.CREATESUCCESS, p -> {
CreateSuccessPacket createSuccess = (CreateSuccessPacket)p;
this.objectId = createSuccess.objectId;
this.gameState.characterId = createSuccess.charId;
});
this.ls.hook(PacketType.UPDATE, (UpdatePacket update) -> {
this.io.send(new UpdateAckPacket());
for (ObjectData obj : update.newObjects) {
if (obj.status.objectId == this.objectId) {
this.data = StatusParser.parseObject(obj.status.stats);
this.pos = obj.status.pos.clone();
Logger.log(this.account.getGuid(), "Logged in as: " + this.data.name);
}
}
});
this.ls.hook(PacketType.MAPINFO, (MapInfoPacket mapInfo) -> {
this.mapInfo = new MapInfo(mapInfo.name, mapInfo.height, mapInfo.width);
this.gameState.connectionGuid = mapInfo.connectionGuid;
Logger.log(account.getGuid(), "Connected to " + mapInfo.name);
if(!this.sentLoad) {
if (this.gameState.characterId > 0) {
this.sentLoad = true;
LoadPacket load = new LoadPacket();
load.charId = this.gameState.characterId;
load.isFromArena = false;
load.isChallenger = false;
this.io.send(load);
} else {
CreatePacket createPacket = new CreatePacket();
createPacket.classType = 782; // Wizard
createPacket.skinType = 0;
createPacket.isChallenger = false;
Logger.log(this.account.getGuid(), "Creating new character!");
this.io.send(createPacket);
}
}
});
this.ls.hook(PacketType.FAILURE, (FailurePacket failure) -> {
Logger.log(this.account.getGuid(), "Received failure: " + failure.errorDescription);
this.io.disconnect(true);
});
this.ls.hook(PacketType.GOTO, (GotoPacket to) -> {
GotoAckPacket ack = new GotoAckPacket();
ack.time = this.getTime();
this.io.send(ack);
if(to.objectId == this.objectId) {
this.pos = to.pos.clone();
}
});
this.ls.hook(PacketType.PING, (PingPacket ping) -> {
PongPacket ack = new PongPacket();
ack.serial = ping.serial;
ack.time = this.getTime();
this.io.send(ack);
});
this.ls.hook(PacketType.NEWTICK, (NewTickPacket newTick) -> {
MovePacket ack = new MovePacket();
ack.tickId = newTick.tickId;
ack.time = this.getTime();
ack.worldPos = this.pos.clone();
ack.moveRecords = new MoveRecord[0];
this.io.send(ack);
for (ObjectStatusData status : newTick.statuses) {
if (status.objectId == this.objectId) {
StatusParser.parseObject(status.stats, this.data);
}
}
});
this.ls.hook(ListenerType.DISCONNECT, () -> {
this.connected = false;
Logger.log(this.account.getGuid(), "Disconnected!");
});
this.ls.hook(ListenerType.CONNECT, () -> this.connected = true);
}
public void connect(Server server, GameState gs) {
if (this.destroyed) return;
if (this.io != null) this.io.disconnect(false);
int oldCharId = this.gameState != null ? this.gameState.characterId : 0;
this.gameState = gs;
if (oldCharId > 0) this.gameState.characterId = oldCharId;
this.currentServer = server;
try {
if (this.gameState.characterId < 1 && this.gameState.characterId != -1) { // Get characterId from appspot
String raw = this.loadCharList();
NodeList el = XML.parseText(raw).getDocumentElement().getElementsByTagName("Char");
this.gameState.characterId = el.getLength() > 0 ? Integer.parseInt(el.item(0).getAttributes().getNamedItem("id").getTextContent()) : 0;
}
} catch (Exception e) {
e.printStackTrace();
}
this.time = System.currentTimeMillis();
Logger.log(this.account.getGuid(), "Connecting to: " + server.getName());
HelloPacket hello = HelloPacket.createDefault();
hello.buildVersion = gs.buildVersion;
hello.gameId = gs.gameId;
hello.guid = RSA.encrypt(this.account.getGuid());
hello.password = <PASSWORD>.encrypt(this.account.getPassword());
hello.key = gs.key;
hello.keyTime = gs.keyTime;
hello.gameNet = "rotmg";
hello.playPlatform = "rotmg";
hello.previousConnectionGuid = gs.connectionGuid;
this.sentLoad = false;
try {
this.io = this.proxy == null ? new PacketIO(server, hello, this.ls) : new PacketIO(server, this.proxy, hello, this.ls);
} catch (Exception e) {
e.printStackTrace();
}
}
private String loadCharList() throws IOException {
String endpoint = Endpoints.CHAR_LIST.builder()
.append("guid", this.account.getGuid())
.append("password", <PASSWORD>.<PASSWORD>.<PASSWORD>())
.append("challenger", "false")
.build();
return this.proxy == null ? Http.get(endpoint) : Http.proxiedGet(endpoint, 5000, this.proxy);
}
public void disconnect() {
this.io.disconnect(false);
this.connected = false;
}
public void setProxy(Proxy proxy) {
this.proxy = proxy;
}
public void removeProxy() {
this.proxy = null;
}
public void destroy() {
this.disconnect();
this.destroyed = true;
}
}
|
<gh_stars>10-100
/*eslint-disable*/
/* global Phaser, players
*/
/*eslint-enable*/
// Horizontal moving-bar obstacle (or sliding door) anchored at (x, y).
// `game` is the owning Phaser game instance; sprites are created later in
// the prototype's create() method.
var Horizontal = function (game, x, y) {
  this.game = game
  this.x = x
  this.y = y
  this.w = 224 // bar width in pixels
  this.h = 32 // bar height in pixels
  this.dist = 160 // horizontal travel distance from the anchor
  this.sprite = null // the moving bar (created in create())
  this.spriteAxis = null // translucent sprite marking the full travel axis
  this.isDoor = false // doors anchor at one end and resize instead of tweening
}
Horizontal.prototype = {
  /**
   * Builds the moving bar sprite and its translucent travel-axis backdrop,
   * then (unless this is a door) starts the back-and-forth tween.
   */
  create: function () {
    var bmd = this.game.make.bitmapData(this.w, this.h)
    bmd.fill(0xFF, 0xFF, 0xFF, 1)
    // Doors hang from one end of the axis; regular bars start at the left travel limit.
    var x = this.isDoor ? this.x - this.h / 2 : this.x - this.dist
    this.sprite = this.game.add.sprite(x, this.y, bmd)
    var anchorX = this.isDoor ? 0 : 0.5
    this.sprite.anchor.set(anchorX, 0.5)
    this.game.physics.arcade.enable(this.sprite)
    // The axis backdrop spans the full travel range (slightly narrower for doors).
    var axisW = this.isDoor ? this.w + this.dist * 2 - this.h / 2 : this.w + this.dist * 2
    var bmdAxis = this.game.make.bitmapData(axisW, this.h * 0.7)
    bmdAxis.fill(0xFF, 0xFF, 0xFF, 1)
    this.spriteAxis = this.game.add.sprite(this.x, this.y, bmdAxis)
    this.spriteAxis.alpha = 0.2
    this.spriteAxis.anchor.set(anchorX, 0.5)
    if (!this.isDoor) {
      // Ping-pong forever between the two travel limits.
      this.tween = this.game.add.tween(this.sprite.position).to({ x: this.x + this.dist }, 1500, Phaser.Easing.Sinusoidal.InOut, true)
      this.tween.yoyo(true)
      this.tween.repeat(-1)
    }
  },
  /** Per-frame update: kills the player on contact; animates door open/close. */
  update: function () {
    this.game.physics.arcade.overlap(this.sprite, players[0].sprite, players[0].kill, null, players[0])
    if (this.isDoor) {
      // Doors shrink/grow with the player's movement direction, clamped
      // between the bar thickness and the full axis width.
      var w = this.sprite.width
      var dir = players[0].direction
      var inc = 0
      if (dir === 1 && w > this.h) inc = -1
      else if (dir === -1 && w < this.spriteAxis.width) inc = 1
      this.sprite.width += 5 * inc
    }
  },
  /** Moves the whole obstacle (bar, axis, and centre marker if present). */
  setPosition: function (x, y) {
    this.y = y
    this.x = x
    this.sprite.position.set(x, y)
    this.spriteAxis.position.set(x, y)
    if (this.center) this.center.position.set(x, y)
  },
  /** Stops the sweep tween, recentres the bar, and draws a centre marker. */
  stop: function () {
    if (this.tween) this.tween.stop()
    this.setPosition(this.x, this.y)
    var graphics = this.game.add.graphics(0, 0)
    graphics.lineStyle(5)
    graphics.lineColor = 0xFFFFFF
    graphics.drawRect(this.x - 16, this.y - 16, 32, 32)
    graphics.endFill()
    this.center = this.game.add.sprite(this.x, this.y, graphics.generateTexture())
    this.center.anchor.set(0.5)
    graphics.destroy()
  },
  sendToBack: function () {
    this.game.world.sendToBack(this.sprite)
    this.game.world.sendToBack(this.spriteAxis)
    if (this.center) this.game.world.sendToBack(this.center)
  },
  destroy: function () {
    this.sprite.destroy()
    this.spriteAxis.destroy()
    if (this.center) this.center.destroy()
  },
  hide: function () {
    this.sprite.visible = false
    this.spriteAxis.visible = false
    if (this.center) this.center.visible = false
  },
  show: function () {
    this.sprite.visible = true
    this.spriteAxis.visible = true
    if (this.center) this.center.visible = true
  },
  setScale: function (scale) {
    this.sprite.scale.set(scale)
    this.spriteAxis.scale.set(scale)
    if (this.center) this.center.scale.set(scale)
  },
  setAlpha: function (alpha) {
    this.sprite.alpha = 0.8 * alpha
    this.spriteAxis.alpha = 0.4 * alpha
    // Bug fix: this previously read `this.circle`, a property that is never
    // assigned anywhere in this object; the centre marker created by stop()
    // (and used by every other method) lives in `this.center`.
    if (this.center) this.center.alpha = 0.4 * alpha
  }
}
<filename>app/static/js/utils/form_utils.js<gh_stars>0
/**
 * Removes both Bootstrap validation-state classes from a jQuery element,
 * returning the field to its neutral appearance.
 * @param {jQuery} element Form-control element to reset.
 */
function clearValidityIndicators (element) {
  var stateClasses = ['is-invalid', 'is-valid']
  stateClasses.forEach(function (cls) {
    element.removeClass(cls)
  })
}
/**
 * Wraps a callback in a key-event handler that fires only for the Enter key.
 * Checks `ev.key` first and falls back to the legacy numeric `ev.keyCode`
 * for older browsers.
 * @param {Function} func Callback to invoke on Enter.
 * @returns {Function} Event handler suitable for keyup/keydown listeners.
 */
function callOnEnter (func) {
  return function (ev) {
    var pressed = ev.key !== undefined
      ? ev.key === 'Enter'
      : ev.keyCode !== undefined && ev.keyCode === 13
    if (pressed) func()
  }
}
/**
 * Validates that a name is non-empty and reflects the result on the element
 * with Bootstrap's is-valid/is-invalid classes.
 * @param {string} name Value to validate.
 * @param {jQuery} nameElemSelector Input element to decorate.
 * @returns {boolean} true when the name is non-empty.
 */
function validateName (name, nameElemSelector) {
  // Reset any previous state, then mark valid iff the name is non-empty.
  clearValidityIndicators(nameElemSelector)
  var ok = name.length > 0
  nameElemSelector.addClass(ok ? 'is-valid' : 'is-invalid')
  return ok
}
/**
 * Validates a South African phone number (+27/27/0 prefix, then 2+3+4 digits
 * with optional single space or dash separators) and reflects the result on
 * the element with Bootstrap's is-valid/is-invalid classes.
 * @param {string} phoneNumber Value to validate.
 * @param {jQuery} phoneElemSelector Input element to decorate.
 * @returns {boolean} true when the number is a full match.
 */
function validatePhoneNumber (phoneNumber, phoneElemSelector) {
  var phoneRegex = /^(\+27|27|0)[0-9]{2}( |-)?[0-9]{3}( |-)?[0-9]{4}$/
  var matches = phoneNumber.match(phoneRegex)
  clearValidityIndicators(phoneElemSelector)
  // The match must also span the whole input: with "$" alone a trailing
  // newline would still be accepted, so compare lengths explicitly.
  var fullMatch = matches !== null && matches[0].length === matches.input.length
  phoneElemSelector.addClass(fullMatch ? 'is-valid' : 'is-invalid')
  return fullMatch
}
|
<filename>Examples/FeatureExtraction/LocalHoughExample.cxx
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Software Guide : BeginCommandLineArgs
// INPUTS: {detected_lines.png}
// OUTPUTS: {detected_local_hough.png}
// 30 10 1 50
// Software Guide : EndCommandLineArgs
// Software Guide : BeginLatex
//
// This example illustrates the use of the \doxygen{otb}{ExtractSegmentsImageFilter}.
//
// The first step required to use this filter is to include its header file.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
#include "otbLocalHoughFilter.h"
#include "otbDrawLineSpatialObjectListFilter.h"
// Software Guide : EndCodeSnippet
#include "otbImage.h"
#include "otbImageFileReader.h"
#include "itkRescaleIntensityImageFilter.h"
#include "otbImageFileWriter.h"
// Entry point. Expected arguments (see the CommandLineArgs block above):
//   argv[1] input image, argv[2] output image, argv[3] local Hough radius,
//   argv[4] tile overlap, argv[5] number of lines to keep,
//   argv[6] accumulator threshold.
int main(int argc, char * argv[])
{
  if (argc != 7)
    {
    std::cerr << "Usage: " << argv[0] << " inputImageFile ";
    std::cerr <<
    " outputImageFile LocalHoughRadius LocalHoughOverlap LocalHoughNumberOfLines LocalHoughThreshold"
    << std::endl;
    return EXIT_FAILURE;
    }
// Software Guide : BeginLatex
//
// Then we must decide what pixel type to use for the image. We
// choose to make all computations with floating point precision
// and rescale the results between 0 and 255 in order to export PNG images.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  typedef float         InternalPixelType;
  typedef unsigned char OutputPixelType;
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// The images are defined using the pixel type and the dimension.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  typedef otb::Image<InternalPixelType, 2> InternalImageType;
  typedef otb::Image<OutputPixelType, 2>   OutputImageType;
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// The filter can be instantiated using the image types defined above.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  typedef otb::LocalHoughFilter<InternalImageType> LocalHoughType;
  typedef otb::DrawLineSpatialObjectListFilter<InternalImageType,
      OutputImageType>
  DrawLineListType;
// Software Guide : EndCodeSnippet
  typedef itk::RescaleIntensityImageFilter<InternalImageType,
      OutputImageType> RescalerType;
  // NOTE(review): this rescaler is created and configured but never inserted
  // into the pipeline below (reader -> localHough/drawLineList -> writer) —
  // confirm whether it is vestigial or should precede the writer.
  RescalerType::Pointer rescaler = RescalerType::New();
  rescaler->SetOutputMinimum(itk::NumericTraits<OutputPixelType>::min());
  rescaler->SetOutputMaximum(itk::NumericTraits<OutputPixelType>::max());
// Software Guide : BeginLatex
//
// An \doxygen{otb}{ImageFileReader} class is also instantiated in order to read
// image data from a file.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  typedef otb::ImageFileReader<InternalImageType> ReaderType;
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// An \doxygen{otb}{ImageFileWriter} is instantiated in order to write the
// output image to a file.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  typedef otb::ImageFileWriter<OutputImageType> WriterType;
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// Both the filter and the reader are created by invoking their \code{New()}
// methods and assigning the result to SmartPointers.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  ReaderType::Pointer       reader = ReaderType::New();
  LocalHoughType::Pointer   localHough = LocalHoughType::New();
  DrawLineListType::Pointer drawLineList = DrawLineListType::New();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// The same is done for the writer.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  WriterType::Pointer writer = WriterType::New();
// Software Guide : EndCodeSnippet
  reader->SetFileName(argv[1]);
  // The same command-line value is used for both the X and Y radius, and
  // likewise for the overlap: the search windows are square.
  unsigned int LocalHoughRadiusX((unsigned int) ::atoi(argv[3]));
  unsigned int LocalHoughRadiusY((unsigned int) ::atoi(argv[3]));
  unsigned int LocalHoughOverlapX((unsigned int) ::atoi(argv[4]));
  unsigned int LocalHoughOverlapY((unsigned int) ::atoi(argv[4]));
  unsigned int LocalHoughNumberOfLines((unsigned int) ::atoi(argv[5]));
  // NOTE(review): the threshold is parsed with atoi and then cast to float,
  // so fractional command-line values are truncated — confirm atof was not
  // intended.
  float        LocalHoughThreshold((float) ::atoi(argv[6]));
  LocalHoughType::SizeType LocalHoughRadius;
  LocalHoughRadius[0] = LocalHoughRadiusX;
  LocalHoughRadius[1] = LocalHoughRadiusY;
  LocalHoughType::SizeType LocalHoughOverlap;
  LocalHoughOverlap[0] = LocalHoughOverlapX;
  LocalHoughOverlap[1] = LocalHoughOverlapY;
  localHough->SetRadius(LocalHoughRadius);
  localHough->SetOverlap(LocalHoughOverlap);
  localHough->SetNumberOfLines(LocalHoughNumberOfLines);
  localHough->SetThreshold(LocalHoughThreshold);
// Software Guide : BeginLatex
//
// The image obtained with the reader is passed as input to the
// \doxygen{otb}{ExtractSegmentsImageFilter}. The pipeline is built as follows.
//
// \index{otb::ExtractSegmentsImageFilter!SetInput()}
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
  // Detected line list is drawn over the original image and written out.
  localHough->SetInput(reader->GetOutput());
  drawLineList->SetInput(reader->GetOutput());
  drawLineList->SetInputLineSpatialObjectList(localHough->GetOutput());
  writer->SetFileName(argv[2]);
  writer->SetInput(drawLineList->GetOutput());
  writer->Update();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
// Figure~\ref{fig:LOCAL_HOUGH}
// shows the result of applying the \doxygen{otb}{LocalHoughImageFilter}.
// \begin{figure} \center
// \includegraphics[width=0.25\textwidth]{detected_lines.eps}
// \includegraphics[width=0.25\textwidth]{detected_local_hough.eps}
// \itkcaption[Line Correlation Detector Application]{Result of applying
// the \doxygen{otb}{LocalHoughImageFilter}. From left to right :
// original image, extracted segments.} \label{fig:LOCAL_HOUGH} \end{figure}
//
// Software Guide : EndLatex
  return EXIT_SUCCESS;
}
|
<table>
<tr>
<th>Name</th>
<th>Email</th>
<th>Phone</th>
</tr>
<tr>
<td contenteditable="true"></td>
<td contenteditable="true"></td>
<td contenteditable="true"></td>
</tr>
</table> |
# Sample usage of the RpiPrimary class
# NOTE(review): RpiPrimary is neither defined nor imported in this snippet;
# it is assumed to be provided by the surrounding project — confirm the import.
# Create a primary node with id 'P1'
primary_node = RpiPrimary('P1')
# Connect secondary nodes 'S1' and 'S2' to the primary node
primary_node.connect_secondary('S1')
primary_node.connect_secondary('S2')
# Handle incoming data from 'S1' and 'S2'
primary_node.handle_connection('S1', 'Data from S1')
primary_node.handle_connection('S2', 'Data from S2')
# Disconnect 'S1' from the primary node
primary_node.disconnect_secondary('S1')
# Handle incoming data from 'S1' after disconnection
primary_node.handle_connection('S1', 'Data from S1')  # This should not produce any output as 'S1' is disconnected
import argparse
import os
class ArgumentValidator:
    """Parses and validates the --config/--model/--task command-line options.

    Bug fix: the original registered the three options inside
    ``parse_arguments()``, so calling it more than once (as
    ``validate_arguments()`` does, three times) made argparse raise
    ``ArgumentError: conflicting option string``. Options are now registered
    once in ``__init__`` and the parsed namespace is cached.
    """

    def __init__(self):
        self.parser = argparse.ArgumentParser(description='Argument Validator')
        # Register each option exactly once.
        self.parser.add_argument('--config', help='Configuration file path')
        self.parser.add_argument('--model', help='Model name')
        self.parser.add_argument('--task', help='Task name')
        # Cached result of the first parse; None until parse_arguments() runs.
        self._args = None

    def parse_arguments(self):
        """Parse sys.argv once, cache the namespace, and return it.

        Repeated calls are safe and return the same cached namespace.
        """
        if self._args is None:
            self._args = self.parser.parse_args()
        return self._args

    def validate_arguments(self):
        """Run all validators; raises ValueError on the first failure."""
        self.validate_configuration_path()
        self.validate_model()
        self.validate_task()

    def validate_configuration_path(self):
        """Require --config to point at an existing file, else ValueError."""
        config_path = self.parse_arguments().config
        if not config_path or not os.path.isfile(config_path):
            raise ValueError("Invalid configuration file path")

    def validate_model(self):
        """Require a non-empty --model value, else ValueError."""
        if not self.parse_arguments().model:
            raise ValueError("Invalid model name")

    def validate_task(self):
        """Require a non-empty --task value, else ValueError."""
        if not self.parse_arguments().task:
            raise ValueError("Invalid task name")
# Instantiate the ArgumentValidator class
# NOTE(review): parse_arguments() reads sys.argv directly, so this module is
# only usable as a script entry point; validate_arguments() triggers further
# parse_arguments() calls internally — confirm repeated parsing is safe with
# the class implementation in use.
validator = ArgumentValidator()
args = validator.parse_arguments()
validator.validate_arguments()
#!/bin/bash
# SLURM batch job: evaluate a few-shot model (ProtoNet, Conv64, 5-shot) on the
# CUB-2010 birds dataset with LibFewShot. The source tree and dataset are
# staged into fast node-local storage ($SLURM_TMPDIR) before running, and the
# results directory is copied back to shared scratch afterwards.
# NOTE(review): --mail-type=ALL already covers BEGIN/END/FAIL/REQUEUE, so the
# preceding mail-type lines appear redundant — confirm before removing.
#SBATCH --mail-user=ar.aamer@gmail.com
#SBATCH --mail-type=BEGIN
#SBATCH --mail-type=END
#SBATCH --mail-type=FAIL
#SBATCH --mail-type=REQUEUE
#SBATCH --mail-type=ALL
#SBATCH --job-name=proto_conv64_birds_5shot
#SBATCH --output=%x-%j.out
#SBATCH --nodes=1
#SBATCH --gres=gpu:1
#SBATCH --ntasks-per-node=32
#SBATCH --mem=127000M
#SBATCH --time=0-12:00
#SBATCH --account=rrg-ebrahimi
# Record GPU availability and set up the Python environment.
nvidia-smi
module load python/3.7
source ~/my_env7/bin/activate
echo "------------------------------------< Data preparation>----------------------------------"
echo "Copying the source code"
date +"%T"
# Stage the code and datasets into node-local storage for fast I/O.
cd $SLURM_TMPDIR
cp -r ~/scratch/LibFewShot .
echo "Copying the datasets"
date +"%T"
cp -r ~/scratch/LibFewShot_Dataset/* .
echo "Extract to dataset folder"
date +"%T"
cd LibFewShot/dataset
# Only the CUB birds 2010 archive is used for this run; the other dataset
# extraction commands are intentionally left commented out for reuse.
#tar -xf $SLURM_TMPDIR/CIFAR100.tar.gz
#tar -xf $SLURM_TMPDIR/CUB_200_2011_FewShot.tar.gz
tar -xf $SLURM_TMPDIR/CUB_birds_2010.tar.gz
#tar -xf $SLURM_TMPDIR/StanfordCar.tar.gz
#tar -xf $SLURM_TMPDIR/StanfordDog.tar.gz
#unzip plant_disease_lfs.zip
#tar -xf $SLURM_TMPDIR/miniImageNet--ravi.tar.gz
#cat $SLURM_TMPDIR/tieredImageNet.tar.gz* | tar -zxf -
echo "----------------------------------< End of data preparation>--------------------------------"
date +"%T"
echo "--------------------------------------------------------------------------------------------"
echo "---------------------------------------<Run the program>------------------------------------"
date +"%T"
# Run the evaluation against the extracted dataset.
cd ..
python run_test_moz.py --data_root ./dataset/CUB_birds_2010
#python run_test.py --data_root ./dataset/CUB_birds_2010 --weight-root ./results/ProtoNet-miniImageNet--ravi-Conv64F-5-5-Nov-10-2021-14-47-48
wait
# Persist results back to the shared scratch filesystem before the node-local
# storage is cleaned up at job end.
cd $SLURM_TMPDIR
cp -r $SLURM_TMPDIR/LibFewShot/results/ ~/scratch/LibFewShot/
<filename>apps/common/src/test/java/net/community/apps/common/test/gridbag/ModifiedGridLayout.java
/*
*
*/
package net.community.apps.common.test.gridbag;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.Insets;
import net.community.chest.awt.ComponentSizeType;
/**
* <P>Copyright GPLv2</P>
*
* @author <NAME>.
* @since Mar 23, 2009 12:23:14 PM
*/
public class ModifiedGridLayout extends GridLayout {
    /**
     *
     */
    private static final long serialVersionUID = 611339312462648048L;
    /**
     * Creates the layout with the given grid dimensions and gaps.
     * @param rows number of rows (0 means "as many as needed")
     * @param cols number of columns (0 means "as many as needed")
     * @param hgap horizontal gap between cells, in pixels
     * @param vgap vertical gap between cells, in pixels
     * @throws IllegalArgumentException propagated from {@link GridLayout}
     */
    public ModifiedGridLayout (int rows, int cols, int hgap, int vgap) throws IllegalArgumentException
    {
        super(rows, cols, hgap, vgap);
    }
    /**
     * Creates the layout with zero gaps.
     */
    public ModifiedGridLayout (int rows, int cols) throws IllegalArgumentException
    {
        this(rows, cols, 0, 0);
    }
    /**
     * Computes the container size needed to show every child at the size
     * reported by the given {@link ComponentSizeType} (preferred, minimum,
     * ...). Like {@link GridLayout}, the widest and tallest child determine a
     * uniform cell size; insets and gaps are then added.
     *
     * @param parent container being measured (null-safe: yields an empty size)
     * @param szt which child size to query; children reporting null are skipped
     * @return the computed dimension for the container
     */
    protected Dimension calculateLayoutSize (final Container parent, final ComponentSizeType szt)
    {
        final Insets insets=(null == parent) ? null : parent.getInsets();
        final int ncomponents=(null == parent) ? 0 : parent.getComponentCount(),
            rows=getRows(), cols=getColumns();
        int nrows=rows,ncols=cols;
        // Derive the unspecified dimension from the fixed one, rounding up
        // (same rule GridLayout uses).
        if (nrows > 0)
            ncols = (ncomponents + nrows - 1) / nrows;
        else
            nrows = (ncomponents + ncols - 1) / ncols;
        int w=0,h=0;
        // Track the maximum child width and height.
        for (int i = 0; i < ncomponents; i++)
        {
            final Component comp=parent.getComponent(i);
            final Dimension d=((null == szt) || (null == comp)) ? null : szt.getSize(comp);
            if (null == d)
                continue;
            if (w < d.width)
                w = d.width;
            if (h < d.height)
                h = d.height;
        }
        final int iw=(null == insets) ? 0 : insets.left + insets.right,
            ih=(null == insets) ? 0 : insets.top + insets.bottom,
            dw=iw + ncols * w + (ncols - 1) * getHgap(),
            dh=ih + nrows * h + (nrows - 1) * getVgap();
        return new Dimension(dw, dh);
    }
    /*
     * @see java.awt.GridLayout#preferredLayoutSize(java.awt.Container)
     */
    @Override
    public Dimension preferredLayoutSize (final Container parent)
    {
        return calculateLayoutSize(parent, ComponentSizeType.PREFERRED);
    }
    /*
     * @see java.awt.GridLayout#minimumLayoutSize(java.awt.Container)
     */
    @Override
    public Dimension minimumLayoutSize (Container parent)
    {
        return calculateLayoutSize(parent, ComponentSizeType.MINIMUM);
    }
    /*
     * @see java.awt.GridLayout#layoutContainer(java.awt.Container)
     */
    @Override
    public void layoutContainer (Container parent)
    {
        final Insets insets=(null == parent) ? null : parent.getInsets();
        final int ncomponents=(null == parent) ? 0 : parent.getComponentCount();
        int x=(null == insets) ? 0 : insets.left,
            y=(null == insets) ? 0 : insets.top,
            nrows=getRows(), ncols=getColumns();
        // Derive the unspecified dimension from the fixed one, rounding up.
        if (nrows > 0)
            ncols = (ncomponents + nrows - 1) / nrows;
        else
            nrows = (ncomponents + ncols - 1) / ncols;
        final int iw=(null == insets) ? 0 : (insets.left + insets.right),
            ih=(null == insets) ? 0 : (insets.top + insets.bottom),
            hg=getHgap(), vg=getVgap();
        // Cell width/height available after subtracting insets and gaps.
        int w=((null == parent) ? 0 : parent.getWidth()) - iw,
            h=((null == parent) ? 0 : parent.getHeight()) - ih;
        if (ncols > 0)
            w = (w - (ncols - 1) * hg) / ncols;
        if (nrows > 0)
            h = (h - (nrows - 1) * vg) / nrows;
        // NOTE(review): unlike GridLayout, children are placed along a single
        // axis: with a positive column count each child gets the uniform cell
        // width but its own preferred height and only x advances; otherwise
        // each child gets its preferred width, the uniform height, and only y
        // advances. The cursor never wraps to a new row/column — confirm this
        // "modified" behavior is intended.
        for (int i = 0; i < ncomponents; ++i)
        {
            final Component comp=parent.getComponent(i);
            final Dimension d=(null == comp) ? null : comp.getPreferredSize();
            if (null == d)
                continue;
            if (ncols > 0)
            {
                comp.setBounds(x, y, w, d.height);
                x += (d.width + hg);
            }
            else
            {
                comp.setBounds(x, y, d.width, h);
                y += (d.height + vg);
            }
        }
    }
}
|
package weixin.lottery.controller;
import org.apache.log4j.Logger;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.jeecgframework.core.common.controller.BaseController;
import org.jeecgframework.core.common.exception.BusinessException;
import org.jeecgframework.core.common.hibernate.qbc.CriteriaQuery;
import org.jeecgframework.core.common.model.json.AjaxJson;
import org.jeecgframework.core.common.model.json.DataGrid;
import org.jeecgframework.core.constant.Globals;
import org.jeecgframework.core.util.*;
import org.jeecgframework.poi.excel.ExcelExportUtil;
import org.jeecgframework.poi.excel.ExcelImportUtil;
import org.jeecgframework.poi.excel.entity.ExcelTitle;
import org.jeecgframework.poi.excel.entity.ImportParams;
import org.jeecgframework.tag.core.easyui.TagUtil;
import org.jeecgframework.web.system.service.SystemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import org.springframework.web.servlet.ModelAndView;
import weixin.lottery.entity.WeixinLotteryEntity;
import weixin.lottery.service.WeixinLotteryServiceI;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.OutputStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* @author onlineGenerator
* @version V1.0
* @Title: Controller
* @Description: 微信活动
* @date 2015-02-05 14:26:01
*/
@Scope("prototype")
@Controller
@RequestMapping("/weixinRedPacketsController")
public class WeixinRedPacketsController extends BaseController {
    /**
     * Logger for this class
     */
    private static final Logger logger = Logger.getLogger(WeixinRedPacketsController.class);
    @Autowired
    private WeixinLotteryServiceI weixinLotteryService;
    @Autowired
    private SystemService systemService;
    // User-facing result message of the most recent operation (Chinese text
    // is part of the API responses and must stay as-is).
    private String message;
    public String getMessage() {
        return message;
    }
    public void setMessage(String message) {
        this.message = message;
    }
    /**
     * WeChat activity list: forwards to the red-packet list page, passing the
     * requested lottery type through to the view.
     *
     * @return the list view
     */
    @RequestMapping(params = "weixinRedPackets")
    public ModelAndView weixinLottery(HttpServletRequest request) {
        request.setAttribute("lotteryType", request.getParameter("lotteryType"));
        return new ModelAndView("weixin/lottery/weixinRedPacketsList");
    }
    /**
     * Forwards to the red-packet address page for the given activity id
     * ("hdid" request parameter).
     *
     * @return the address view
     */
    @RequestMapping(params = "weixinLotteryAddress")
    public ModelAndView weixinLotteryAddress(HttpServletRequest request) {
        request.setAttribute("hdid", request.getParameter("hdid"));
        return new ModelAndView("weixin/lottery/weixinRedPacketsAddress");
    }
    /**
     * Activity list datagrid: queries activities for the current WeChat
     * account (query-by-example on the entity plus request parameters) and
     * writes the grid JSON into the response.
     *
     * @param weixinLottery filter entity (query-by-example)
     * @param request       current request, supplies extra filter parameters
     * @param response      response the grid data is written to
     * @param dataGrid      paging/sorting descriptor
     */
    @RequestMapping(params = "datagrid")
    public void datagrid(WeixinLotteryEntity weixinLottery, HttpServletRequest request, HttpServletResponse response, DataGrid dataGrid) {
        CriteriaQuery cq = new CriteriaQuery(WeixinLotteryEntity.class, dataGrid);
        // Merchant/account id: restrict results to the current WeChat account.
        weixinLottery.setAccountid(ResourceUtil.getWeiXinAccountId());
        // Query-condition assembler: builds HQL criteria from the entity
        // fields and the raw request parameter map.
        org.jeecgframework.core.extend.hqlsearch.HqlGenerateUtil.installHql(cq, weixinLottery, request.getParameterMap());
        cq.add();
        this.weixinLotteryService.getDataGridReturn(cq, true);
        TagUtil.datagrid(response, dataGrid);
    }
    /**
     * Deletes a single WeChat activity and logs the operation.
     *
     * @return AJAX result whose message reports success or failure
     */
    @RequestMapping(params = "doDel")
    @ResponseBody
    public AjaxJson doDel(WeixinLotteryEntity weixinLottery, HttpServletRequest request) {
        AjaxJson j = new AjaxJson();
        weixinLottery = systemService.getEntity(WeixinLotteryEntity.class, weixinLottery.getId());
        message = "微信活动删除成功";
        try {
            weixinLotteryService.delWeixinLottery(weixinLottery);
            systemService.addLog(message, Globals.Log_Type_DEL, Globals.Log_Leavel_INFO);
        } catch (Exception e) {
            e.printStackTrace();
            message = "微信活动删除失败";
            throw new BusinessException(e.getMessage());
        }
        j.setMsg(message);
        return j;
    }
    /**
     * Deletes several WeChat activities in one request.
     *
     * @param ids comma-separated list of activity ids to delete
     * @return AJAX result whose message reports success or failure
     */
    @RequestMapping(params = "doBatchDel")
    @ResponseBody
    public AjaxJson doBatchDel(String ids, HttpServletRequest request) {
        AjaxJson j = new AjaxJson();
        message = "微信活动删除成功";
        try {
            for (String id : ids.split(",")) {
                WeixinLotteryEntity weixinLottery = systemService.getEntity(WeixinLotteryEntity.class,
                        id
                );
                weixinLotteryService.delete(weixinLottery);
                systemService.addLog(message, Globals.Log_Type_DEL, Globals.Log_Leavel_INFO);
            }
        } catch (Exception e) {
            e.printStackTrace();
            message = "微信活动删除失败";
            throw new BusinessException(e.getMessage());
        }
        j.setMsg(message);
        return j;
    }
    /**
     * Adds a WeChat activity after validating the submitted time window and
     * the red-packet count (which may not exceed 10x the available traffic
     * value). On success the activity state is set according to whether the
     * start time has already passed.
     *
     * @param weixinLottery the activity to persist
     * @return AJAX result; success=false with an explanatory message when a
     *         validation fails
     */
    @RequestMapping(params = "doAdd")
    @ResponseBody
    public AjaxJson doAdd(WeixinLotteryEntity weixinLottery, HttpServletRequest request) throws Exception {
        AjaxJson j = new AjaxJson();
        String start = request.getParameter("starttime");
        String endtime = request.getParameter("endtime");
        if (start == null || start.equals("")) {
            j.setSuccess(false);
            j.setMsg("请填写开始时间");
            return j;
        }
        if (endtime == null || endtime.equals("")) {
            j.setSuccess(false);
            j.setMsg("请填写结束时间");
            return j;
        }
        Date date = new Date(); // current date, used for the state checks below
        // Disabled check: originally rejected start times earlier than "now".
        // DateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        // String time = format.format(date);
        //
        // SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        // Date date1 = sdf.parse(time);
        //
        // if (date.getTime() > weixinLottery.getStarttime().getTime()) {
        // j.setSuccess(false);
        // j.setMsg("开始时间不能小于当前的时间");
        // return j;
        // }
        // Red-packet count may not exceed 10x the available traffic value.
        Double totalvalue = Double.parseDouble(weixinLottery.getAbledotherprize());
        int redpacketstotal = weixinLottery.getLotterynumber();
        if (redpacketstotal > totalvalue * 10) {
            j.setSuccess(false);
            j.setMsg("红包个数不得多于流量的10倍");
            return j;
        }
        if (weixinLottery.getStarttime().getTime() >= weixinLottery.getEndtime().getTime()) {
            j.setSuccess(false);
            j.setMsg("结束时间必须大于开始时间");
            return j;
        }
        if (date.getTime() > weixinLottery.getEndtime().getTime()) {
            j.setMsg("结束时间不能小于当前的时间");
            j.setSuccess(false);
            return j;
        }
        message = "微信活动添加成功";
        try {
            if(date.getTime()>weixinLottery.getStarttime().getTime()){
                weixinLottery.setState("1");// state "1": activity in progress
            }else {
                weixinLottery.setState("2");// state "2": activity not started yet
            }
            weixinLottery.setLotterynumberday(1);
            weixinLotteryService.save(weixinLottery);
            systemService.addLog(message, Globals.Log_Type_INSERT, Globals.Log_Leavel_INFO);
        } catch (Exception e) {
            e.printStackTrace();
            message = "微信活动添加失败";
            throw new BusinessException(e.getMessage());
        }
        j.setMsg(message);
        return j;
    }
    /**
     * Updates a WeChat activity after re-validating the red-packet count and
     * the time window; only non-null fields of the submitted entity overwrite
     * the stored record.
     *
     * @param weixinLottery the activity fields to apply
     * @return AJAX result; success=false with an explanatory message when a
     *         validation fails
     */
    @RequestMapping(params = "doUpdate")
    @ResponseBody
    public AjaxJson doUpdate(WeixinLotteryEntity weixinLottery, HttpServletRequest request) {
        AjaxJson j = new AjaxJson();
        // Same business rule as doAdd: red packets limited to 10x traffic value.
        Double totalvalue = Double.parseDouble(weixinLottery.getAbledotherprize());
        int redpacketstotal = weixinLottery.getLotterynumber();
        if (redpacketstotal > totalvalue * 10) {
            j.setSuccess(false);
            j.setMsg("红包个数不得多于流量的10倍");
            return j;
        }
        if (weixinLottery.getStarttime().getTime() >= weixinLottery.getEndtime().getTime()) {
            j.setSuccess(false);
            j.setMsg("结束时间必须大于开始时间");
            return j;
        }
        message = "微信活动更新成功";
        WeixinLotteryEntity t = weixinLotteryService.get(WeixinLotteryEntity.class, weixinLottery.getId());
        try {
            weixinLottery.setLotterynumberday(1);
            // Copy only non-null submitted fields onto the persistent entity.
            MyBeanUtils.copyBeanNotNull2Bean(weixinLottery, t);
            weixinLotteryService.saveOrUpdate(t);
            systemService.addLog(message, Globals.Log_Type_UPDATE, Globals.Log_Leavel_INFO);
        } catch (Exception e) {
            e.printStackTrace();
            message = "微信活动更新失败";
            throw new BusinessException(e.getMessage());
        }
        j.setMsg(message);
        return j;
    }
    /**
     * Forwards to the activity "add" page; when an id is supplied the
     * existing activity is loaded and exposed to the view for pre-filling.
     *
     * @return the add view
     */
    @RequestMapping(params = "goAdd")
    public ModelAndView goAdd(WeixinLotteryEntity weixinLottery, HttpServletRequest req) {
        req.setAttribute("lotteryType", req.getParameter("lotteryType"));
        if (StringUtil.isNotEmpty(weixinLottery.getId())) {
            weixinLottery = weixinLotteryService.getEntity(WeixinLotteryEntity.class, weixinLottery.getId());
            req.setAttribute("weixinLotteryPage", weixinLottery);
        }
        return new ModelAndView("weixin/lottery/weixinRedPackets-add");
    }
    /**
     * Forwards to the activity "edit" page, classifying the activity state
     * (not started / in progress / finished) so the view can adjust which
     * fields are editable.
     *
     * @return the update view
     */
    @RequestMapping(params = "goUpdate")
    public ModelAndView goUpdate(WeixinLotteryEntity weixinLottery, HttpServletRequest req) throws Exception {
        req.setAttribute("lotteryType", req.getParameter("lotteryType"));
        if (StringUtil.isNotEmpty(weixinLottery.getId())) {
            weixinLottery = weixinLotteryService.getEntity(WeixinLotteryEntity.class, weixinLottery.getId());
            Date createTime = weixinLottery.getStarttime();
            req.setAttribute("createTime", createTime); // activity start time
            Date endTime = weixinLottery.getEndtime();
            req.setAttribute("endTime", endTime); // activity end time
            Date date = new Date(); // current date
            DateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            String time = format.format(date);
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); // normalize "now" through the same pattern
            Date date1 = sdf.parse(time);
            // Classify the activity into one of three states ("hd" attribute):
            // -1: not started yet — now is before the start time.
            if (date1.getTime() < createTime.getTime()) {
                req.setAttribute("hd", "-1");
            }
            // 0: in progress — now lies between start and end.
            if (date1.getTime() > createTime.getTime() && date1.getTime() < endTime.getTime()) {
                req.setAttribute("hd", "0");
            }
            // 1: finished — now is after the end time.
            if (date1.getTime() > endTime.getTime()) {
                req.setAttribute("hd", "1");
            }
            req.setAttribute("weixinLotteryPage", weixinLottery);
        }
        return new ModelAndView("weixin/lottery/weixinRedPackets-update");
    }
}
|
<reponame>xinjiayu/SimServerUnicom
package utils
import (
"github.com/gogf/gf/os/gtime"
"github.com/gogf/gf/util/gconv"
"time"
)
// G1 is one gigabyte in bytes.
const G1 int64 = 1024 * 1024 * 1024

// MB1 is one megabyte in bytes.
const MB1 int64 = 1024 * 1024

// FlowUseDate bundles the date strings shared by the traffic (flow)
// statistics code. All fields hold formatted date components as strings.
type FlowUseDate struct {
	Year                string // current year
	LastYear            string // previous year
	Month               string // current month
	LastMonth           string // last month
	BeforeLastMonth     string // month before last
	Today               string // today's day of month
	Yesterday           string // yesterday's day of month
	BeforeYesterday     string // day before yesterday's day of month
	LastMonthDays       string // number of days in last month
	BeforeLastMonthDays string // number of days in the month before last
}
//计算两个时间相差的天数
func TimeSub(t1, t2 time.Time) int {
t1 = time.Date(t1.Year(), t1.Month(), t1.Day(), 0, 0, 0, 0, time.Local)
t2 = time.Date(t2.Year(), t2.Month(), t2.Day(), 0, 0, 0, 0, time.Local)
return int(t1.Sub(t2).Hours() / 24)
}
// Abs returns the absolute value of x.
func Abs(x int64) int64 {
	if x >= 0 {
		return x
	}
	return -x
}
// ChangeUnixTime converts a date-time string into its Unix timestamp
// (seconds since epoch), returned as a string.
func ChangeUnixTime(strTime string) string {
	t := gtime.NewFromStr(strTime).Second()
	return gconv.String(t)
}
// GetFlowUseDate assembles the date information (current/previous year and
// month, recent days, and month lengths) used in several places by the flow
// statistics code.
//
// Bug fix: the original overwrote LastMonthDays with the day-count of the
// month *before* last (a copy-paste duplicate of the BeforeLastMonthDays
// assignment), so LastMonthDays never held the correct value. "now" is also
// captured once so all fields refer to the same instant.
func GetFlowUseDate() FlowUseDate {
	var fud = FlowUseDate{}
	now := gtime.Now()
	fud.Year = now.Format("Y")                                  // current year
	fud.LastYear = now.AddDate(-1, 0, 0).Format("Y")            // previous year
	fud.Month = now.Format("n")                                 // current month
	fud.LastMonth = now.AddDate(0, -1, 0).Format("n")           // last month
	fud.BeforeLastMonth = now.AddDate(0, -2, 0).Format("n")     // month before last
	fud.Today = now.Format("j")                                 // today
	fud.Yesterday = now.AddDate(0, 0, -1).Format("j")           // yesterday
	fud.BeforeYesterday = now.AddDate(0, 0, -2).Format("j")     // day before yesterday
	fud.LastMonthDays = now.AddDate(0, -1, 0).Format("t")       // days in last month
	fud.BeforeLastMonthDays = now.AddDate(0, -2, 0).Format("t") // days in the month before last
	return fud
}
|
'use strict';
var chai = require('chai');
var nodemailer = require('../src/nodemailer');
var sinon = require('sinon');
var http = require('http');
var fs = require('fs');
var expect = chai.expect;
var SMTPServer = require('smtp-server').SMTPServer;
var crypto = require('crypto');
chai.config.includeStack = true;
var PORT_NUMBER = 8397;
describe('Nodemailer unit tests', function() {
var nm, transport;
beforeEach(function() {
transport = {
name: 'testsend',
version: '1',
send: function(data, callback) {
callback();
}
};
nm = nodemailer.createTransport(transport);
});
it('should create Nodemailer transport object', function() {
expect(nm).to.exist;
});
describe('Hooking plugins', function() {
it('should add a plugin to queue', function() {
nm.use('compile', 'abc');
nm.use('compile', 'def');
expect(nm._plugins).to.deep.equal({
compile: [
'abc',
'def'
],
stream: []
});
});
it('should process compile and stream plugins', function(done) {
var compilePlugin = sinon.stub().yields(null);
var streamPlugin = sinon.stub().yields(null);
nm.use('compile', compilePlugin);
nm.use('compile', streamPlugin);
nm.sendMail({
subject: 'test'
}, function() {
expect(compilePlugin.callCount).to.equal(1);
expect(compilePlugin.args[0][0].data.subject).to.equal('test');
expect(compilePlugin.args[0][0].message).to.exist;
expect(streamPlugin.callCount).to.equal(1);
expect(streamPlugin.args[0][0].data.subject).to.equal('test');
expect(streamPlugin.args[0][0].message).to.exist;
done();
});
});
});
describe('#sendMail', function() {
it('should process sendMail', function(done) {
sinon.stub(transport, 'send').yields(null, 'tere tere');
nm.sendMail({
subject: 'test'
}, function(err, info) {
expect(transport.send.callCount).to.equal(1);
expect(info).to.equal('tere tere');
transport.send.restore();
done();
});
});
it('should return transport error', function(done) {
sinon.stub(transport, 'send').yields('tere tere');
nm.sendMail({
subject: 'test'
}, function(err) {
expect(transport.send.callCount).to.equal(1);
expect(err).to.equal('tere tere');
transport.send.restore();
done();
});
});
it('should override xMailer', function(done) {
sinon.stub(transport, 'send', function(mail, callback) {
expect(mail.message.getHeader('x-mailer')).to.equal('yyyy');
callback();
});
nm.sendMail({
subject: 'test',
xMailer: 'yyyy'
}, function() {
expect(transport.send.callCount).to.equal(1);
transport.send.restore();
done();
});
});
it('return invalid configuration error', function(done) {
nm = nodemailer.createTransport('SMTP', {});
nm.sendMail({
subject: 'test',
xMailer: 'yyyy'
}, function(err) {
expect(err).to.exist;
done();
});
});
});
// Tests for nm.resolveContent: resolving message content from strings,
// buffers, files, URLs, streams and data: URIs.
// Fix: all Buffer instances are now created via Buffer.from(), replacing
// the deprecated and unsafe new Buffer() constructor (Node DEP0005).
describe('Resolver tests', function() {
    var port = 10337;
    var server;

    beforeEach(function(done) {
        // Tiny HTTP server backing the "from url" tests below.
        server = http.createServer(function(req, res) {
            res.writeHead(200, {
                'Content-Type': 'text/plain'
            });
            res.end('<p>Tere, tere</p><p>vana kere!</p>\n');
        });
        server.listen(port, done);
    });

    afterEach(function(done) {
        server.close(done);
    });

    it('should set text from html string', function(done) {
        var mail = {
            data: {
                html: '<p>Tere, tere</p><p>vana kere!</p>\n'
            }
        };
        nm.resolveContent(mail.data, 'html', function(err, value) {
            expect(err).to.not.exist;
            expect(value).to.equal('<p>Tere, tere</p><p>vana kere!</p>\n');
            done();
        });
    });

    it('should set text from html buffer', function(done) {
        var mail = {
            data: {
                html: Buffer.from('<p>Tere, tere</p><p>vana kere!</p>\n')
            }
        };
        nm.resolveContent(mail.data, 'html', function(err, value) {
            expect(err).to.not.exist;
            expect(value).to.deep.equal(mail.data.html);
            done();
        });
    });

    it('should set text from a html file', function(done) {
        var mail = {
            data: {
                html: {
                    path: __dirname + '/fixtures/message.html'
                }
            }
        };
        nm.resolveContent(mail.data, 'html', function(err, value) {
            expect(err).to.not.exist;
            expect(value).to.deep.equal(Buffer.from('<p>Tere, tere</p><p>vana kere!</p>\n'));
            done();
        });
    });

    it('should set text from an html url', function(done) {
        var mail = {
            data: {
                html: {
                    path: 'http://localhost:' + port + '/message.html'
                }
            }
        };
        nm.resolveContent(mail.data, 'html', function(err, value) {
            expect(err).to.not.exist;
            expect(value).to.deep.equal(Buffer.from('<p>Tere, tere</p><p>vana kere!</p>\n'));
            done();
        });
    });

    it('should set text from a html stream', function(done) {
        var mail = {
            data: {
                html: fs.createReadStream(__dirname + '/fixtures/message.html')
            }
        };
        nm.resolveContent(mail.data, 'html', function(err, value) {
            expect(err).to.not.exist;
            // resolveContent replaces the stream with the fully-read buffer.
            expect(mail).to.deep.equal({
                data: {
                    html: Buffer.from('<p>Tere, tere</p><p>vana kere!</p>\n')
                }
            });
            expect(value).to.deep.equal(Buffer.from('<p>Tere, tere</p><p>vana kere!</p>\n'));
            done();
        });
    });

    it('should return an error', function(done) {
        var mail = {
            data: {
                html: {
                    // Nothing listens on port + 1000, so resolving must fail.
                    path: 'http://localhost:' + (port + 1000) + '/message.html'
                }
            }
        };
        nm.resolveContent(mail.data, 'html', function(err) {
            expect(err).to.exist;
            done();
        });
    });

    it('should return encoded string as buffer', function(done) {
        var str = '<p>Tere, tere</p><p>vana kere!</p>\n';
        var mail = {
            data: {
                html: {
                    encoding: 'base64',
                    content: Buffer.from(str).toString('base64')
                }
            }
        };
        nm.resolveContent(mail.data, 'html', function(err, value) {
            expect(err).to.not.exist;
            expect(value).to.deep.equal(Buffer.from(str));
            done();
        });
    });

    describe('data uri tests', function() {
        it('should resolve with mime type and base64', function(done) {
            var mail = {
                data: {
                    attachment: {
                        path: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
                    }
                }
            };
            nm.resolveContent(mail.data, 'attachment', function(err, value) {
                expect(err).to.not.exist;
                expect(value).to.deep.equal(Buffer.from('iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==', 'base64'));
                done();
            });
        });
        it('should resolve with mime type and plaintext', function(done) {
            var mail = {
                data: {
                    attachment: {
                        path: 'data:image/png,tere%20tere'
                    }
                }
            };
            nm.resolveContent(mail.data, 'attachment', function(err, value) {
                expect(err).to.not.exist;
                expect(value).to.deep.equal(Buffer.from('tere tere'));
                done();
            });
        });
        it('should resolve with plaintext', function(done) {
            var mail = {
                data: {
                    attachment: {
                        path: 'data:,tere%20tere'
                    }
                }
            };
            nm.resolveContent(mail.data, 'attachment', function(err, value) {
                expect(err).to.not.exist;
                expect(value).to.deep.equal(Buffer.from('tere tere'));
                done();
            });
        });
        it('should resolve with mime type, charset and base64', function(done) {
            var mail = {
                data: {
                    attachment: {
                        path: 'data:image/png;charset=iso-8859-1;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
                    }
                }
            };
            nm.resolveContent(mail.data, 'attachment', function(err, value) {
                expect(err).to.not.exist;
                expect(value).to.deep.equal(Buffer.from('iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==', 'base64'));
                done();
            });
        });
    });
});
});
// End-to-end tests against an in-process SMTP server (smtp-server module).
// The server hashes every received message with md5 so tests can assert on
// the exact bytes that went over the wire.
describe('Nodemailer integration tests', function() {
    this.timeout(10000);
    var server;
    beforeEach(function(done) {
        server = new SMTPServer({
            authMethods: ['PLAIN', 'XOAUTH2'],
            disabledCommands: ['STARTTLS'],
            // Accumulate an md5 of the DATA stream; the digest becomes the
            // SMTP response so tests can check it via info.response.
            onData: function(stream, session, callback) {
                var hash = crypto.createHash('md5');
                stream.on('data', function(chunk) {
                    hash.update(chunk);
                });
                stream.on('end', function() {
                    callback(null, hash.digest('hex'));
                });
            },
            onAuth: function(auth, session, callback) {
                if (auth.method !== 'XOAUTH2') {
                    if (auth.username !== 'testuser' || auth.password !== '<PASSWORD>') {
                        return callback(new Error('Invalid username or password'));
                    }
                } else {
                    if (auth.username !== 'testuser' || auth.accessToken !== 'testtoken') {
                        // XOAUTH2 failure is reported as a 401 payload rather
                        // than an Error, per the XOAUTH2 protocol.
                        return callback(null, {
                            data: {
                                status: '401',
                                schemes: 'bearer mac',
                                scope: 'my_smtp_access_scope_name'
                            }
                        });
                    }
                }
                callback(null, {
                    user: 123
                });
            },
            // Only sender addresses under @valid.sender are accepted.
            onMailFrom: function(address, session, callback) {
                if (!/@valid.sender/.test(address.address)) {
                    return callback(new Error('Only <EMAIL> is allowed to send mail'));
                }
                return callback(); // Accept the address
            },
            // Only recipient addresses under @valid.recipient are accepted.
            onRcptTo: function(address, session, callback) {
                if (!/@valid.recipient/.test(address.address)) {
                    return callback(new Error('Only <EMAIL> is allowed to receive mail'));
                }
                return callback(); // Accept the address
            },
            logger: false
        });
        server.listen(PORT_NUMBER, done);
    });
    afterEach(function(done) {
        server.close(done);
    });
    it('should log in and send mail', function(done) {
        var nm = nodemailer.createTransport({
            host: 'localhost',
            port: PORT_NUMBER,
            auth: {
                user: 'testuser',
                pass: '<PASSWORD>'
            },
            ignoreTLS: true
        });
        // Fixed date/messageId/xMailer keep the message bytes deterministic,
        // so the md5 asserted below stays stable.
        var mailData = {
            from: '<EMAIL>',
            sender: '<EMAIL>',
            to: ['<EMAIL>', '<EMAIL>', '<EMAIL>'],
            subject: 'test',
            date: new Date('Mon, 31 Jan 2011 23:01:00 +0000'),
            messageId: 'abc@def',
            xMailer: 'aaa',
            text: 'uuu'
        };
        nm.sendMail(mailData, function(err, info) {
            expect(err).to.not.exist;
            expect(info.accepted).to.deep.equal([
                '<EMAIL>',
                '<EMAIL>'
            ]);
            expect(info.rejected).to.deep.equal([
                '<EMAIL>'
            ]);
            expect(info.messageId).to.equal('abc@def');
            // md5 of the exact message bytes, echoed back by onData above.
            expect(/538ec1431ce376bc46f11b0f51849beb/i.test(info.response)).to.be.true;
            done();
        });
    });
    it('should response auth error', function(done) {
        var nm = nodemailer.createTransport({
            host: 'localhost',
            port: PORT_NUMBER,
            auth: {
                user: 'invalid user',
                pass: '<PASSWORD>'
            },
            ignoreTLS: true
        });
        var mailData = {
            from: '<EMAIL>',
            to: ['<EMAIL>', '<EMAIL>', '<EMAIL>'],
            subject: 'test',
            date: new Date('Mon, 31 Jan 2011 23:01:00 +0000'),
            messageId: 'abc@def',
            xMailer: 'aaa',
            text: 'uuu'
        };
        nm.sendMail(mailData, function(err, info) {
            expect(err).to.exist;
            expect(info).to.not.exist;
            expect(err.code).to.equal('EAUTH');
            done();
        });
    });
    it('should response envelope error', function(done) {
        var nm = nodemailer.createTransport({
            host: 'localhost',
            port: PORT_NUMBER,
            auth: {
                user: 'testuser',
                pass: '<PASSWORD>'
            },
            ignoreTLS: true
        });
        var mailData = {
            from: '<EMAIL>',
            to: ['<EMAIL>'],
            subject: 'test',
            date: new Date('Mon, 31 Jan 2011 23:01:00 +0000'),
            messageId: 'abc@def',
            xMailer: 'aaa',
            text: 'uuu'
        };
        nm.sendMail(mailData, function(err, info) {
            expect(err).to.exist;
            expect(info).to.not.exist;
            expect(err.code).to.equal('EENVELOPE');
            done();
        });
    });
    it('should override envelope', function(done) {
        var nm = nodemailer.createTransport({
            host: 'localhost',
            port: PORT_NUMBER,
            auth: {
                user: 'testuser',
                pass: '<PASSWORD>'
            },
            ignoreTLS: true
        });
        // The explicit envelope takes precedence over from/to headers for
        // the SMTP MAIL FROM / RCPT TO commands.
        var mailData = {
            from: '<EMAIL>',
            to: ['<EMAIL>', '<EMAIL>', '<EMAIL>'],
            subject: 'test',
            date: new Date('Mon, 31 Jan 2011 23:01:00 +0000'),
            messageId: 'abc@def',
            xMailer: 'aaa',
            text: 'uuu',
            envelope: {
                from: '<EMAIL>',
                to: '<EMAIL>'
            }
        };
        nm.sendMail(mailData, function(err, info) {
            expect(err).to.not.exist;
            expect(info.accepted).to.deep.equal([
                '<EMAIL>'
            ]);
            expect(info.rejected).to.deep.equal([]);
            expect(info.messageId).to.equal('abc@def');
            expect(/eaa13435e1401328be32bc7a4c629f9f/i.test(info.response)).to.be.true;
            done();
        });
    });
});
|
module Travis
  module Api
    module V0
      module Pusher
        class Job
          # Marker subclass for the "job started" Pusher event.
          # It adds no behaviour of its own; the subclass exists so the
          # event payload/type can be derived from the class itself.
          class Started < Job
          end
        end
      end
    end
  end
end
|
<gh_stars>0
package com.didi.drouter.service;
import androidx.annotation.NonNull;
/**
* Created by gaowei on 2019/3/28
*/
/**
 * DRouter service-call contract: a single untyped varargs entry point plus
 * typed functional interfaces (Type0..Type5, TypeN) that callers can
 * implement when the call arity is known at compile time.
 */
public interface ICallService {
    /** Generic entry point; implementations interpret and cast {@code params}. */
    <T> T call(Object... params);
    /** Call with no arguments. */
    interface Type0<Result> {
        Result call();
    }
    /** Call with one argument. */
    interface Type1<Param1, Result> {
        Result call(Param1 param1);
    }
    /** Call with two arguments. */
    interface Type2<Param1, Param2, Result> {
        Result call(Param1 param1, Param2 param2);
    }
    /** Call with three arguments. */
    interface Type3<Param1, Param2, Param3, Result> {
        Result call(Param1 param1, Param2 param2, Param3 param3);
    }
    /** Call with four arguments. */
    interface Type4<Param1, Param2, Param3, Param4, Result> {
        Result call(Param1 param1, Param2 param2, Param3 param3, Param4 param4);
    }
    /** Call with five arguments. */
    interface Type5<Param1, Param2, Param3, Param4, Param5, Result> {
        Result call(Param1 param1, Param2 param2, Param3 param3, Param4 param4, Param5 param5);
    }
    /** Call with an arbitrary number of arguments. */
    interface TypeN<Result> {
        Result call(@NonNull Object... params);
    }
}
|
#!/bin/bash -xe

# Log the script's exit status before terminating (installed on EXIT below).
function on_exit {
    rc=$?
    info "Exiting with rc=$rc"
    exit $rc
}
trap on_exit EXIT

# Timestamped log helpers.
function info {
    echo "$(date):INFO: $1"
}

function error {
    # Fix: this previously emitted an "INFO:" label (copy-paste from info),
    # making error messages indistinguishable from informational ones.
    echo "$(date):ERROR: $1"
}

info "Starting $0"
# User customization code below
|
<filename>proj/src/keyboard.c
#include "keyboard.h"
/* Global variables */
int kbd_hook_id = KBD_IRQ;     /* hook id for sys_irqsetpolicy; the kernel rewrites it on subscribe */
uint8_t scancode;              /* most recent scancode fetched by the IRQ handler */
uint32_t sys_inb_counter = 0;  /* number of sys_inb invocations (LAB3 instrumentation only) */
bool status_error = false;     /* true when the last KBC read failed (empty buffer / parity / timeout) */
// will only count the uses of sys_inb when in LAB3, because of the flag
#ifdef LAB3
/* Counting wrapper around util_sys_inb, used to report sys_inb usage in LAB3. */
int sys_inb_with_count(int port, uint8_t *value) {
    sys_inb_counter++;
    return util_sys_inb(port, value);
}
#else
/* Outside LAB3 the wrapper is a plain pass-through. */
int sys_inb_with_count(int port, uint8_t *value) {
    return util_sys_inb(port, value);
}
#endif
/* Subscribes keyboard interrupts in exclusive mode.
 * On return, *bit_no holds the bitmask to match against interrupt
 * notifications. Returns 0 on success, 1 on failure. */
int (keyboard_subscribe_int)(uint8_t *bit_no)
{
    /* Expose the hook id as a bitmask before the kernel rewrites kbd_hook_id. */
    *bit_no = BIT(kbd_hook_id);
    /* IRQ_EXCLUSIVE: the default kernel keyboard handler stays out of the way. */
    return sys_irqsetpolicy(KBD_IRQ, IRQ_REENABLE | IRQ_EXCLUSIVE, &kbd_hook_id) == OK ? 0 : 1;
}
/* Unsubscribes the keyboard interrupt previously set up by
 * keyboard_subscribe_int. Returns 0 on success, 1 on failure. */
int (keyboard_unsubscribe_int)()
{
    return sys_irqrmpolicy(&kbd_hook_id) == OK ? 0 : 1;
}
/* Keyboard interrupt handler: reads one byte from the KBC output buffer
 * into the global `scancode`, or flags `status_error` on failure. */
void (kbc_ih)()
{
    get_scan_code();
}
/* Assembles possibly two-byte scancodes.
 * Stores the byte just read (global `scancode`) into scancode_array at
 * *index. Returns true when a complete scancode is available, false when
 * the byte was the two-byte prefix (stored at slot 0, *index advanced to 1)
 * or when the last read failed. */
bool (check_complete_scancode)(uint8_t scancode_array[], uint8_t *index) {
    if (status_error)
        return false;
    if (scancode != INCOMPLETE_CODE) {
        /* Single byte, or second byte of a two-byte code: scancode complete. */
        scancode_array[*index] = scancode;
        return true;
    }
    /* Prefix byte of a two-byte scancode: stash it and wait for the rest. */
    scancode_array[*index] = INCOMPLETE_CODE;
    *index = 1;
    return false;
}
/* Reads one scancode byte from the KBC.
 * Publishes the byte in the global `scancode` and clears `status_error`;
 * on an empty output buffer or a parity/timeout error it sets
 * `status_error` and leaves `scancode` untouched. */
void (get_scan_code)() {
    uint8_t status;
    uint8_t byte_read;

    sys_inb_with_count(KBC_STATUS_PORT, &status);
    if (!(status & OUT_BUF_FULL)) {
        /* Nothing to read. */
        status_error = true;
        return;
    }
    sys_inb_with_count(KBC_OUT_BUF, &byte_read);
    if (status & (PARITY_ERROR | RECEIVE_TIME_OUT)) {
        /* Data read but unreliable: discard it. */
        status_error = true;
        return;
    }
    scancode = byte_read;
    status_error = false;
}
/* Re-enables keyboard interrupt generation in the KBC command byte:
 * reads the current command byte, ORs in the interrupt-enable bit and
 * writes it back. Returns 0 on success, 1 if any KBC command fails. */
int reenable_keyboard_interrupts() {
    if(kbc_issue_command(READ_CMD_BYTE))
        return 1;
    uint8_t command_byte;
    if(kbc_read_return_value(&command_byte))
        return 1;
    uint8_t arg = command_byte | ENABLE_INTERRUPTS;
    if(kbc_issue_command_with_arg(WRITE_CMD_BYTE, arg))
        return 1;
    return 0;
}
|
package com.katus.common.io;
import com.katus.common.util.IOUtils;
import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
/**
* @author <NAME>
* @version 1.0, 2021-05-24
*/
/**
 * File-system manipulation facade: a uniform API over concrete storage
 * back-ends, covering existence checks, directory management, streamed /
 * text I/O, compression, and metadata queries.
 * (Javadoc translated from the original Chinese.)
 */
public interface FsManipulator {
    /**
     * Checks whether a file or directory exists.
     * @param path path
     * @return whether the path exists
     * @throws IOException on I/O error
     */
    boolean exists(String path) throws IOException;
    /**
     * Checks whether the path denotes a file.
     * @param path path
     * @return whether the path is a file
     * @throws IOException on I/O error
     */
    boolean isFile(String path) throws IOException;
    /**
     * Checks whether the path denotes a directory.
     * Default: anything that is not a file is treated as a directory.
     * @param path path
     * @return whether the path is a directory
     * @throws IOException on I/O error
     */
    default boolean isDirectory(String path) throws IOException {
        return !this.isFile(path);
    }
    /**
     * Lists the immediate children of the path (single level only).
     * @param path path
     * @return array of child paths
     * @throws IOException on I/O error
     */
    String[] list(String path) throws IOException;
    /**
     * Creates a single directory.
     * @param path directory path
     * @return whether the operation succeeded
     * @throws IOException on I/O error
     */
    boolean makeDirectory(String path) throws IOException;
    /**
     * Creates a directory including any missing parent directories.
     * @param path directory path
     * @return whether the operation succeeded
     * @throws IOException on I/O error
     */
    boolean makeDirectories(String path) throws IOException;
    /**
     * Creates an empty file.
     * @param path file path
     * @return whether the operation succeeded
     * @throws IOException on I/O error
     */
    boolean createFile(String path) throws IOException;
    /**
     * Recursively deletes every given path.
     * @param paths paths to delete
     * @throws IOException on I/O error
     */
    default void deleteAll(String... paths) throws IOException {
        for (String path : paths) {
            this.delete(path);
        }
    }
    /**
     * Recursively deletes a path.
     * @param path path to delete
     * @throws IOException on I/O error
     */
    void delete(String path) throws IOException;
    /**
     * Opens a byte input stream for the path.
     * @param path path
     * @return byte input stream
     * @throws IOException on I/O error
     */
    InputStream read(String path) throws IOException;
    /**
     * Opens a character input stream for the path.
     * @param path path
     * @param charset character set used for decoding
     * @return character input stream
     * @throws IOException on I/O error
     */
    default Reader readAsText(String path, Charset charset) throws IOException {
        return new BufferedReader(new InputStreamReader(this.read(path), charset));
    }
    default Reader readAsText(String path) throws IOException {
        return this.readAsText(path, StandardCharsets.UTF_8);
    }
    /**
     * Opens a byte output stream for the path (overwrites).
     * @param path path
     * @return byte output stream
     * @throws IOException on I/O error
     */
    OutputStream write(String path) throws IOException;
    /**
     * Opens a character output stream for the path (overwrites).
     * @param path path
     * @param charset character set used for encoding
     * @return character output stream
     * @throws IOException on I/O error
     */
    default Writer writeAsText(String path, Charset charset) throws IOException {
        return new BufferedWriter(new OutputStreamWriter(this.write(path), charset));
    }
    default Writer writeAsText(String path) throws IOException {
        return this.writeAsText(path, StandardCharsets.UTF_8);
    }
    /**
     * Opens an appending byte output stream for the path.
     * @param path path
     * @return appending byte output stream
     * @throws IOException on I/O error
     */
    OutputStream append(String path) throws IOException;
    /**
     * Opens an appending character output stream for the path.
     * @param path path
     * @param charset character set used for encoding
     * @return appending character output stream
     * @throws IOException on I/O error
     */
    default Writer appendAsText(String path, Charset charset) throws IOException {
        return new BufferedWriter(new OutputStreamWriter(this.append(path), charset));
    }
    default Writer appendAsText(String path) throws IOException {
        return this.appendAsText(path, StandardCharsets.UTF_8);
    }
    /**
     * Copies a file.
     * NOTE(review): relies on IOUtils.copyBytes(is, os, 4096, true) to close
     * both streams — presumably the trailing `true` means "close"; confirm,
     * otherwise the streams leak when copyBytes throws.
     * @param src source path
     * @param dest destination path
     * @throws IOException on I/O error
     */
    default void copy(String src, String dest) throws IOException {
        InputStream is = this.read(src);
        OutputStream os = this.write(dest);
        IOUtils.copyBytes(is, os, 4096, true);
    }
    /**
     * Renames / moves a file.
     * @param src original path
     * @param dest new path
     * @return whether the operation succeeded
     * @throws IOException on I/O error
     */
    boolean rename(String src, String dest) throws IOException;
    // NOTE(review): despite the name, "remove" is an alias for rename/move,
    // not a deletion — see delete(String) for removal.
    default boolean remove(String src, String dest) throws IOException {
        return this.rename(src, dest);
    }
    /**
     * Compresses multiple files/directories (default archive format).
     * @param inputs file/directory paths to compress
     * @param output full output path of the archive (lower-case extension)
     * @throws IOException on I/O error
     */
    void compress(String[] inputs, String output) throws IOException;
    /**
     * Decompresses an archive (default format). The extracted content is
     * placed in a new directory under outputDir named after the archive
     * (without its extension).
     * @param input full path of the archive
     * @param outputDir output directory for the extracted content
     * @throws IOException on I/O error
     */
    void decompress(String input, String outputDir) throws IOException;
    /**
     * Returns the home directory.
     * @return home directory
     */
    String getHomeDirectory();
    /**
     * Returns the modification time of a file/directory.
     * @param path path
     * @return modification time
     * @throws IOException on I/O error
     */
    Date modificationTime(String path) throws IOException;
    /**
     * Returns the size of a file in bytes.
     * @param path file path
     * @return for a directory, the total size of all contained files
     * @throws IOException on I/O error
     */
    long size(String path) throws IOException;
    /**
     * Reads up to {@code size} lines of text from a file
     * ({@code size == -1} reads all lines).
     * @param path file path
     * @param size number of lines to read
     * @param charset decoding character set
     * @return list of lines
     * @throws IOException on I/O error
     */
    default List<String> readToLines(String path, int size, Charset charset) throws IOException {
        List<String> lines = new ArrayList<>();
        BufferedReader reader = new BufferedReader(this.readAsText(path, charset));
        String line;
        while ((line = reader.readLine()) != null) {
            lines.add(line);
            if (size != -1 && lines.size() >= size) break;
        }
        IOUtils.closeAll(reader);
        return lines;
    }
    default List<String> readToLines(String path, int size) throws IOException {
        return this.readToLines(path, size, StandardCharsets.UTF_8);
    }
    default List<String> readToLines(String path) throws IOException {
        return this.readToLines(path, -1, StandardCharsets.UTF_8);
    }
    /**
     * Returns a line iterator over a text file.
     * @param path text file path
     * @param charset character set
     * @return line iterator
     */
    LineIterator getLineIterator(String path, Charset charset) throws IOException;
    default LineIterator getLineIterator(String path) throws IOException {
        return getLineIterator(path, StandardCharsets.UTF_8);
    }
    /**
     * Reads a whole file as text. Lines are joined with '\n' and the final
     * trailing newline is removed.
     * @param path file path
     * @param charset decoding character set
     * @return file content as text
     * @throws IOException on I/O error
     */
    default String readToText(String path, Charset charset) throws IOException {
        StringBuilder sb = new StringBuilder();
        List<String> lines = this.readToLines(path, -1, charset);
        for (String line : lines) {
            sb.append(line).append("\n");
        }
        if (!lines.isEmpty()) {
            sb.deleteCharAt(sb.length() - 1);
        }
        return sb.toString();
    }
    default String readToText(String path) throws IOException {
        return this.readToText(path, StandardCharsets.UTF_8);
    }
    /**
     * Writes lines of text to a file, each terminated with '\n'.
     * @param path file path
     * @param content lines to write
     * @param charset encoding character set
     * @throws IOException on I/O error
     */
    default void writeTextToFile(String path, Collection<String> content, Charset charset) throws IOException {
        BufferedWriter writer = new BufferedWriter(this.writeAsText(path, charset));
        for (String line : content) {
            writer.write(line + "\n");
        }
        IOUtils.closeAll(writer);
    }
    default void writeTextToFile(String path, Collection<String> content) throws IOException {
        this.writeTextToFile(path, content, StandardCharsets.UTF_8);
    }
}
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copies each pod framework into
# the app bundle's Frameworks folder, strips architectures the target cannot
# run, and re-signs the result when code signing is enabled.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework at $1 (resolved against BUILT_PRODUCTS_DIR, its
# basename, or taken literally) into the app's Frameworks folder, then
# strips and re-signs it as needed.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare dylib when the copied item is not a .framework bundle.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Per-configuration framework list generated by CocoaPods.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "Pods-movieViewer/AFNetworking.framework"
  install_framework "Pods-movieViewer/MBProgressHUD.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "Pods-movieViewer/AFNetworking.framework"
  install_framework "Pods-movieViewer/MBProgressHUD.framework"
fi
|
<filename>native/native.go
package main
import (
"encoding/binary"
"encoding/json"
"errors"
"io"
"io/ioutil"
"log"
"os"
"sync"
"time"
)
// debug writes v to the log prefixed with the process id, so several
// native-host instances can be told apart in a shared log.
func debug(v ...interface{}) {
	log.Println("kindred | pid:", os.Getpid(), v)
}
// inputJSON is the request format received from the browser extension over
// the native-messaging channel.
type inputJSON struct {
	Type string `json:"type"`
	Files []string `json:"files"`
}

// outputJSON carries a successfully read file back to the extension.
type outputJSON struct {
	File string `json:"file"`
	Data string `json:"data"`
}

// outputErrJSON reports a per-file read failure to the extension.
type outputErrJSON struct {
	File string `json:"file"`
	Error string `json:"error"`
}

// fileList is the set of watched files, guarded by Mutex because it is
// shared between the stdin-reader goroutine and the polling loop in main.
type fileList struct {
	Mutex *sync.Mutex
	Files []*fileInfo
}
// Refresh forces the named files to be re-read on the next poll by
// resetting their recorded modification time and clearing any sticky error.
func (fl *fileList) Refresh(refresh []string) {
	debug("Refreshing files...", refresh)
	fl.Mutex.Lock()
	defer fl.Mutex.Unlock()
	for _, name := range refresh {
		for _, entry := range fl.Files {
			if entry.File == name {
				entry.ReadModTime = time.Time{}
				entry.Error = nil
			}
		}
	}
}
// Update replaces the watched-file set with files, carrying over the
// last-read timestamp of any file that was already being tracked.
func (fl *fileList) Update(files []string) {
	debug("Updating with...", files)
	fl.Mutex.Lock()
	defer fl.Mutex.Unlock()
	var refreshed []*fileInfo
	for _, path := range files {
		entry := fileInfo{File: path}
		// copy over ReadModTime from the previously tracked entry, if any
		for _, known := range fl.Files {
			if known.File == entry.File {
				entry.ReadModTime = known.ReadModTime
			}
		}
		refreshed = append(refreshed, &entry)
	}
	fl.Files = refreshed
}
// ReadAll reads every tracked file and emits its content (or a read error)
// as a framed JSON message on stdout. Files whose previous read failed are
// skipped until Update/Refresh clears the error.
func (fl *fileList) ReadAll() {
	fl.Mutex.Lock()
	defer fl.Mutex.Unlock()
	for _, entry := range fl.Files {
		if entry.Error != nil {
			// Sticky failure; Update()/Refresh() will clear it.
			continue
		}
		data, readErr := entry.Read()
		if readErr != nil {
			entry.Error = readErr
			debug("File:", entry.File)
			debug("fileInfo#Read error:", readErr)
			payload, marshalErr := json.Marshal(outputErrJSON{File: entry.File, Error: readErr.Error()})
			if marshalErr != nil {
				debug(marshalErr)
			}
			output(os.Stdout, payload)
			continue
		}
		debug("Read:", entry.File, "; Got", len(data), "bytes")
		payload, marshalErr := json.Marshal(outputJSON{File: entry.File, Data: string(data)})
		if marshalErr != nil {
			debug(marshalErr)
		}
		output(os.Stdout, payload)
	}
}
// fileInfo tracks one watched file: the modification time at the last
// successful read and any sticky read error (cleared by Update/Refresh).
type fileInfo struct {
	File string
	ReadModTime time.Time
	Error error
}
// Read wraps _Read so every possible error surfaces through a single
// return, making it simple for callers to record it on fileInfo.Error.
func (f *fileInfo) Read() ([]byte, error) {
	return f._Read()
}
// _Read returns the file's contents, or (nil, nil) when the file has not
// been modified since the previous successful read, or an error when the
// file cannot be opened/statted or would exceed the 1MB message limit.
//
// NOTE(review): ReadModTime is recorded before the content is read, so a
// failure in ReadAll below would skip the content until the mtime changes
// again — confirm that is intended.
func (f *fileInfo) _Read() ([]byte, error) {
	file, err := os.Open(f.File)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	fileInfo, err := file.Stat()
	if err != nil {
		return nil, err
	}
	fmod := fileInfo.ModTime()
	// Fix: compare with Equal rather than ==. Comparing time.Time values
	// with == also compares the monotonic clock reading and Location, which
	// can make otherwise-identical timestamps compare unequal.
	if fmod.Equal(f.ReadModTime) {
		return nil, nil
	}
	if fileInfo.Size() > 999950 { // 50 bytes off the 1MB native-messaging limit
		return nil, errors.New("File will probably exceed 1MB limit")
	}
	f.ReadModTime = fmod
	data, err := ioutil.ReadAll(file)
	return data, err
}
/**
 * Reads one native-messaging frame from r: a 4-byte little-endian length
 * prefix followed by that many payload bytes.
 * Returns (0, nil) when no complete frame could be read.
 * @param io.Reader r Reader
 * @return int message size
 * @return []byte message
 */
func input(r io.Reader) (int, []byte) {
	// Fix: use io.ReadFull instead of a single r.Read. A pipe may deliver
	// the 4-byte header or the payload in several chunks; a lone Read could
	// return short and desynchronize the framing.
	l := make([]byte, 4)
	if _, err := io.ReadFull(r, l); err != nil {
		if err != io.EOF {
			debug("Read data length input error:", err)
		}
		return 0, nil
	}
	msgSize := binary.LittleEndian.Uint32(l)
	msgBytes := make([]byte, msgSize)
	rlen, err := io.ReadFull(r, msgBytes)
	if err != nil {
		debug("Reading data from input error:", err)
		return 0, nil
	}
	return rlen, msgBytes
}
/**
 * Writes msg to w framed for native messaging: a 4-byte little-endian
 * length prefix followed by the payload itself.
 * @param io.Writer w
 * @param []byte msg
 */
func output(w io.Writer, msg []byte) {
	if err := binary.Write(w, binary.LittleEndian, uint32(len(msg))); err != nil {
		debug("failed to write data length:", err)
		return
	}
	written, err := w.Write(msg)
	if written != len(msg) {
		debug("mismatch writing, wrote:", written, " but expecting:", len(msg))
	}
	if err != nil {
		debug("Write error:", err)
	}
}
/**
 * Continuously reads framed JSON requests from stdin (sent by the chrome
 * extension) and dispatches them: "read" replaces the watched-file set,
 * "refresh" forces named files to be re-read.
 */
func readStdin(fList *fileList) {
	for {
		_, msg := input(os.Stdin)
		if msg == nil {
			continue
		}
		req := inputJSON{}
		if err := json.Unmarshal(msg, &req); err != nil {
			debug(err)
		}
		switch req.Type {
		case "read":
			fList.Update(req.Files)
		case "refresh":
			fList.Refresh(req.Files)
		}
	}
}
// main starts the native host: one goroutine consumes requests from the
// extension on stdin while this loop polls the tracked files every 300ms,
// emitting changed contents on stdout.
func main() {
	debug("kindred native started.")
	fList := &fileList{Mutex: &sync.Mutex{}}
	go readStdin(fList)
	for {
		fList.ReadAll()
		time.Sleep(300 * time.Millisecond)
	}
}
|
<reponame>IzaacBaptista/ads-senac
package view;
import java.awt.Container;
import java.awt.GridLayout;
import java.awt.FlowLayout;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JTextField;
public class TelaCadastroVeiculo extends JFrame {
JMenuItem menuItemAbrir = new JMenuItem("Abrir");
JMenuItem menuItemNovo = new JMenuItem("Novo");
JMenuItem menuItemSalvar = new JMenuItem("Salvar");
JMenuItem menuItemExportar = new JMenuItem("Exportar");
JMenuItem menuItemFechar = new JMenuItem("Fechar");
JMenuItem menuItemColar = new JMenuItem("Colar");
JMenuItem menuItemCopiar = new JMenuItem("Copiar");
JMenuItem menuItemRecortar = new JMenuItem("Recortar");
JMenuItem menuItemSubstituir = new JMenuItem("Substituir");
JMenuItem menuItemLocalizar = new JMenuItem("Localizar");
JMenuItem menuItemOpcoesAvancadas = new JMenuItem("Opções Avançadas");
JMenuItem menuItemTutoriais = new JMenuItem("Tutoriais");
JMenuItem menuItemContato = new JMenuItem("Contato");
JMenuItem menuItemAtualizacoes = new JMenuItem("Updates");
JMenuItem menuItemLogin = new JMenuItem("Login");
JMenuItem menuItemLogout = new JMenuItem("Sair");
JMenu menuArquivo = new JMenu("Arquivo");
JMenu menuEditar = new JMenu("Editar");
JMenu menuOpcoes = new JMenu("Opções");
JMenu menuAjuda = new JMenu("Ajuda");
JMenu menuSobre = new JMenu("Sobre");
JMenu menuConta = new JMenu("Conta");
JMenuBar menuBar = new JMenuBar();
JTextField campoPlaca = new JTextField();
JTextField campoRenavam = new JTextField();
JTextField campoMarca = new JTextField();
JTextField campoModelo = new JTextField();
JTextField campoChassi = new JTextField();
JTextField campoAno = new JTextField();
JTextField campoCor = new JTextField();
JLabel placa = new JLabel("Placa:");
JLabel renavam = new JLabel("Renavam:");
JLabel marca = new JLabel("Marca:");
JLabel modelo = new JLabel("Modelo:");
JLabel chassi = new JLabel("Chassi:");
JLabel ano = new JLabel("Ano:");
JLabel cor = new JLabel("Cor:");
JTextArea textArea = new JTextArea("Comentários", 10, 20);
JScrollPane scrollPane = new JScrollPane(textArea);
JButton oK = new JButton("OK");
JButton cancelarCadastro = new JButton("Cancelar");
public TelaCadastroVeiculo(String titulo) {
super(titulo);
}
private void mostrarTela() {
menuArquivo.add(menuItemAbrir);
menuArquivo.add(menuItemNovo);
menuArquivo.add(menuItemSalvar);
menuArquivo.add(menuItemExportar);
menuArquivo.addSeparator();
menuArquivo.add(menuItemFechar);
menuEditar.add(menuItemColar);
menuEditar.add(menuItemCopiar);
menuEditar.add(menuItemRecortar);
menuEditar.addSeparator();
menuEditar.add(menuItemLocalizar);
menuEditar.add(menuItemSubstituir);
menuOpcoes.add(menuItemOpcoesAvancadas);
menuAjuda.add(menuItemTutoriais);
menuSobre.add(menuItemAtualizacoes);
menuSobre.add(menuItemContato);
menuConta.add(menuItemLogin);
menuConta.add(menuItemLogout);
menuBar.add(menuArquivo);
menuBar.add(menuEditar);
menuBar.add(menuOpcoes);
menuBar.add(menuAjuda);
menuBar.add(menuSobre);
menuBar.add(menuConta);
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
setLayout(new FlowLayout());
Container cp = getContentPane();
JPanel panel = new JPanel();
panel.setLayout(new GridLayout(4, 4));
panel.add(placa);
panel.add(campoPlaca);
panel.add(renavam);
panel.add(campoRenavam);
panel.add(marca);
panel.add(campoMarca);
panel.add(modelo);
panel.add(campoModelo);
panel.add(chassi);
panel.add(campoChassi);
panel.add(ano);
panel.add(campoAno);
panel.add(cor);
panel.add(campoCor);
cp.add(panel);
JPanel panel2 = new JPanel();
panel2.setLayout(new GridLayout(1, 1));
panel2.add(textArea);
cp.add(panel2);
JPanel panel3 = new JPanel();
panel3.setLayout(new GridLayout(2, 2));
panel3.add(oK);
panel3.add(cancelarCadastro);
cp.add(panel3);
scrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
scrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
pack();
this.setSize(600, 300);
this.setJMenuBar(menuBar);
this.setVisible(true);
this.setResizable(false);
}
public static void main(String[] args) {
TelaCadastroVeiculo executar = new TelaCadastroVeiculo("Cadastro Veiculo");
executar.mostrarTela();
}
} |
<gh_stars>10-100
package event_bus

// Emit broadcasts event with args to registered listeners.
// The implementation is currently a stub: events are dropped.
// (Removed the empty import() and type() blocks, which are unidiomatic Go.)
func Emit(event string, args ...interface{}) {
}
|
<filename>backend/src/main/java/com/pharmacySystem/DTOs/PatientDTO.java<gh_stars>1-10
package com.pharmacySystem.DTOs;
import com.pharmacySystem.model.user.User;
/**
 * Transfer object exposing a patient's public identity fields
 * (id, name, surname, email) without the rest of the User entity.
 */
public class PatientDTO {
    private long id;
    private String name;
    private String surname;
    private String email;
    /** No-arg constructor for serialization frameworks. */
    public PatientDTO() {
        super();
    }
    /** Builds the DTO by copying the identity fields from a User entity. */
    public PatientDTO(User user) {
        super();
        this.id = user.getId();
        this.name = user.getName();
        this.surname = user.getSurname();
        this.email = user.getEmail();
    }
    /** Builds the DTO from explicit field values. */
    public PatientDTO(long id, String name, String surname, String email) {
        super();
        this.id = id;
        this.name = name;
        this.surname = surname;
        this.email = email;
    }
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getSurname() {
        return surname;
    }
    public void setSurname(String surname) {
        this.surname = surname;
    }
    public String getEmail() {
        return email;
    }
    public void setEmail(String email) {
        this.email = email;
    }
}
|
#!/bin/sh
# Portable shell prompt (sh/bash/zsh): command status + distro + cwd + git bar.
_sps_hostname=$(hostname | sed 's/\..*//')
# Raw ESC byte used to build the color sequences below.
_e=$(printf "\033")
# In bash, non-printing sequences are wrapped in \001/\002 so readline can
# compute the prompt width correctly; other shells leave these empty.
_esc= _end=
[ -n "$BASH_VERSION" ] && _esc=$(printf '\001') _end=$(printf '\002')
# Derives a short, upper-case distro tag from the ID field of
# /etc/os-release. Prints nothing when /etc/os-release is absent. Names
# longer than 15 characters after normalization are abbreviated to their
# initials instead.
_SPS_detect_distro() {
	[ -f /etc/os-release ] || return
	distro=$(sed -nE 's/^ID="([^"]+)".*/\1/p' /etc/os-release)
	normalized=$(echo "$distro" | sed -E '
	# Remove all buzzwords and extraneous words.
	s/(GNU|Secure|open)//ig
	:buzzwords
	s/(^|[[:space:][:punct:]]+)(LTS|toolkit|operating|solutions|Security|Firewall|Cluster|Distribution|system|project|interim|enterprise|corporate|server|desktop|studio|edition|live|libre|industrial|incognito|remix|and|on|a|for|the|[0-9]+)($|[[:space:][:punct:]]+)/\1\3/i
	t buzzwords
	# Remove GNU or Linux not at the beginning of phrase, or X
	# as a word by itself not at the beginning of phrase.
	:gnulinux
	s,([[:space:][:punct:]]+)(GNU|Linux|X([[:space:][:punct:]]|$)),\1,i
	t gnulinux
	# Trim space/punctuation from start/end.
	s/[[:space:][:punct:]]+$//
	s/^[[:space:][:punct:]]+//
	# Normalize all SUSE products to SUSE.
	s/.*(^|[[:space:][:punct:]])[Ss]USE($|[[:space:][:punct:]]).*/SUSE/i
	t
	# Remove everyting before the first /, if what is after is
	# longer than 3 characters.
	s;.+/(.{3,});\1;
	# Replace all space sequences with underscore.
	s/[[:space:]]+/_/g
	# Keep names with one hyphen, replace all other punctuation
	# sequnces with underscore.
	/^[^-]+-[^-]+$/!{
	s/[[:punct:]]+/_/g
	}
	' | tr '[:lower:]' '[:upper:]');
	# If normalized name is longer than 15 characters, abbreviate
	# instead.
	if [ "$(printf %s "$normalized" | wc -c)" -gt 15 ]; then
		normalized=$(echo "$distro" | sed -E '
		:abbrev
		s/(^|[[:space:][:punct:]]+)([[:alpha:]])[[:alpha:]]+/\1\2/
		t abbrev
		s/[[:space:][:punct:]]+//g
		' | tr '[:lower:]' '[:upper:]')
	fi
	echo "$normalized"
	unset distro normalized
}
# Sets _sps_env to a label for the current OS/distro.
_SPS_detect_env() {
	case "$(uname -o)" in
	Msys)
		# On MSYS the label is resolved at prompt time from $MSYSTEM
		# (kept single-quoted on purpose).
		_sps_env='$MSYSTEM'
		;;
	*Linux)
		_sps_env=$(_SPS_detect_distro)
		if [ -z "$_sps_env" ]; then
			_sps_env=LINUX
		fi
		;;
	*)
		_sps_env=$(uname -o | \
			sed -E 's/[[:space:][:punct:]]+/_/g' | \
			tr '[:lower:]' '[:upper:]' \
		)
		;;
	esac
}
# Prints the environment label in magenta.
_SPS_env() {
	eval printf "\"${_esc}${_e}\""\''[0;95m'\'"\"${_end}%s\"" "\"$_sps_env\""
}
# Prints a green 'v' when the previous command succeeded, a red 'x'
# otherwise. NOTE(review): relies on $? at entry, so it must run before
# anything else in the prompt clobbers the exit status.
_SPS_cmd_status() {
	if [ "$?" -eq 0 ]; then
		printf "${_esc}${_e}[0;32m${_end}%s" 'v'
	else
		printf "${_esc}${_e}[0;31m${_end}%s" 'x'
	fi
}
# Returns 0 when $PWD is inside a git work tree (walks up to / looking for
# a .git directory), 1 otherwise.
_SPS_in_git_tree() {
	OLDPWD=$PWD
	_matched=
	while [ "$PWD" != / ]; do
		if [ -d .git ]; then
			_matched=1
			break
		fi
		cd ..
	done
	cd "$OLDPWD"
	if [ -n "$_matched" ]; then
		unset OLDPWD _matched
		return 0
	fi
	unset OLDPWD _matched
	return 1
}
# Prints a green 'v' for a clean and up-to-date work tree, a red '~~~'
# otherwise (parses `git status` output, forced to English with LANG=C).
_SPS_git_status() {
	_status=$(LANG=C git status 2>/dev/null)
	_clean=
	if echo "$_status" | grep -Eq 'working tree clean'; then
		if echo "$_status" | grep -Eq '^Your branch is up to date with'; then
			_clean=1
		fi
	fi
	if [ -n "$_clean" ]; then
		printf "${_esc}${_e}[0;32m${_end}%s" 'v'
	else
		printf "${_esc}${_e}[0;31m${_end}%s" '~~~'
	fi
	unset _status _clean
}
# Prints "[branch]" — with a status marker appended when SPS_STATUS is set —
# when inside a git repository; prints nothing otherwise.
_SPS_git_bar() {
	! _SPS_in_git_tree && return 0
	_br=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
	_status=
	if [ -n "$SPS_STATUS" ]; then
		_status="${_esc}${_e}[0;97m${_end}|${_esc}${_e}[0m${_end}$(_SPS_git_status)"
	fi
	if [ -n "$_br" ]; then
		printf "${_esc}${_e}[0;36m${_end}[${_esc}${_e}[35m${_end}%s%s${_esc}${_e}[36m${_end}]${_esc}${_e}[0m${_end}" "$_br" "$_status"
	fi
	unset _br _status
}
# Prints the working directory in yellow, abbreviating $HOME to ~.
_SPS_cwd() {
	case "$PWD" in
	"$HOME")
		printf "${_esc}${_e}[33m${_end}%s" '~'
		;;
	"$HOME"/*)
		_pwd=${PWD#$HOME}
		# Strip any leading slashes left after removing the $HOME prefix.
		while :; do
			case "$_pwd" in
			/*)
				_pwd=${_pwd#/}
				;;
			*)
				break
				;;
			esac
		done
		printf "${_esc}${_e}[33m${_end}~/%s" "${_pwd}"
		;;
	*)
		printf "${_esc}${_e}[33m${_end}%s" "${PWD}"
		;;
	esac
}
_SPS_detect_env
# Fall back to whoami when $USER is unset.
: ${USER:=$(whoami)}
if [ -z "$ZSH_VERSION" ]; then
	# sh/bash: backquoted substitutions in PS1 re-run on every prompt.
	PS1='`_SPS_cmd_status` `_SPS_env` `_SPS_cwd` `_SPS_git_bar`
'"${_esc}${_e}[38;2;140;206;250m${_end}${USER}${_esc}${_e}[1;97m${_end}@${_esc}${_e}[0m${_e}[38;2;140;206;250m${_end}${_sps_hostname} ${_esc}${_e}[38;2;220;20;60m${_end}>${_esc}${_e}[0m${_end} "
else # zsh
	setopt PROMPT_SUBST
	# zsh: emit the status line from precmd instead of embedding it in PS1;
	# %{...%} brackets the non-printing sequences for width calculation.
	precmd() {
		echo "$(_SPS_cmd_status) $(_SPS_env) $(_SPS_cwd) $(_SPS_git_bar)"
	}
	PS1="%{${_e}[38;2;140;206;250m%}${USER}%{${_e}[1;97m%}@%{${_e}[0m${_e}[38;2;140;206;250m%}${_sps_hostname} %{${_e}[38;2;220;20;60m%}>%{${_e}[0m%} "
fi
unset _sps_hostname
|
#pragma once
#include <eosiolib/utility.hpp>
#include <eosiolib/preprocessor/seq/for_each.hpp>
#include <eosiolib/preprocessor/seq/enum.hpp>
#include <eosiolib/preprocessor/seq/size.hpp>
#include <eosiolib/preprocessor/seq/seq.hpp>
#include <eosiolib/preprocessor/stringize.hpp>
namespace eosio {
/// Primary template: a type is considered unreflected (and not an enum)
/// until one of the EOSLIB_REFLECT* macros specializes this struct for it.
template<typename T>
struct reflector {
typedef false_type is_reflected;
typedef false_type is_enum;
};
} /// eosio
// Visit a reflected base class with a stateless visitor (no instance).
#define EOSLIB_REFLECT_VISIT_BASE(r, visitor, base) \
eosio::reflector<base>::visit( visitor );
// Visit a reflected base class of instance `t` (t is in scope at expansion).
#define EOSLIB_REFLECT_VISIT2_BASE(r, visitor, base) \
eosio::reflector<base>::visit( t, forward<Visitor>(visitor) );
// Visit one member as a pointer-to-member (no instance required).
#define EOSLIB_REFLECT_VISIT_MEMBER( r, visitor, elem ) \
{ typedef decltype((static_cast<type*>(nullptr))->elem) member_type; \
visitor( &type::elem ); \
}
// Visit one member's value on instance `t`.
#define EOSLIB_REFLECT_VISIT2_MEMBER( r, visitor, elem ) \
{ typedef decltype((static_cast<type*>(nullptr))->elem) member_type; \
visitor( t.elem ); \
}
// Fold a base class's total_member_count into a count expression.
#define EOSLIB_REFLECT_BASE_MEMBER_COUNT( r, OP, elem ) \
OP eosio::reflector<elem>::total_member_count
// Fold one member (counts as 1) into a count expression.
#define EOSLIB_REFLECT_MEMBER_COUNT( r, OP, elem ) \
OP 1
// Generates the three in-class visit() overloads: stateless (member
// pointers), const instance, and mutable instance.  Bases are visited
// before members in every overload.
#define EOSLIB_REFLECT_DERIVED_IMPL_INLINE( TYPE, INHERITS, MEMBERS ) \
template<typename Visitor>\
static inline void visit( Visitor&& v ) { \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT_BASE, v, INHERITS ) \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT_MEMBER, v, MEMBERS ) \
} \
template<typename Visitor>\
static inline void visit( const type& t, Visitor&& v ) { \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT2_BASE, v, INHERITS ) \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT2_MEMBER, v, MEMBERS ) \
} \
template<typename Visitor>\
static inline void visit( type& t, Visitor&& v ) { \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT2_BASE, v, INHERITS ) \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT2_MEMBER, v, MEMBERS ) \
}
// Generates an out-of-class definition of the stateless visit() declared
// by EOSLIB_REFLECT_FWD.
#define EOSLIB_REFLECT_DERIVED_IMPL_EXT( TYPE, INHERITS, MEMBERS ) \
template<typename Visitor>\
void eosio::reflector<TYPE>::visit( Visitor&& v ) { \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT_BASE, v, INHERITS ) \
BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_VISIT_MEMBER, v, MEMBERS ) \
}
/**
 *  @def EOSLIB_REFLECT(TYPE,MEMBERS)
 *  @brief Specializes eosio::reflector for TYPE
 *
 *  @param MEMBERS - a sequence of member names.  (field1)(field2)(field3)
 *
 *  @see EOSLIB_REFLECT_DERIVED
 */
#define EOSLIB_REFLECT( TYPE, MEMBERS ) \
EOSLIB_REFLECT_DERIVED( TYPE, BOOST_PP_SEQ_NIL, MEMBERS )
// Reflection for a class template; TEMPLATE_ARGS is a preprocessor seq of
// template parameters, e.g. (typename T)(int N).
#define EOSLIB_REFLECT_TEMPLATE( TEMPLATE_ARGS, TYPE, MEMBERS ) \
EOSLIB_REFLECT_DERIVED_TEMPLATE( TEMPLATE_ARGS, TYPE, BOOST_PP_SEQ_NIL, MEMBERS )
// Reflection for a type with no bases and no members.
#define EOSLIB_REFLECT_EMPTY( TYPE ) \
EOSLIB_REFLECT_DERIVED( TYPE, BOOST_PP_SEQ_NIL, BOOST_PP_SEQ_NIL )
// Forward-declares the reflector specialization; pair with
// EOSLIB_REFLECT_IMPL / EOSLIB_REFLECT_DERIVED_IMPL_EXT for the definitions.
// NOTE(review): this macro takes only TYPE, yet its expansion references
// MEMBERS and INHERITS -- those identifiers must already be defined at the
// expansion site or the expansion will not compile.  Looks like a latent
// bug inherited from fc's FC_REFLECT_FWD; confirm whether this macro is
// actually used anywhere before relying on it.
#define EOSLIB_REFLECT_FWD( TYPE ) \
namespace eosio { \
template<> struct reflector<TYPE> {\
typedef TYPE type; \
typedef eosio::true_type is_reflected; \
enum member_count_enum { \
local_member_count = BOOST_PP_SEQ_SIZE(MEMBERS), \
total_member_count = local_member_count BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_BASE_MEMBER_COUNT, +, INHERITS )\
}; \
template<typename Visitor> static void visit( Visitor&& v ); \
template<typename Visitor> static void visit( const type& t, Visitor&& v ); \
template<typename Visitor> static void visit( type& t, Visitor&& v ); \
}; }
// Out-of-line reflection impl for a TYPE with no bases.
// Fixed: this previously expanded to EOSLIB_REFLECT_IMPL_DERIVED_EXT, a
// macro that is not defined anywhere in this header (the defined one is
// EOSLIB_REFLECT_DERIVED_IMPL_EXT), so any use failed to compile.
#define EOSLIB_REFLECT_DERIVED_IMPL( TYPE, MEMBERS ) \
EOSLIB_REFLECT_DERIVED_IMPL_EXT( TYPE, BOOST_PP_SEQ_NIL, MEMBERS )
// Out-of-line impl counterpart for EOSLIB_REFLECT_FWD (no bases).
#define EOSLIB_REFLECT_IMPL( TYPE, MEMBERS ) \
EOSLIB_REFLECT_DERIVED_IMPL_EXT( TYPE, BOOST_PP_SEQ_NIL, MEMBERS )
/**
 *  @def EOSLIB_REFLECT_DERIVED(TYPE,INHERITS,MEMBERS)
 *
 *  @brief Specializes eosio::reflector for TYPE where
 *         type inherits other reflected classes
 *
 *  @param INHERITS - a sequence of base class names (basea)(baseb)(basec)
 *  @param MEMBERS - a sequence of member names.  (field1)(field2)(field3)
 */
#define EOSLIB_REFLECT_DERIVED( TYPE, INHERITS, MEMBERS ) \
namespace eosio { \
template<> struct reflector<TYPE> {\
typedef TYPE type; \
typedef eosio::true_type is_reflected; \
typedef eosio::false_type is_enum; \
enum member_count_enum { \
local_member_count = 0 BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_MEMBER_COUNT, +, MEMBERS ),\
total_member_count = local_member_count BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_BASE_MEMBER_COUNT, +, INHERITS )\
}; \
EOSLIB_REFLECT_DERIVED_IMPL_INLINE( TYPE, INHERITS, MEMBERS ) \
}; }
// Reflection for a class template with bases; TEMPLATE_ARGS is a
// preprocessor seq of template parameters.
// Fixed for consistency: the specialization now also defines is_reflected
// (matching the primary reflector template and EOSLIB_REFLECT_DERIVED,
// which this specialization previously left undefined); the old is_defined
// typedef is kept for backward compatibility.
#define EOSLIB_REFLECT_DERIVED_TEMPLATE( TEMPLATE_ARGS, TYPE, INHERITS, MEMBERS ) \
namespace eosio { \
template<BOOST_PP_SEQ_ENUM(TEMPLATE_ARGS)> struct reflector<TYPE> {\
typedef TYPE type; \
typedef eosio::true_type is_reflected; \
typedef eosio::true_type is_defined; \
typedef eosio::false_type is_enum; \
enum member_count_enum { \
local_member_count = 0 BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_MEMBER_COUNT, +, MEMBERS ),\
total_member_count = local_member_count BOOST_PP_SEQ_FOR_EACH( EOSLIB_REFLECT_BASE_MEMBER_COUNT, +, INHERITS )\
}; \
EOSLIB_REFLECT_DERIVED_IMPL_INLINE( TYPE, INHERITS, MEMBERS ) \
}; }
|
package org.fasttimepicker;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.format.Time;
/** Parcelable wrapper around android.text.format.Time; the wire format is a single long millisecond timestamp. */
public final class ParcelableTime implements Parcelable {
// ////////////////////////////
// Parcelable apis
// ////////////////////////////
/** Recreates instances from the millisecond value written by {@link #writeToParcel}. */
public static final Parcelable.Creator<ParcelableTime> CREATOR =
new Parcelable.Creator<ParcelableTime>() {
public ParcelableTime createFromParcel(Parcel p) {
Time time = new Time();
time.set(p.readLong());
return new ParcelableTime(time);
}
public ParcelableTime[] newArray(int size) {
return new ParcelableTime[size];
}
};
/** No special contents (no file descriptors). */
public int describeContents() {
return 0;
}
// NOTE(review): toMillis(false) -- confirm the intended DST handling of the
// deprecated Time API; a round trip through the parcel normalizes the value
// to milliseconds.
public void writeToParcel(Parcel p, int flags) {
p.writeLong(time.toMillis(false));
}
// ////////////////////////////
// end Parcelable apis
// ////////////////////////////
// Wrapped time value; exposed by reference via asTime(), never copied.
private Time time;
/** Wraps a newly constructed Time. */
public ParcelableTime() {
this(new Time());
}
/** Wraps the given Time instance (kept by reference, not copied). */
public ParcelableTime(Time time) {
this.time = time;
}
/** Returns the wrapped Time (the internal instance, not a copy). */
public final Time asTime() {
return time;
}
}
|
class Booking:
    """A single booking: a customer name plus a date and a time."""

    def __init__(self, name, date, time):
        """Store the booking's name, date and time as given."""
        self.name = name
        self.date = date
        self.time = time

    def view_details(self):
        """Print the booking's details to stdout."""
        print("Name: ", self.name)
        print("Date: ", self.date)
        print("Time: ", self.time)

    def edit_details(self, name=None, date=None, time=None):
        """Update the booking's fields.

        Generalized (backward compatible): each field may now be passed
        directly; any field left as ``None`` is prompted for interactively,
        preserving the original prompt-only behaviour when called with no
        arguments.
        """
        self.name = name if name is not None else input("Enter new name: ")
        self.date = date if date is not None else input("Enter new date: ")
        self.time = time if time is not None else input("Enter new time: ")

    def cancel_booking(self):
        """Clear all fields and confirm the cancellation on stdout."""
        self.name = None
        self.date = None
        self.time = None
        print("Booking cancelled successfully.")
import React, { useReducer } from 'react'
import { v4 as uuid } from 'uuid';
import StrainSelectContext from './strainSelectContext';
import StrainSelectReducer from './StrainSelectReducer'
import {
ADD_STRAINSELECTION,
DELETE_STRAINSELECTION,
SET_CURRENT,
CLEAR_CURRENT
} from '../types';
const StrainSelectionState = props => {
const initialState = {
strainSelection: [],
current: null
}
const [state, dispatch] = useReducer(StrainSelectReducer, initialState);
//Add Strain Selection
const addStrainSelection = strainSelection =>{
dispatch({ type: ADD_STRAINSELECTION, payload: strainSelection})
}
//Delete Strain Selection
//Set Current Selection,
// Clear Current Selection
return (
<StrainSelectContext.Provider value={{
strainSelection: state.strainSelection,
addStrainSelection
}}>
{ props.children }
</StrainSelectContext.Provider>
)
}
export default StrainSelectionState; |
<gh_stars>0
package com.nortal.spring.cw.core.support.user;
import java.io.Serializable;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.springframework.context.annotation.Scope;
import org.springframework.context.annotation.ScopedProxyMode;
import org.springframework.stereotype.Component;
import org.springframework.validation.BindingResult;
import org.springframework.web.context.WebApplicationContext;
import com.nortal.spring.cw.core.i18n.model.Lang;
import com.nortal.spring.cw.core.web.component.composite.ControllerComponent;
/**
* @author <NAME>
*/
@Component
@Scope(value = WebApplicationContext.SCOPE_REQUEST, proxyMode = ScopedProxyMode.TARGET_CLASS)
// Request-scoped holder for per-request user/UI state: active language and
// main menu, context/application paths, a tabindex counter, the previous
// request's parameters, and binding results keyed by their target class.
public class CwUserRequestInfo implements Serializable {
private static final long serialVersionUID = 1L;
private Lang activeLanguage;
private String activeMainMenu;
private String contextPath;
private String applicationPath;
private String serverUrl;
private ControllerComponent controllerComponent;
// Monotonically increasing counter handed out by getNextElementTabindex().
private long elementTabindex = 0;
// Parameters of the previous request, kept in submission order.
private Map<String, String> requestParameters = new LinkedHashMap<>();
// Binding results keyed by the class of the bound target object.
private Map<Class<?>, BindingResult> bindingResultMap = new HashMap<>();
public Lang getActiveLanguage() {
return activeLanguage;
}
public void setActiveLanguage(Lang activeLanguage) {
this.activeLanguage = activeLanguage;
}
public String getContextPath() {
return contextPath;
}
public void setContextPath(String contextPath) {
this.contextPath = contextPath;
}
public String getApplicationPath() {
return applicationPath;
}
public void setApplicationPath(String applicationPath) {
this.applicationPath = applicationPath;
}
/** Request key for this bean's own application path. */
public String getApplicationRequestKey(Class<?> callerClass) {
return getApplicationRequestKey(callerClass, applicationPath);
}
/**
 * Builds a key of the form callerClassName_languageCode_applicationPath.
 * NOTE(review): throws a NullPointerException when activeLanguage has not
 * been set yet -- confirm callers always set the language first.
 */
public String getApplicationRequestKey(Class<?> callerClass, String applicationPath) {
return callerClass.getName().concat("_").concat(activeLanguage.getCode()).concat("_").concat(applicationPath);
}
public String getActiveMainMenu() {
return activeMainMenu;
}
public void setActiveMainMenu(String activeMainMenu) {
this.activeMainMenu = activeMainMenu;
}
/** Returns the binding result stored for the given target class, or null. */
public BindingResult getBindingResult(Class<?> targetClass) {
return bindingResultMap.get(targetClass);
}
/** Stores a binding result under its target's class; ignored when the target is null. */
public void addBindingResult(BindingResult bindingResult) {
Object target = bindingResult.getTarget();
if (target != null) {
this.bindingResultMap.put(target.getClass(), bindingResult);
}
}
public String getServerUrl() {
return serverUrl;
}
public void setServerUrl(String serverUrl) {
this.serverUrl = serverUrl;
}
/**
 * Returns the {@link ControllerComponent} of the active request.
 *
 * @return {@link ControllerComponent}
 */
public ControllerComponent getControllerComponent() {
return controllerComponent;
}
/**
 * Sets the active {@link ControllerComponent}.
 *
 * @param controllerComponent
 *            {@link ControllerComponent}
 */
public void setControllerComponent(ControllerComponent controllerComponent) {
this.controllerComponent = controllerComponent;
}
/**
 * Returns the next element tabindex.  Used to order input fields as well
 * as buttons.
 *
 * @return {@link Long}
 */
public long getNextElementTabindex() {
return ++elementTabindex;
}
/**
 * Returns the request parameter value for the given key.  If no such
 * parameter exists, <code>null</code> is returned.
 *
 * @param parameterKey
 *            {@link String} parameter key
 * @return {@link String}
 */
public String getRequestParameter(String parameterKey) {
return requestParameters.get(parameterKey);
}
/**
 * Request parameters sent to the server with the previous request.
 *
 * @param requestParameters
 *            {@link Map}
 */
public void setRequestParameters(Map<String, String> requestParameters) {
this.requestParameters = requestParameters;
}
}
|
// ==UserScript==
// @namespace https://tampermonkey.myso.kr/
// @name 네이버 블로그 보유키워드 분석
// @description 네이버 블로그 프로필에서 보유키워드를 확인할 수 있습니다.
// @copyright 2021, myso (https://tampermonkey.myso.kr)
// @license Apache-2.0
// @version 1.1.10
// @updateURL https://github.com/myso-kr/kr.myso.tampermonkey/raw/master/service/com.naver.blog-profile.keyword.counter.user.js
// @downloadURL https://github.com/myso-kr/kr.myso.tampermonkey/raw/master/service/com.naver.blog-profile.keyword.counter.user.js
// @author <NAME>
// @connect naver.com
// @match *://blog.naver.com/profile/intro*
// @grant GM_addStyle
// @grant GM_xmlhttpRequest
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/polyfill/Object.fromEntries.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-app.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-add-style.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-add-script.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-xmlhttp-request-async.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/donation.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/naver-blog.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/naver-search-nx.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/naver-search-rx.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/smart-editor-one.js
// @require https://cdnjs.cloudflare.com/ajax/libs/uuid/8.3.2/uuidv4.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.7.2/bluebird.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/handlebars.js/4.7.7/handlebars.min.js
// ==/UserScript==
// ==OpenUserJS==
// @author myso
// ==/OpenUserJS==
// Entry point: builds a "keywords owned" report on Naver blog profile pages
// by fetching the blog's posts and extracting search terms from each title.
GM_App(async function main() {
GM_donation('#post-area', 0);
GM_addStyle(`@import url(https://cdnjs.cloudflare.com/ajax/libs/toastify-js/1.11.0/toastify.min.css)`);
GM_addStyle(`
.blog-keywords-total-loading { position: relative; }
.blog-keywords-total-loading::after { content: attr(data-message); color: #fff; font-family: Lato,"Helvetica Neue" ; font-weight: 200; font-size: 16px; position: absolute; width: 100%; height: 20px; line-height: 20px; left: 0; top: 0; background-color: #e74c3c; z-index: 1; }
.blog-keywords-total-loading::before { content: ""; position: absolute; background-color: #fbb034; top: -5px; left: 0px; height: 30px; width: 0px; z-index: 0; opacity: 1; transform-origin: 100% 0%; animation: loader3 10s ease-in-out infinite; }
@keyframes loader3{ 0%{width: 0px;} 70%{width: 100%; opacity: 1;} 90%{opacity: 0; width: 100%;} 100%{opacity: 0;width: 0px;} }
#profile h2.keyword_info { padding: 5px 0; font-family: 돋움; font-size: 12px; margin-top: 30px; }
#profile h2.keyword_info img.arw01 { margin: 0 7px 1px 5px; }
#profile ul.keyword_info { list-style: none; clear: both; border-bottom: 1px solid #ccc; }
#profile ul.keyword_info li.item { font-size: 12px; display: inline-block; padding: 0.2rem 0.5rem; margin-right: 0.5rem; margin-top: 0.25rem; margin-bottom: 0.25rem; background-color: #0abf53; border-radius: 50rem; color: #fff; }
#profile ul.keyword_info li.line { dispaly:block; width: 100%; height:1px; overflow:hidden; padding: 0; margin-right: 0; background: #ccc; }
#profile ul.keyword_info li.head { dispaly:block; width: 100%; padding: 5px 0; font-family: 돋움; font-size: 12px; font-weight: bold; }
`);
// Bail out unless the profile page identifies a blog and has a container.
const uri = new URL(location.href), { blogId } = Object.fromEntries(uri.searchParams.entries()); if(!blogId) return;
const wrp = document.querySelector('#profile'); if(!wrp) return;
const cnv = wrp.querySelector('.blog-keywords-total') || document.createElement('div'); cnv.classList.add('blog-keywords-total'); wrp.append(cnv);
// Fetch blog metadata + posts, extract terms per title, group and render.
(async function redraw(categoryNo, pages = Number.MAX_SAFE_INTEGER) {
redraw.count = 0;
const blog = {};
blog.BlogInfo = await NB_blogInfo(blogId, 'BlogInfo');
blog.BlogUserInfo = await NB_blogInfo(blogId, 'BlogUserInfo');
blog.CategoryList = await NB_blogInfo(blogId, 'CategoryList');
blog.PopularPostBlockInfo = await NB_blogInfo(blogId, 'PopularPostBlockInfo');
blog.TalkTalkAndReservationInfo = await NB_blogInfo(blogId, 'TalkTalkAndReservationInfo');
const postsSizes = _.get(blog, 'CategoryList.mylogPostCount', 1);
const categories = _.get(blog, 'CategoryList.mylogCategoryList', []).filter(o=>o.openYN && o.postCnt > 0);
const posts = ((cnv.dataset.message = `콘텐츠 가져오는 중... 모든 작업을 중단하고 완료가 될 때까지 가만히 기다려 주세요...`) && categoryNo !== undefined) ? await NB_blogPostList(blogId, pages, { categoryNo }) : [];
const posts_with_terms = await Promise.mapSeries(posts, async (post)=>(cnv.dataset.message = `콘텐츠 분석 중... (${++redraw.count}/${posts.length})`, post.terms = await NX_termsParagraph(post.titleWithInspectMessage), post));
const terms = posts_with_terms.map((post)=>post.terms).flat();
const uniqs = terms.filter((word, index, keywords)=>keywords.indexOf(word) == index);
const group = uniqs.reduce((group, query, index)=>(group[index] = Object.assign({ query, count: terms.filter(item=>item==query).length }), group), []).sort((a, b)=>b.count - a.count);
const data = { blog, group, posts_with_terms, postsSizes, categories };
const tmpl = Handlebars.compile(`
<h2 class="keyword_info"><img src="https://blogimgs.pstatic.net/nblog/spc.gif" width="1" height="1" class="arw01" alt="">보유 키워드 조회</h2>
<div>
<select id="categoryNo" name="categoryNo">
<option value="0">전체 ({{postsSizes}})</option>
{{#each categories}}<option value="{{categoryNo}}">{{categoryName}} ({{postCnt}})</option>{{/each}}
</select>
<button id="submitInfo">조회</button>
</div>
<h2 class="keyword_info"><img src="https://blogimgs.pstatic.net/nblog/spc.gif" width="1" height="1" class="arw01" alt="">검색허용 생산 키워드 (총 {{group.length}}건)</h2>
<div>
<ul class="keyword_info">
<li class="line"></li>
{{#each group}}<li class="item">({{count}}) {{query}}</li>{{/each}}
</ul>
</div>
<h2 class="keyword_info"><img src="https://blogimgs.pstatic.net/nblog/spc.gif" width="1" height="1" class="arw01" alt="">검색허용 생산 콘텐츠 (총 {{posts_with_terms.length}}건)</h2>
<div>
<ul class="keyword_info">
{{#each posts_with_terms}}
<li class="line"></li>
<li class="head"><a href="https://blog.naver.com/{{blogId}}/{{logNo}}" target="_blank">{{titleWithInspectMessage}}</a></li>
{{#each terms}}<li class="item">{{@this}}</li>{{/each}}
{{/each}}
</ul>
</div>
`);
cnv.innerHTML = tmpl(data);
// Fixed: these guards previously ended in a bare ';' (an empty statement),
// so a missing element was never handled and the code below threw; they
// now bail out of the redraw instead.
const formCategoryNo = cnv.querySelector('#categoryNo'); if(!formCategoryNo) return;
const formSubmitInfo = cnv.querySelector('#submitInfo'); if(!formSubmitInfo) return;
// Fixed: the handler now declares its `event` parameter instead of relying
// on the deprecated implicit global `window.event`.
formSubmitInfo.onclick = async function(event) {
event.preventDefault();
cnv.classList.toggle('blog-keywords-total-loading', formSubmitInfo.disabled = true);
await redraw(formCategoryNo.value, pages);
cnv.classList.toggle('blog-keywords-total-loading', formSubmitInfo.disabled = false);
}
})();
});
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Dynamics/Level_Sets/PARTICLE_LEVELSET_RLE.cpp
#ifndef COMPILE_WITHOUT_RLE_SUPPORT
//#####################################################################
// Copyright 2005-2006, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Arrays_Computations/HEAPIFY.h>
#include <PhysBAM_Tools/Grids_RLE_Interpolation/AVERAGING_RLE.h>
#include <PhysBAM_Tools/Grids_RLE_Interpolation/LINEAR_INTERPOLATION_RLE.h>
#include <PhysBAM_Tools/Grids_Uniform_Interpolation/LINEAR_INTERPOLATION_MAC_2D_HELPER.h>
#include <PhysBAM_Tools/Grids_Uniform_Interpolation/LINEAR_INTERPOLATION_MAC_3D_HELPER.h>
#include <PhysBAM_Tools/Parallel_Computation/MPI_RLE_GRID.h>
#include <PhysBAM_Geometry/Grids_RLE_Collisions/GRID_BASED_COLLISION_GEOMETRY_RLE.h>
#include <PhysBAM_Geometry/Grids_RLE_Interpolation_Collidable/LINEAR_INTERPOLATION_COLLIDABLE_FACE_RLE.h>
#include <PhysBAM_Geometry/Grids_Uniform_Level_Sets/FAST_LEVELSET.h>
#include <PhysBAM_Geometry/Level_Sets/LEVELSET_UTILITIES.h>
#include <PhysBAM_Dynamics/Level_Sets/LEVELSET_CALLBACKS.h>
#include <PhysBAM_Dynamics/Level_Sets/PARTICLE_LEVELSET_RLE.h>
#include <PhysBAM_Dynamics/Parallel_Computation/MPI_RLE_PARTICLES.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// Builds a particle levelset over an RLE grid.  The removed-particle bins
// for long cells start null and are allocated lazily in
// Initialize_Particle_Levelset_Grid_Values.
template<class T_GRID> PARTICLE_LEVELSET_RLE<T_GRID>::
PARTICLE_LEVELSET_RLE(T_GRID& grid_input)
    :PARTICLE_LEVELSET<T_GRID>(grid_input,phi),grid(grid_input),use_removed_negative_particles_in_long_cells(false),use_removed_positive_particles_in_long_cells(false),
    removed_negative_particles_in_long_cells(0),removed_positive_particles_in_long_cells(0),mpi_grid(0)
{}
//#####################################################################
// Destructor
//#####################################################################
// Frees the (possibly still-null) long-cell removed-particle bins.
template<class T_GRID> PARTICLE_LEVELSET_RLE<T_GRID>::
~PARTICLE_LEVELSET_RLE()
{
delete removed_negative_particles_in_long_cells;delete removed_positive_particles_in_long_cells;
}
//#####################################################################
// Function Initialize_Particle_Levelset_Grid_Values
//#####################################################################
// Resizes phi to the current cell count and lazily clones the template
// removed-particle container for the two long-cell bins.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Initialize_Particle_Levelset_Grid_Values()
{
PARTICLE_LEVELSET<T_GRID>::Initialize_Particle_Levelset_Grid_Values();phi.Resize(grid.number_of_cells);
if(!removed_negative_particles_in_long_cells) removed_negative_particles_in_long_cells=template_removed_particles.Clone();
if(!removed_positive_particles_in_long_cells) removed_positive_particles_in_long_cells=template_removed_particles.Clone();
}
//#####################################################################
// Function Seed_Particles
//#####################################################################
// Seeds negative and positive marker particles on both sides of the
// interface, compacting each side's bins afterwards.  Levelset normals are
// computed temporarily if the caller had not already built them, and that
// state is restored before returning.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Seed_Particles(const T time,const bool verbose)
{
bool normals_defined=levelset.normals!=0;levelset.Compute_Normals(); // make sure normals are accurate
int n=Reseed_Add_Particles(negative_particles,positive_particles,-1,PARTICLE_LEVELSET_NEGATIVE,time,0);
for(int b=1;b<=negative_particles.m;b++)if(negative_particles(b)) negative_particles(b)->array_collection->Compact();
int p=Reseed_Add_Particles(positive_particles,negative_particles,1,PARTICLE_LEVELSET_POSITIVE,time,0);
// Fixed (was the "TODO: revisit" line): this loop previously iterated over
// negative_particles.m while indexing positive_particles, skipping or
// over-running bins whenever the two arrays differ in size.
for(int b=1;b<=positive_particles.m;b++)if(positive_particles(b)) positive_particles(b)->array_collection->Compact();
if(verbose) LOG::cout << n << " negative particles & " << p << " positive particles" << std::endl;
if(!normals_defined){delete levelset.normals;levelset.normals=0;}
}
//#####################################################################
// Function Attract_Individual_Particle_To_Interface
//#####################################################################
// Iteratively pulls one particle along the levelset normal until its phi
// value lies in [phi_min,phi_max], then clamps its radius to |phi|.
// Returns false -- deleting the particle -- when the attraction runs out of
// iterations, the particle leaves its block or the domain, or the
// collision-aware phi ends up out of range.
template<class T_GRID> bool PARTICLE_LEVELSET_RLE<T_GRID>::
Attract_Individual_Particle_To_Interface_And_Adjust_Radius(ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles,const T phi_min,const T phi_max,const BLOCK_ITERATOR& block,
    const int index,const PARTICLE_LEVELSET_PARTICLE_TYPE particle_type,const T time)
{
PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*particles(block.Block());
T phi=levelset.Phi(block,cell_particles.X(index));bool inside=(phi >= phi_min && phi <= phi_max);
RANGE<TV> box=block.Bounding_Box();
if(!inside){
// Aim for a random phi in the target band and step toward it, halving the
// step whenever it would leave the domain or overshoot the band.
T phi_goal=random.Get_Uniform_Number(phi_min,phi_max);int iteration=0;
while(!inside && iteration < maximum_iterations_for_attraction){
TV N=levelset.Normal(block,cell_particles.X(index)),X;T distance=phi_goal-phi,dt=1;bool inside_domain=false;
while(!inside_domain && iteration<maximum_iterations_for_attraction){
X=cell_particles.X(index)+dt*distance*N;
if(grid.domain.Lazy_Outside(X)){dt*=.5;iteration++;}else inside_domain=true;}
if(!inside_domain) break; // ran out of iterations
phi=levelset.Phi(block,X);inside=(phi >= phi_min && phi <= phi_max);
if(!inside){
dt*=.5;iteration++;X=cell_particles.X(index)+dt*distance*N;
phi=levelset.Phi(block,X);inside=(phi >= phi_min && phi <= phi_max);}
cell_particles.X(index)=X;}}
if(!inside || box.Lazy_Outside(cell_particles.X(index))){cell_particles.array_collection->Delete_Element(index);return false;}
TV V_temp;levelset.levelset_callbacks->Adjust_Particle_For_Domain_Boundaries(cell_particles,index,V_temp,particle_type,0,time);
if(box.Lazy_Outside(cell_particles.X(index))){cell_particles.array_collection->Delete_Element(index);return false;}
phi=levelset.Collision_Aware_Phi(block,cell_particles.X(index));
if(phi<phi_min || phi>phi_max){Delete_Particle(cell_particles,index);return false;}
cell_particles.radius(index)=clamp(abs(phi),minimum_particle_radius,maximum_particle_radius);
return true;
}
//#####################################################################
// Function Adjust_Particle_Radii
//#####################################################################
// Clamps the radius of every negative and then every positive particle in
// every block (delegating to the per-block overload below).
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Adjust_Particle_Radii()
{
for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(negative_particles(b))
    Adjust_Particle_Radii(block,*negative_particles(b),-1);}
for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(positive_particles(b))
    Adjust_Particle_Radii(block,*positive_particles(b),1);}
}
//#####################################################################
// Function Adjust_Particle_Radii
//#####################################################################
// Clamps the radii of one block's particles to the signed distance at each
// particle position, using collision-aware interpolation when the block is
// occupied by collision bodies.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Adjust_Particle_Radii(const BLOCK_ITERATOR& block,PARTICLE_LEVELSET_PARTICLES<TV>& particles,const int sign)
{
assert(!particles.next);
bool near_objects=levelset.collision_body_list->Occupied_Block(block);if(near_objects) levelset.Enable_Collision_Aware_Interpolation(sign);
// new radius is negative if the particle is on the wrong side of the interface
if(sign==1) for(int k=1;k<=particles.array_collection->Size();k++)particles.radius(k)=clamp(levelset.Phi(block,particles.X(k)),minimum_particle_radius,maximum_particle_radius);
else for(int k=1;k<=particles.array_collection->Size();k++) particles.radius(k)=clamp(-levelset.Phi(block,particles.X(k)),minimum_particle_radius,maximum_particle_radius);
if(near_objects) levelset.Disable_Collision_Aware_Interpolation();
}
//#####################################################################
// Function Modify_Levelset_Using_Escaped_Particles
//#####################################################################
// Rebuilds phi from escaped particles.  With bias_towards_negative_particles
// the negative pass runs last so its corrections win; otherwise the two
// signed corrections are computed on copies of phi and merged with minmag
// inside the narrow band.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Modify_Levelset_Using_Escaped_Particles()
{
levelset.Compute_Block_Minimum_And_Maximum();
if(bias_towards_negative_particles){
    Modify_Levelset_Using_Escaped_Particles(phi,positive_particles,1);Modify_Levelset_Using_Escaped_Particles(phi,negative_particles,-1);}
else{
    ARRAY<T> phi_minus(phi),phi_plus(phi);
    Modify_Levelset_Using_Escaped_Particles(phi_minus,negative_particles,-1);Modify_Levelset_Using_Escaped_Particles(phi_plus,positive_particles,1);
    for(int c=1;c<=grid.number_of_cells;c++)if(abs(levelset.phi(c)) <= half_band_width) levelset.phi(c)=minmag(phi_minus(c),phi_plus(c));}
}
//#####################################################################
// Function Modify_Levelset_Using_Escaped_Particles
//#####################################################################
// For each escaped particle of the given sign (one whose signed phi exceeds
// its radius scaled by outside_particle_distance_multiplier), overwrite phi
// at the cells of its block with the signed distance to the particle's
// sphere, skipping cells whose line of sight to the particle is blocked by
// a collision body.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Modify_Levelset_Using_Escaped_Particles(ARRAY<T>& phi,ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles,const int sign)
{
T radius_multiplier=-sign*outside_particle_distance_multiplier,one_over_radius_multiplier=1/radius_multiplier;
for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
    bool near_objects=levelset.collision_body_list->Occupied_Block(block);if(near_objects) levelset.Enable_Collision_Aware_Interpolation(sign);
    for(PARTICLE_LEVELSET_PARTICLES<TV>* link=particles(b);link;link=link->next){PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*link;
        for(int k=1;k<=cell_particles.array_collection->Size();k++)if(levelset.Phi(block,cell_particles.X(k))*one_over_radius_multiplier > cell_particles.radius(k)){
            for(int ii=0;ii<T_GRID::number_of_cells_per_block;ii++){int c=block.Cell(ii);
                T radius_minus_sign_phi=cell_particles.radius(k)-sign*phi(c);
                if(radius_minus_sign_phi > 0){TV center=block.Cell_X(ii);
                    T distance_squared=(center-cell_particles.X(k)).Magnitude_Squared();
                    if(distance_squared < sqr(radius_minus_sign_phi)){
                        // NOTE(review): function-static scratch variables make this
                        // routine non-reentrant/thread-unsafe -- confirm single-threaded use.
                        static COLLISION_GEOMETRY_ID body_id;static int aggregate_id;static TV intersection_point;
                        if(near_objects && levelset.collision_body_list->collision_geometry_collection.Intersection_Between_Points(center,cell_particles.X(k),body_id,aggregate_id,intersection_point)) continue;
                        phi(c)=sign*(cell_particles.radius(k)-sqrt(distance_squared));}}}}}
    if(near_objects) levelset.Disable_Collision_Aware_Interpolation();}}
}
//#####################################################################
// Function Update_Particle_Cells
//#####################################################################
// Re-bins levelset particles after advection: exchanges boundary particles
// under MPI, moves particles that left their block's bounding box into
// their new block (deleting those that left the grid), then frees any bins
// that ended up empty.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Update_Particle_Cells(ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles)
{
if(mpi_grid) Exchange_Boundary_Particles(*mpi_grid,*this,particles,(PARTICLE_LEVELSET_PARTICLES<TV>*)0,(int)(cfl_number+(T)1.5));
ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*> particle_links;particle_links.Preallocate(10);
for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
    RANGE<TV> box=block.Bounding_Box();PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*particles(b);
    particle_links.Remove_All();for(PARTICLE_LEVELSET_PARTICLES<TV>* link=particles(b);link;link=link->next)particle_links.Append(link);
    for(int link=particle_links.m;link>=1;link--)for(int k=particle_links(link)->array_collection->Size();k>=1;k--)if(box.Lazy_Outside(particle_links(link)->X(k))){
        int final_b=grid.Clamped_Block_Index(particle_links(link)->X(k),1);if(!final_b){Delete_Particle(*particle_links(link),k);continue;}
        if(!particles(final_b)) particles(final_b)=Allocate_Particle<PARTICLE_LEVELSET_PARTICLES<TV> >();
        // Index into the chained bins as one flat array of fixed-size links.
        int absolute_index=(link-1)*particle_pool.number_particles_per_cell+k;
        Move_Particle(cell_particles,*particles(final_b),absolute_index);}}}
for(int b=1;b<=particles.m;b++)if(particles(b) && !particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));
}
//#####################################################################
// Function Update_Particle_Cells
//#####################################################################
// Re-bins removed particles after advection with timestep dt.  Under MPI
// the exchange bandwidth is derived from the fastest particle.  Particles
// leaving the short-cell grid move into particles_in_long_cells (or are
// deleted when that bin is absent), long-cell particles that re-entered the
// grid move back into blocks, and empty bins are freed.
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Update_Particle_Cells(ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*>& particles,PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>* particles_in_long_cells,const T dt)
{
if(mpi_grid){
    T maximum_speed_squared=0;
    for(int b=1;b<=grid.number_of_blocks;b++) if(particles(b)) maximum_speed_squared=max(maximum_speed_squared,particles(b)->V.Magnitude_Squared());
    T maximum_distance_in_cells=dt*sqrt(maximum_speed_squared)/grid.Minimum_Edge_Length();
    Exchange_Boundary_Particles(*mpi_grid,*this,particles,particles_in_long_cells,(int)(maximum_distance_in_cells+(T)1.5));}
for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
    RANGE<TV> box=block.Bounding_Box();PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>& cell_particles=*particles(b);
    for(int k=cell_particles.array_collection->Size();k>=1;k--)if(box.Lazy_Outside(cell_particles.X(k))){
        int final_b=grid.Clamped_Block_Index(cell_particles.X(k),1);
        if(!final_b){
            if(particles_in_long_cells) Move_Particle(cell_particles,*particles_in_long_cells,k);
            else cell_particles.array_collection->Delete_Element(k);}
        else{
            if(!particles(final_b)) particles(final_b)=Allocate_Particle<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV> >();
            Move_Particle(cell_particles,*particles(final_b),k);}}}}
if(particles_in_long_cells)
    for(int k=particles_in_long_cells->array_collection->Size();k>=1;k--){
        int final_b=grid.Clamped_Block_Index(particles_in_long_cells->X(k),1);if(!final_b) continue;
        if(!particles(final_b)) particles(final_b)=Allocate_Particle<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV> >();
        Move_Particle(*particles_in_long_cells,*particles(final_b),k);}
// NOTE(review): this logs the raw pointer value, not a count -- presumably
// "...->array_collection->Size()" was intended; confirm before changing.
LOG::cout<<"particles in long cells = "<<particles_in_long_cells<<std::endl;
for(int b=1;b<=particles.m;b++)if(particles(b) && !particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));
}
//#####################################################################
// Function Compact_Particles_Into_Single_Particle_Bin
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Compact_Particles_Into_Single_Particle_Bin()
{
    // For every block, merge each family's linked chain of particle bins into its head bin.
    for(BLOCK_ITERATOR block(grid,1);block;block++){
        int block_index=block.Block();
        PARTICLE_LEVELSET_PARTICLES<TV>* negative=negative_particles(block_index);
        PARTICLE_LEVELSET_PARTICLES<TV>* positive=positive_particles(block_index);
        if(negative) Compact_Particles_Into_Single_Particle_Bin(block,*negative,-1);
        if(positive) Compact_Particles_Into_Single_Particle_Bin(block,*positive,1);}
}
//#####################################################################
// Function Compact_Particles_Into_Single_Particle_Bin
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Compact_Particles_Into_Single_Particle_Bin(const BLOCK_ITERATOR& block,PARTICLE_LEVELSET_PARTICLES<TV>& particles,const int sign)
{
    // Collapse a linked chain of particle bins into the single head bin, keeping only the
    // particle_pool.number_particles_per_cell particles with the smallest sign*phi-radius
    // (i.e. deepest inside their own side of the interface). All trailing bins are freed.
    if(!particles.next) return; // already a single bin; nothing to compact
    assert(particles.array_collection->Size()==particle_pool.number_particles_per_cell);
    // Build a max-heap over the head bin keyed on sign*phi-radius so the "worst" kept particle sits on top.
    int heap_size=0;ARRAY<int> heap_particle_indices(particle_pool.number_particles_per_cell);ARRAY<T> heap_phi_minus_radius(particle_pool.number_particles_per_cell);
    for(int k=particles.array_collection->Size();k>=1;k--){
        heap_size++;heap_particle_indices(heap_size)=k;heap_phi_minus_radius(heap_size)=sign*levelset.Phi(block,particles.X(k))-particles.radius(k);}
    ARRAYS_COMPUTATIONS::Heapify(heap_phi_minus_radius,heap_particle_indices); // when heap is full, order values with largest on top
    PARTICLE_LEVELSET_PARTICLES<TV>* particles_link=&particles;
    // Walk the remaining bins; any particle better than the current worst kept particle
    // overwrites that heap-top slot in the head bin.
    while(particles_link->next){
        particles_link=particles_link->next;assert(!particles_link->next||particles_link->array_collection->Size()==particle_pool.number_particles_per_cell);
        for(int k=particles_link->array_collection->Size();k>=1;k--){
            T phi_minus_radius=sign*levelset.Phi(block,particles_link->X(k))-particles_link->radius(k);
            if(phi_minus_radius < heap_phi_minus_radius(1)){ // delete particle on top of heap & add new particle
                particles.array_collection->Copy_Element(*particles_link->array_collection,k,heap_particle_indices(1));
                heap_phi_minus_radius(1)=phi_minus_radius;
                ARRAYS_COMPUTATIONS::Heapify(heap_phi_minus_radius,heap_particle_indices,1,heap_phi_minus_radius.m);}}}
    Free_Particle_And_Clear_Pointer(particles.next);
}
//#####################################################################
// Function Euler_Step_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Euler_Step_Particles(const ARRAY<T>& V,const T dt,const T time,const bool use_second_order_for_nonremoved_particles,const bool update_particle_cells_after_euler_step,const bool verbose)
{
    // Advect the nonremoved particle families with either a second-order Runge-Kutta
    // step or a plain Euler step, then advect the removed particles.
    if(!use_second_order_for_nonremoved_particles){
        Euler_Step_Particles(V,negative_particles,PARTICLE_LEVELSET_NEGATIVE,dt,time,update_particle_cells_after_euler_step);
        Euler_Step_Particles(V,positive_particles,PARTICLE_LEVELSET_POSITIVE,dt,time,update_particle_cells_after_euler_step);}
    else{
        Second_Order_Runge_Kutta_Step_Particles(V,negative_particles,PARTICLE_LEVELSET_NEGATIVE,dt,time,update_particle_cells_after_euler_step,verbose);
        Second_Order_Runge_Kutta_Step_Particles(V,positive_particles,PARTICLE_LEVELSET_POSITIVE,dt,time,update_particle_cells_after_euler_step,verbose);}
    Euler_Step_Removed_Particles(dt,time,update_particle_cells_after_euler_step,verbose);
}
//#####################################################################
// Function Euler_Step_Removed_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Euler_Step_Removed_Particles(const T dt,const T time,const bool update_particle_cells_after_euler_step,const bool verbose)
{
    // Advect whichever removed particle families are active; the long-cell bin is
    // forwarded only when its corresponding flag is set, otherwise 0 is passed.
    if(use_removed_negative_particles){
        PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>* long_cell_bin=use_removed_negative_particles_in_long_cells?removed_negative_particles_in_long_cells:0;
        Euler_Step_Removed_Particles(removed_negative_particles,long_cell_bin,PARTICLE_LEVELSET_REMOVED_NEGATIVE,dt,time,update_particle_cells_after_euler_step,verbose);}
    if(use_removed_positive_particles){
        PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>* long_cell_bin=use_removed_positive_particles_in_long_cells?removed_positive_particles_in_long_cells:0;
        Euler_Step_Removed_Particles(removed_positive_particles,long_cell_bin,PARTICLE_LEVELSET_REMOVED_POSITIVE,dt,time,update_particle_cells_after_euler_step,verbose);}
}
//#####################################################################
// Function Euler_Step_Removed_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Euler_Step_Removed_Particles(ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*>& particles,PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>* particles_in_long_cells,
    const PARTICLE_LEVELSET_PARTICLE_TYPE particle_type,const T dt,const T time,const bool update_particle_cells_after_euler_step,const bool verbose)
{
    // Advance removed particles by X += dt*V. Before stepping, each particle is
    // adjusted for domain boundaries; in blocks swept by collision bodies it is also
    // adjusted for objects, and deleted if that adjustment fails (crossover).
    // Optionally rebins particles into their new cells at the end.
    int number_of_deleted_particles=0,number_of_non_occupied_cells=0;
    for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
        PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>& cell_particles=*particles(b);
        if(!levelset.collision_body_list->Swept_Occupied_Block(block)){
            number_of_non_occupied_cells++;
            for(int k=cell_particles.array_collection->Size();k>=1;k--) // since not an occupied cell, don't need to adjust for objects
                levelset.levelset_callbacks->Adjust_Particle_For_Domain_Boundaries(cell_particles,k,cell_particles.V(k),particle_type,dt,time);}
        else{
            // backward loop so Delete_Element (which may reorder/shrink) is safe
            for(int k=cell_particles.array_collection->Size();k>=1;k--){
                levelset.levelset_callbacks->Adjust_Particle_For_Domain_Boundaries(cell_particles,k,cell_particles.V(k),particle_type,dt,time);
                T collision_distance=Particle_Collision_Distance(cell_particles.quantized_collision_distance(k));
                if(!Adjust_Particle_For_Objects(cell_particles.X(k),cell_particles.V(k),cell_particles.radius(k),collision_distance,particle_type,dt,time)){
                    cell_particles.array_collection->Delete_Element(k);number_of_deleted_particles++;}}}
        cell_particles.Euler_Step_Position(dt);}}
    // handle particles in long cells
    if(particles_in_long_cells){
        for(int k=particles_in_long_cells->array_collection->Size();k>=1;k--){
            levelset.levelset_callbacks->Adjust_Particle_For_Domain_Boundaries(*particles_in_long_cells,k,particles_in_long_cells->V(k),particle_type,dt,time);
            T collision_distance=Particle_Collision_Distance(particles_in_long_cells->quantized_collision_distance(k));
            // TODO: make this always check nonrasterized objects
            if(!Adjust_Particle_For_Objects(particles_in_long_cells->X(k),particles_in_long_cells->V(k),particles_in_long_cells->radius(k),collision_distance,particle_type,dt,time)){
                particles_in_long_cells->array_collection->Delete_Element(k);number_of_deleted_particles++;}}
        particles_in_long_cells->Euler_Step_Position(dt);}
    if(verbose){
        if(number_of_deleted_particles) LOG::cout<<"Deleted "<<number_of_deleted_particles<<" "<<PARTICLE_LEVELSET<T_GRID>::Particle_Type_Name(particle_type)<<" due to crossover"<<std::endl;
        if(number_of_non_occupied_cells) LOG::cout<<"Skipped "<<number_of_non_occupied_cells<<" non occupied cells"<<std::endl;}
    if(update_particle_cells_after_euler_step) Update_Particle_Cells(particles,particles_in_long_cells,dt);
}
//#####################################################################
// Function Euler_Step_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Euler_Step_Particles(const ARRAY<T>& V,ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles,const PARTICLE_LEVELSET_PARTICLE_TYPE particle_type,
    const T dt,const T time,const bool update_particle_cells_after_euler_step,const bool assume_particles_in_correct_cells)
{
    // First-order Euler advection of nonremoved particles. Not implemented for the RLE
    // grid; the commented-out body below sketches the intended algorithm (interpolate
    // the face velocity per particle and step X += dt*velocity).
    PHYSBAM_NOT_IMPLEMENTED();
    /*
    assert(assume_particles_in_correct_cells);
    //T object_contour=T_GRID::ITERATOR_CELL::Short_Max_Length(grid)*(this->min_collision_distance_factor+cfl_number);
    for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
        PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*particles(b);
        typename T_GRID::LINEAR_INTERPOLATION_MAC_HELPER block_velocity(block,V);
        //bool check_objects=(*levelset_object.block_minimum)(b)<=object_contour;
        for(int k=1;k<=cell_particles.array_collection->Size();k++){
            TV velocity=block_velocity.Interpolate_Face(cell_particles.X(k));
            PHYSBAM_NOT_IMPLEMENTED();//if(check_objects) levelset.levelset_callbacks->Adjust_Particle_For_Objects(cell_particles,k,velocity,particle_type,dt,time);
            cell_particles.X(k)+=dt*velocity;}}}
    if(update_particle_cells_after_euler_step) Update_Particle_Cells(particles);
    */
}
//#####################################################################
// Function Second_Order_Runge_Kutta_Step_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Second_Order_Runge_Kutta_Step_Particles(const ARRAY<T>& V,ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles,const PARTICLE_LEVELSET_PARTICLE_TYPE particle_type,
    const T dt,const T time,const bool update_particle_cells_after_euler_step,const bool verbose)
{
    // Advect nonremoved particles with a second-order Runge-Kutta (midpoint-style) step:
    // velocity is sampled at the start position, the tentative end position is computed,
    // and the two velocity samples are averaged. Blocks swept by collision bodies use
    // collision-aware interpolation, push-out, and body-penetration handling; particles
    // that cross into objects are deleted. Optionally rebins particles at the end.
    T_FACE_LOOKUP V_lookup(V);
    T_FACE_LOOKUP_COLLIDABLE V_lookup_collidable(V_lookup,*levelset.collision_body_list,levelset.face_velocities_valid_mask_current);
    typename T_FACE_LOOKUP_COLLIDABLE::LOOKUP V_lookup_collidable_lookup(V_lookup_collidable,V_lookup);
    T_LINEAR_INTERPOLATION_SCALAR linear_interpolation; // use for second step since particle may not be in initial block
    T_LINEAR_INTERPOLATION_COLLIDABLE_FACE_SCALAR linear_interpolation_collidable;
    COLLISION_GEOMETRY_ID body_id;int aggregate_id;T start_phi,end_phi;TV body_normal,body_velocity;int number_of_deleted_particles=0,number_of_non_occupied_cells=0;
    for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
        PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*particles(b);
        T_LINEAR_INTERPOLATION_MAC_HELPER block_velocity(block,V);RANGE<TV> box=block.Bounding_Box();
        if(!levelset.collision_body_list->Swept_Occupied_Block(block)){ // not occupied, so advect ignoring objects
            number_of_non_occupied_cells++;
            for(int k=cell_particles.array_collection->Size();k>=1;k--){
                TV velocity=block_velocity.Interpolate_Face(cell_particles.X(k));
                TV X_new=cell_particles.X(k)+dt*velocity;
                // average start and end velocities; the end sample needs the block containing X_new
                if(box.Lazy_Inside(X_new)) velocity=(T).5*(velocity+block_velocity.Interpolate_Face(X_new));
                else{
                    const BLOCK_ITERATOR block(grid,X_new);if(!block){cell_particles.X(k)=X_new;continue;} // particle will be deleted in cell update
                    velocity=(T).5*(velocity+T_LINEAR_INTERPOLATION_MAC_HELPER::Interpolate_Face(block,V,X_new));}
                levelset.levelset_callbacks->Adjust_Particle_For_Domain_Boundaries(cell_particles,k,velocity,particle_type,dt,time);
                cell_particles.X(k)+=dt*velocity;}}
        else{ // collision aware advection
            for(int k=cell_particles.array_collection->Size();k>=1;k--){
                T collision_distance=Particle_Collision_Distance(cell_particles.quantized_collision_distance(k));
                BLOCK_ITERATOR block_new(block);RANGE<TV> box_new=box;
                // first push particle out
                if(particle_type==PARTICLE_LEVELSET_NEGATIVE){bool particle_crossover;
                    if(levelset.collision_body_list->Push_Out_Point(cell_particles.X(k),collision_distance,true,particle_crossover)&&particle_crossover){
                        cell_particles.array_collection->Delete_Element(k);number_of_deleted_particles++;continue;} // delete due to crossover in push out
                    if(box_new.Lazy_Outside(cell_particles.X(k))){
                        block_new.Initialize(cell_particles.X(k));if(!block_new) continue; // particle will be deleted in cell update
                        box_new=block_new.Bounding_Box();}}
                TV velocity=linear_interpolation_collidable.From_Block_Face(block_new,V_lookup_collidable_lookup,cell_particles.X(k));
                // adjust normal component of velocity to not move into nearest body
                bool got_interaction_with_body=false;
                if(particle_type==PARTICLE_LEVELSET_NEGATIVE)
                    got_interaction_with_body=levelset.collision_body_list->Get_Body_Penetration(cell_particles.X(k),cell_particles.X(k),collision_distance,dt,body_id,aggregate_id,
                        start_phi,end_phi,body_normal,body_velocity);
                if(got_interaction_with_body){
                    T relative_normal_velocity=TV::Dot_Product(velocity-body_velocity,body_normal);
                    if(relative_normal_velocity<0) velocity-=relative_normal_velocity*body_normal;}
                // compute position using velocity but clamp if intersects any body
                TV X_new=cell_particles.X(k)+dt*velocity;
                RAY<TV> ray;
                if(RAY<TV>::Create_Non_Degenerate_Ray(cell_particles.X(k),X_new-cell_particles.X(k),ray))
                    if(levelset.collision_body_list->Closest_Non_Intersecting_Point_Of_Any_Body(ray,body_id))
                        X_new=ray.Point(ray.t_max);
                // get average velocity
                block_new.Initialize(X_new);if(!block_new) continue; // particle will be deleted in cell update
                velocity=(T).5*(velocity+linear_interpolation_collidable.From_Block_Face(block_new,V_lookup_collidable_lookup,X_new));
                // adjust normal component of velocity again
                if(got_interaction_with_body){
                    T relative_normal_velocity=TV::Dot_Product(velocity-body_velocity,body_normal);
                    if(relative_normal_velocity<0) velocity-=relative_normal_velocity*body_normal;}
                levelset.levelset_callbacks->Adjust_Particle_For_Domain_Boundaries(cell_particles,k,velocity,particle_type,dt,time);
                if(!Adjust_Particle_For_Objects(cell_particles.X(k),velocity,cell_particles.radius(k),collision_distance,particle_type,dt,time)){
                    cell_particles.array_collection->Delete_Element(k);number_of_deleted_particles++;}
                else cell_particles.X(k)+=dt*velocity;}}}}
    if(verbose){
        if(number_of_deleted_particles) LOG::cout<<"Deleted "<<number_of_deleted_particles<<" "<<PARTICLE_LEVELSET<T_GRID>::Particle_Type_Name(particle_type)<<" due to crossover"<<std::endl;
        if(number_of_non_occupied_cells) LOG::cout<<"Skipped "<<number_of_non_occupied_cells<<" non occupied cells"<<std::endl;}
    if(update_particle_cells_after_euler_step) Update_Particle_Cells(particles);
}
//#####################################################################
// Function Reseed_Particles
//#####################################################################
template<class T_GRID> int PARTICLE_LEVELSET_RLE<T_GRID>::
Reseed_Particles(const T time,ARRAY<bool>* cell_centered_mask)
{
    // Rebalance particle counts near the interface: delete excess/far particles
    // (only when no cell mask is given), then add new particles where blocks are
    // undersampled. Returns the net change in particle count.
    int new_particles=0;
    bool normals_defined=levelset.normals!=0;levelset.Compute_Normals(); // make sure normals are accurate
    if(!cell_centered_mask) new_particles-=Reseed_Delete_Particles(negative_particles,-1);
    new_particles+=Reseed_Add_Particles(negative_particles,positive_particles,-1,PARTICLE_LEVELSET_NEGATIVE,time,cell_centered_mask);
    if(!cell_centered_mask) for(int b=1;b<=negative_particles.m;b++)if(negative_particles(b)) negative_particles(b)->array_collection->Compact();
    if(!cell_centered_mask) new_particles-=Reseed_Delete_Particles(positive_particles,1);
    new_particles+=Reseed_Add_Particles(positive_particles,negative_particles,1,PARTICLE_LEVELSET_POSITIVE,time,cell_centered_mask);
    if(!cell_centered_mask) for(int b=1;b<=positive_particles.m;b++)if(positive_particles(b)) positive_particles(b)->array_collection->Compact();
    if(!normals_defined){delete levelset.normals;levelset.normals=0;} // restore prior state if normals existed only for reseeding
    return new_particles;
}
//#####################################################################
// Function Reseed_Delete_Particles
//#####################################################################
template<class T_GRID> int PARTICLE_LEVELSET_RLE<T_GRID>::
Reseed_Delete_Particles(ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles,const int sign)
{
    // Per block: if the block is no longer near the interface, delete all non-escaped
    // particles (freeing the bin when empty); if it is near the interface but holds more
    // than number_particles_per_cell non-escaped particles, keep the best ones (smallest
    // sign*phi-radius) via a heap and delete the rest. Returns the number deleted by the
    // over-capacity path.
    levelset.Compute_Block_Minimum_And_Maximum();
    int number_deleted=0;ARRAY<int> heap_particle_indices(number_particles_per_cell);ARRAY<T> heap_phi_minus_radius(number_particles_per_cell);
    for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(particles(b)){
        PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*particles(b);assert(!cell_particles.next);
        // a block is "near the interface" if any of its cells has unsigned phi within the half band
        for(int i=0;i<T_GRID::number_of_cells_per_block;i++){
            T unsigned_phi=sign*levelset.phi(block.Cell(i));if(0<=unsigned_phi && unsigned_phi<=half_band_width) goto NEAR_THE_INTERFACE;}
        if(cell_particles.array_collection->Size() == 0) Free_Particle_And_Clear_Pointer(particles(b));
        else{ // delete all non-escaped particles
            ARRAY<bool> escaped;Identify_Escaped_Particles(block,cell_particles,escaped,sign);
            for(int index=cell_particles.array_collection->Size();index>=1;index--)if(!escaped(index)) cell_particles.array_collection->Delete_Element(index);
            if(cell_particles.array_collection->Size() == 0) Free_Particle_And_Clear_Pointer(particles(b));}
        continue;
        NEAR_THE_INTERFACE:; // can only get in here via the goto
        if(cell_particles.array_collection->Size() <= number_particles_per_cell) continue;
        ARRAY<bool> escaped;Identify_Escaped_Particles(block,cell_particles,escaped,sign);int number_of_escaped_particles=escaped.Number_True();
        int total_particles=cell_particles.array_collection->Size()-number_of_escaped_particles;
        if(total_particles > number_particles_per_cell){ // too many particles - delete particles with a heap sort
            number_deleted+=total_particles-number_particles_per_cell;int heap_size=0;
            for(int index=1;index<=cell_particles.array_collection->Size();index++)if(!escaped.m || !escaped(index)){
                T phi_minus_radius=sign*levelset.Phi(cell_particles.X(index))-cell_particles.radius(index);
                if(heap_size < number_particles_per_cell){ // add particle to heap
                    heap_size++;heap_particle_indices(heap_size)=index;heap_phi_minus_radius(heap_size)=phi_minus_radius;
                    if(heap_size==number_particles_per_cell) ARRAYS_COMPUTATIONS::Heapify(heap_phi_minus_radius,heap_particle_indices);} // when heap is full, order values with largest on top
                else{ // excess particles don't fit in the heap
                    if(phi_minus_radius < heap_phi_minus_radius(1)){ // delete particle on top of heap & add new particle
                        cell_particles.array_collection->Add_To_Deletion_List(heap_particle_indices(1));
                        heap_phi_minus_radius(1)=phi_minus_radius;heap_particle_indices(1)=index;
                        ARRAYS_COMPUTATIONS::Heapify(heap_phi_minus_radius,heap_particle_indices,1,heap_phi_minus_radius.m);}
                    else cell_particles.array_collection->Add_To_Deletion_List(index);}} // delete new particle, larger than top of heap
            cell_particles.array_collection->Delete_Elements_On_Deletion_List();}}}
    return number_deleted;
}
//#####################################################################
// Function Reseed_Add_Particles
//#####################################################################
template<class T_GRID> int PARTICLE_LEVELSET_RLE<T_GRID>::
Reseed_Add_Particles(ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& particles,ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>& other_particles,
    const int sign,const PARTICLE_LEVELSET_PARTICLE_TYPE particle_type,const T time,ARRAY<bool>* cell_centered_mask)
{
    // In every near-interface block (optionally restricted by cell_centered_mask),
    // top up the bin to number_particles_per_cell, counting particles of the opposite
    // sign toward the capacity. New particles get random positions inside the block
    // and are attracted to the interface; a failed attraction retries up to 5 times.
    // Returns the number of particles added.
    int number_added=0;
    for(BLOCK_ITERATOR block(grid,1);block;block++){
        for(int i=0;i<T_GRID::number_of_cells_per_block;i++){
            T unsigned_phi=sign*levelset.phi(block.Cell(i));
            if(0<=unsigned_phi && unsigned_phi<=half_band_width && (!cell_centered_mask||(*cell_centered_mask)(block.Cell(i)))) goto NEAR_THE_INTERFACE;}
        continue;
        NEAR_THE_INTERFACE:; // can only get in here via the goto
        int b=block.Block();
        if(!particles(b)) particles(b)=Allocate_Particle<PARTICLE_LEVELSET_PARTICLES<TV> >();
        int total_particles=particles(b)->array_collection->Size(),total_other_particles=0;if(other_particles(b)) total_other_particles=other_particles(b)->array_collection->Size();
        if(total_particles+total_other_particles >= number_particles_per_cell) continue; // already full
        PARTICLE_LEVELSET_PARTICLES<TV>& cell_particles=*particles(b);assert(!cell_particles.next);
        int number_of_particles_to_add=number_particles_per_cell-total_particles-total_other_particles;
        if(sign == -1){ // we add the negative particles first, and don't want to add too many of them...
            if(total_other_particles) number_of_particles_to_add=(int)((T)number_of_particles_to_add*(T)total_particles/(T)(total_particles+total_other_particles)+1);
            else if(!total_particles) number_of_particles_to_add=(number_of_particles_to_add+1)/2;}
        number_added+=number_of_particles_to_add;
        cell_particles.array_collection->Preallocate(total_particles+number_of_particles_to_add);
        // target phi band for the new particles, oriented by sign
        T phi_min=sign*minimum_particle_radius,phi_max=sign*half_band_width;if(phi_min > phi_max) exchange(phi_min,phi_max);
        RANGE<TV> box=block.Bounding_Box();
        int attempts=0;
        ARRAY_VIEW<int>* id=store_unique_particle_id?cell_particles.array_collection->template Get_Array<int>(ATTRIBUTE_ID_ID):0;
        for(int k=1;k<=number_of_particles_to_add;k++){
            int index=cell_particles.array_collection->Add_Element();
            if(id) (*id)(index)=++last_unique_particle_id;
            cell_particles.quantized_collision_distance(index)=(unsigned short)(random.Get_Number()*USHRT_MAX);
            cell_particles.X(index)=random.Get_Uniform_Vector(box);
            if(!Attract_Individual_Particle_To_Interface_And_Adjust_Radius(particles,phi_min,phi_max,block,index,particle_type,time) && ++attempts <= 5) k--;}}
    return number_added;
}
//#####################################################################
// Function Identify_Escaped_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Identify_Escaped_Particles(const int sign)
{
    // Recompute the escaped flags for the requested family (sign==1 positive,
    // sign==-1 negative, sign==0 both).
    levelset.Compute_Block_Minimum_And_Maximum();
    if(sign==1 || !sign){
        escaped_positive_particles.Resize(grid.number_of_blocks);
        for(BLOCK_ITERATOR block(grid,1);block;block++){
            int b=block.Block();
            if(!positive_particles(b)) escaped_positive_particles(b).Clean_Memory();
            else Identify_Escaped_Particles(block,*positive_particles(b),escaped_positive_particles(b),1);}}
    if(sign==-1 || !sign){
        escaped_negative_particles.Resize(grid.number_of_blocks);
        for(BLOCK_ITERATOR block(grid,1);block;block++){
            int b=block.Block();
            if(!negative_particles(b)) escaped_negative_particles(b).Clean_Memory();
            else Identify_Escaped_Particles(block,*negative_particles(b),escaped_negative_particles(b),-1);}}
}
//#####################################################################
// Function Identify_Escaped_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Identify_Escaped_Particles(const BLOCK_ITERATOR& block,PARTICLE_LEVELSET_PARTICLES<TV>& particles,ARRAY<bool>& escaped,const int sign)
{
    // Flag particles whose levelset value puts them on the wrong side of the interface
    // by more than outside_particle_distance_multiplier times their radius.
    assert(!particles.next);
    bool near_objects=levelset.collision_body_list->Occupied_Block(block);
    if(near_objects) levelset.Enable_Collision_Aware_Interpolation(sign);
    T phi_scale=-sign/outside_particle_distance_multiplier; // scaled phi is positive on the wrong side
    int count=particles.array_collection->Size();
    escaped.Resize(count,false,false);ARRAYS_COMPUTATIONS::Fill(escaped,false);
    for(int index=1;index<=count;index++)
        escaped(index)=phi_scale*levelset.Phi(block,particles.X(index)) > particles.radius(index);
    if(near_objects) levelset.Disable_Collision_Aware_Interpolation();
}
//#####################################################################
// Function Delete_Particles_Outside_Grid
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Delete_Particles_Outside_Grid()
{
    // Cull particles that have drifted outside the grid domain, for every family
    // that is currently in use (long-cell bins included for the removed families).
    Delete_Particles_Outside_Grid(positive_particles,grid);
    Delete_Particles_Outside_Grid(negative_particles,grid);
    if(use_removed_positive_particles){
        Delete_Particles_Outside_Grid(removed_positive_particles,grid);
        Delete_Particles_Outside_Grid(*removed_positive_particles_in_long_cells);}
    if(use_removed_negative_particles){
        Delete_Particles_Outside_Grid(removed_negative_particles,grid);
        Delete_Particles_Outside_Grid(*removed_negative_particles_in_long_cells);}
}
//#####################################################################
// Function Delete_Particles_Outside_Grid
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Delete_Particles_Outside_Grid(PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>& particles_in_long_cells)
{
    // Delete long-cell particles whose horizontal position lies outside the
    // horizontal grid domain. Backward loop keeps indices valid across deletions.
    const T_BOX_HORIZONTAL& horizontal_domain=grid.horizontal_grid.domain;
    for(int index=particles_in_long_cells.array_collection->Size();index>=1;index--){
        if(!horizontal_domain.Lazy_Inside(particles_in_long_cells.X(index).Horizontal_Vector()))
            particles_in_long_cells.array_collection->Delete_Element(index);}
}
//#####################################################################
// Function Delete_Particles_Outside_Grid
//#####################################################################
template<class T_GRID> template<class T_PARTICLES> void PARTICLE_LEVELSET_RLE<T_GRID>::
Delete_Particles_Outside_Grid(ARRAY<T_PARTICLES*>& particles,const RLE_GRID_2D<T>&)
{
    // 2D overload: walk only the boundary block columns (first and last in x) and
    // delete particles that fall outside the x extent of the domain, freeing bins
    // that become empty.
    for(RLE_GRID_ITERATOR_BLOCK_2D<T> block(grid,RANGE<VECTOR<int,1> >(0,0));block;block++){int b=block.Block();if(particles(b)){
        for(int k=particles(b)->array_collection->Size();k>=1;k--) if(particles(b)->X(k).x < grid.domain.min_corner.x) particles(b)->array_collection->Delete_Element(k);
        if(!particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));}}
    for(RLE_GRID_ITERATOR_BLOCK_2D<T> block(grid,RANGE<VECTOR<int,1> >(grid.uniform_grid.counts.x-1,grid.uniform_grid.counts.x-1));block;block++){int b=block.Block();if(particles(b)){
        for(int k=particles(b)->array_collection->Size();k>=1;k--) if(particles(b)->X(k).x > grid.domain.max_corner.x) particles(b)->array_collection->Delete_Element(k);
        if(!particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));}}
}
//#####################################################################
// Function Delete_Particles_Outside_Grid
//#####################################################################
template<class T_GRID> template<class T_PARTICLES> void PARTICLE_LEVELSET_RLE<T_GRID>::
Delete_Particles_Outside_Grid(ARRAY<T_PARTICLES*>& particles,const RLE_GRID_3D<T>&)
{
    // 3D overload: walk the four boundary block slabs (min/max in x, then min/max in z)
    // and delete particles outside the corresponding domain extent, freeing bins that
    // become empty.
    for(RLE_GRID_ITERATOR_BLOCK_3D<T> block(grid,RANGE<VECTOR<int,2> >(0,0,0,grid.uniform_grid.counts.z-1));block;block++){int b=block.Block();if(particles(b)){
        for(int k=particles(b)->array_collection->Size();k>=1;k--) if(particles(b)->X(k).x < grid.domain.min_corner.x) particles(b)->array_collection->Delete_Element(k);
        if(!particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));}}
    for(RLE_GRID_ITERATOR_BLOCK_3D<T> block(grid,RANGE<VECTOR<int,2> >(grid.uniform_grid.counts.x-1,grid.uniform_grid.counts.x-1,0,grid.uniform_grid.counts.z-1));block;block++){int b=block.Block();if(particles(b)){
        for(int k=particles(b)->array_collection->Size();k>=1;k--) if(particles(b)->X(k).x > grid.domain.max_corner.x) particles(b)->array_collection->Delete_Element(k);
        if(!particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));}}
    for(RLE_GRID_ITERATOR_BLOCK_3D<T> block(grid,RANGE<VECTOR<int,2> >(0,grid.uniform_grid.counts.x-1,0,0));block;block++){int b=block.Block();if(particles(b)){
        for(int k=particles(b)->array_collection->Size();k>=1;k--) if(particles(b)->X(k).z < grid.domain.min_corner.z) particles(b)->array_collection->Delete_Element(k);
        if(!particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));}}
    for(RLE_GRID_ITERATOR_BLOCK_3D<T> block(grid,RANGE<VECTOR<int,2> >(0,grid.uniform_grid.counts.x-1,grid.uniform_grid.counts.z-1,grid.uniform_grid.counts.z-1));block;block++){int b=block.Block();if(particles(b)){
        for(int k=particles(b)->array_collection->Size();k>=1;k--) if(particles(b)->X(k).z > grid.domain.max_corner.z) particles(b)->array_collection->Delete_Element(k);
        if(!particles(b)->array_collection->Size()) Free_Particle_And_Clear_Pointer(particles(b));}}
}
//#####################################################################
// Function Identify_And_Remove_Escaped_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Identify_And_Remove_Escaped_Particles(const ARRAY<T>& V,const T radius_fraction,const bool verbose)
{
    // Refresh the escaped flags for both families, then remove escaped particles:
    // when the removed-particle families are in use the particles are transferred to
    // the removed bins (with an interpolated velocity); otherwise they are simply
    // deleted. p/n accumulate the counts reported by Remove_Escaped_Particles.
    T_FACE_LOOKUP V_lookup(V);
    T_FACE_LOOKUP_COLLIDABLE V_lookup_collidable(V_lookup,*levelset.collision_body_list,levelset.face_velocities_valid_mask_current);
    typename T_FACE_LOOKUP_COLLIDABLE::LOOKUP V_lookup_collidable_lookup(V_lookup_collidable,V_lookup);
    Identify_Escaped_Particles();int p=0,n=0;
    if(use_removed_positive_particles)
        for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(escaped_positive_particles(b).m)
            p+=Remove_Escaped_Particles(block,*positive_particles(b),escaped_positive_particles(b),1,removed_positive_particles(b),V_lookup_collidable_lookup,radius_fraction);}
    else
        for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(escaped_positive_particles(b).m)
            p+=Remove_Escaped_Particles(block,*positive_particles(b),escaped_positive_particles(b),1,radius_fraction);}
    if(use_removed_negative_particles)
        for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(escaped_negative_particles(b).m)
            n+=Remove_Escaped_Particles(block,*negative_particles(b),escaped_negative_particles(b),-1,removed_negative_particles(b),V_lookup_collidable_lookup,radius_fraction);}
    else
        for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(escaped_negative_particles(b).m)
            n+=Remove_Escaped_Particles(block,*negative_particles(b),escaped_negative_particles(b),-1,radius_fraction);}
    LOG::cout<<"new removed positive particles = "<<p<<std::endl<<"new removed negative particles = "<<n<<std::endl;
}
//#####################################################################
// Function Remove_Escaped_Particles
//#####################################################################
template<class T_GRID> template<class T_FACE_LOOKUP_LOOKUP> int PARTICLE_LEVELSET_RLE<T_GRID>::
Remove_Escaped_Particles(const BLOCK_ITERATOR& block,PARTICLE_LEVELSET_PARTICLES<TV>& particles,const ARRAY<bool>& escaped,const int sign,
    PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*& removed_particles,const T_FACE_LOOKUP_LOOKUP& V,const T radius_fraction)
{
    // Transfer escaped particles that are more than radius_fraction of their radius past
    // the interface into the removed-particle bin (allocating it on demand), assigning
    // each an interpolated face velocity. Returns the number of particles transferred.
    T_LINEAR_INTERPOLATION_COLLIDABLE_FACE_SCALAR linear_interpolation_collidable;
    assert(!particles.next);
    bool near_objects=levelset.collision_body_list->Occupied_Block(block);if(near_objects) levelset.Enable_Collision_Aware_Interpolation(sign);
    int deleted=0;T minus_sign_over_radius_fraction=-sign/radius_fraction;
    for(int k=particles.array_collection->Size();k>=1;k--)if(escaped(k) && minus_sign_over_radius_fraction*levelset.Phi(block,particles.X(k)) > particles.radius(k)){
        if(!removed_particles) removed_particles=Allocate_Particle<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV> >();
        removed_particles->array_collection->Take(*particles.array_collection,k);
        removed_particles->V.Last()=linear_interpolation_collidable.From_Block_Face(block,V,removed_particles->X.Last());
        deleted++;} // fix: count each transferred particle; previously 0 was always returned, so callers logged 0 new removed particles
    if(near_objects) levelset.Disable_Collision_Aware_Interpolation();
    return deleted;
}
//#####################################################################
// Function Remove_Escaped_Particles
//#####################################################################
template<class T_GRID> int PARTICLE_LEVELSET_RLE<T_GRID>::
Remove_Escaped_Particles(const BLOCK_ITERATOR& block,PARTICLE_LEVELSET_PARTICLES<TV>& particles,const ARRAY<bool>& escaped,const int sign,const T radius_fraction)
{
    // Delete escaped particles that are more than radius_fraction of their radius past
    // the interface. Returns the number of particles deleted.
    assert(!particles.next);
    bool near_objects=levelset.collision_body_list->Occupied_Block(block);
    if(near_objects) levelset.Enable_Collision_Aware_Interpolation(sign);
    int number_deleted=0;
    for(int index=particles.array_collection->Size();index>=1;index--){
        if(!escaped(index)) continue;
        if(-sign*levelset.Phi(block,particles.X(index)) > radius_fraction*particles.radius(index)){
            particles.array_collection->Delete_Element(index);number_deleted++;}}
    if(near_objects) levelset.Disable_Collision_Aware_Interpolation();
    return number_deleted;
}
//#####################################################################
// Function Reincorporate_Removed_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Reincorporate_Removed_Particles(const T radius_fraction)
{
    // Move removed particles that are back on their own side of the interface into
    // the regular (nonremoved) bins, for each active removed family.
    if(use_removed_positive_particles)
        for(BLOCK_ITERATOR block(grid,1);block;block++){
            int block_index=block.Block();
            if(removed_positive_particles(block_index))
                Reincorporate_Removed_Particles(block,positive_particles(block_index),1,*removed_positive_particles(block_index),radius_fraction);}
    if(use_removed_negative_particles)
        for(BLOCK_ITERATOR block(grid,1);block;block++){
            int block_index=block.Block();
            if(removed_negative_particles(block_index))
                Reincorporate_Removed_Particles(block,negative_particles(block_index),-1,*removed_negative_particles(block_index),radius_fraction);}
}
//#####################################################################
// Function Reincorporate_Removed_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Reincorporate_Removed_Particles(const BLOCK_ITERATOR& block,PARTICLE_LEVELSET_PARTICLES<TV>*& particles,const int sign,
PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>& removed_particles,const T radius_fraction)
{
// Moves removed particles of this block back into the regular particle list when they are no
// longer escaped by more than radius_fraction of their radius; allocates the list on demand.
assert(!particles || !particles->next || particles->array_collection->Size()==particle_pool.number_particles_per_cell);
bool near_objects=levelset.collision_body_list->Occupied_Block(block);if(near_objects) levelset.Enable_Collision_Aware_Interpolation(sign);
// Condition below is equivalent to -sign*phi(X) < radius_fraction*radius, i.e. the particle is
// close enough to (or on) its own side of the interface to be reincorporated.
T one_over_radius_multiplier=-sign/radius_fraction;
// Backwards iteration keeps indices valid while Move_Particle removes entries.
for(int k=removed_particles.array_collection->Size();k>=1;k--)if(one_over_radius_multiplier*levelset.Phi(block,removed_particles.X(k)) < removed_particles.radius(k)){
if(!particles) particles=Allocate_Particle<PARTICLE_LEVELSET_PARTICLES<TV> >();
Move_Particle(removed_particles,*particles,k);} // maybe recalculate radius to get rid of raining effect?
if(particles) Compact_Particles_Into_Single_Particle_Bin(block,*particles,sign);
if(near_objects) levelset.Disable_Collision_Aware_Interpolation();
}
//#####################################################################
// Function Delete_Particles_Far_From_Interface
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Delete_Particles_Far_From_Interface(const int discrete_band)
{
// Frees all particle bins in blocks that are more than 'discrete_band' cells away from the
// interface. Distance is tracked as a per-cell char bitmask: bit 0 marks interface-adjacent
// cells, and each pass sets bit d on cells adjacent to any cell with a lower bit set.
assert(discrete_band<8); // the bitmask lives in a char, so at most 8 band distances fit
const ARRAY<VECTOR<int,T_GRID::number_of_neighbors_per_cell> >& cell_neighbors=grid.Short_Cell_Neighbors();
const ARRAY<VECTOR<bool,T_GRID::dimension> >& cell_neighbors_visible=levelset.collision_body_list->cell_neighbors_visible;
ARRAY<char> near_interface(grid.number_of_cells);
// Seed: mark both endpoints of every visible cell pair whose phi values straddle the interface.
// Only the positive-direction neighbor (index 2*axis) is checked per axis, which covers each
// pair exactly once.
for(int cell=1;cell<=grid.number_of_cells;cell++)for(int axis=1;axis<=T_GRID::dimension;axis++){int neighbor=cell_neighbors(cell)(2*axis);
if(neighbor && cell_neighbors_visible(cell)(axis) && LEVELSET_UTILITIES<T>::Interface(phi(cell),phi(neighbor))){
near_interface(cell)=near_interface(neighbor)=1;}}
// Widen the band one cell per pass: old_mask matches any cell already within distance-1.
for(int distance=1;distance<=discrete_band;distance++){
char new_mask=1<<distance,old_mask=new_mask-1;
for(int cell=1;cell<=grid.number_of_cells;cell++)for(int axis=1;axis<=T_GRID::dimension;axis++){int neighbor=cell_neighbors(cell)(2*axis);
if(neighbor && cell_neighbors_visible(cell)(axis) && (near_interface(cell)|near_interface(neighbor))&old_mask){
near_interface(cell)|=new_mask;near_interface(neighbor)|=new_mask;}}}
// A block keeps its particles if any of its cells is inside the band; the goto skips deletion.
for(BLOCK_ITERATOR block(grid,1);block;block++){int b=block.Block();if(negative_particles(b) || positive_particles(b)){
for(int i=0;i<T_GRID::number_of_cells_per_block;i++)if(near_interface(block.Cell(i))) goto NEAR_INTERFACE;
// if not near interface, delete particles
Free_Particle_And_Clear_Pointer(negative_particles(b));
Free_Particle_And_Clear_Pointer(positive_particles(b));
NEAR_INTERFACE:;}}
}
//#####################################################################
// Function Delete_Particles_In_Local_Maximum_Phi_Cells
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Delete_Particles_In_Local_Maximum_Phi_Cells()
{
// Deletes positive particles whose support circle covers the center of a cell that is a local
// maximum of phi within the first band (0 < phi < min edge length). Such maxima are isolated
// bumps of the level set where particles tend to collect spuriously.
// NOTE(review): only positive_particles are touched here; negative particles are presumably
// irrelevant in phi>0 cells — confirm against the uniform-grid counterpart.
T tolerance=levelset.small_number*grid.Minimum_Edge_Length();
const ARRAY<VECTOR<int,T_GRID::number_of_neighbors_per_cell> >& cell_neighbors=grid.Short_Cell_Neighbors();
for(CELL_ITERATOR cell(grid,1);cell;cell++){int c=cell.Cell();
if(phi(c)<=0 || phi(c)>=grid.Minimum_Edge_Length()) continue;
// A cell is a local maximum only if every neighbor exists and has phi no larger (within tolerance).
bool local_maximum=true;
for(int n=1;n<=T_GRID::number_of_neighbors_per_cell;n++){int neighbor=cell_neighbors(c)(n);
if(!neighbor || phi(c)<phi(neighbor)-tolerance){local_maximum=false;break;}}
if(!local_maximum) continue;
TV X=cell.X();TV_INT I=cell.I();
// Visit every block incident to this cell and cull particles overlapping the cell center.
for(int i=1;i<=T_GRID::number_of_cells_per_block;i++){
T_BLOCK block(grid,grid.uniform_grid.Node_Cell_Index(I,i)); // TODO: don't call a function with a name that makes no sense in this context
if(!block) continue;int b=block.Block();
if(positive_particles(b)){PARTICLE_LEVELSET_PARTICLES<TV>& particles=*positive_particles(b);assert(!particles.next);
for(int k=particles.array_collection->Size();k>=1;k--)if((particles.X(k)-X).Magnitude_Squared()<=sqr(particles.radius(k))) particles.array_collection->Delete_Element(k);}}}
}
//#####################################################################
// Function Transfer_Particles
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Transfer_Particles(const T_GRID& new_grid)
{
// Rebuilds all per-block particle arrays for a new grid layout. Particle bins are moved by
// pointer from old block indices to the corresponding new block indices; bins whose old block
// has no counterpart in new_grid are freed at the end.
ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*> new_positive_particles(new_grid.number_of_blocks),new_negative_particles(new_grid.number_of_blocks);
ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*> new_removed_positive_particles(use_removed_positive_particles?new_grid.number_of_blocks:0),
new_removed_negative_particles(use_removed_negative_particles?new_grid.number_of_blocks:0);
// Transfer ownership block by block; old slots are nulled so the cleanup below frees only leftovers.
for(BLOCK_ITERATOR old_block(grid,1);old_block;old_block++){
BLOCK_ITERATOR new_block(new_grid,old_block.I());if(!new_block) continue;
int b1=old_block.Block(),b2=new_block.Block();
new_negative_particles(b2)=negative_particles(b1);negative_particles(b1)=0;
new_positive_particles(b2)=positive_particles(b1);positive_particles(b1)=0;
if(use_removed_negative_particles){new_removed_negative_particles(b2)=removed_negative_particles(b1);removed_negative_particles(b1)=0;}
if(use_removed_positive_particles){new_removed_positive_particles(b2)=removed_positive_particles(b1);removed_positive_particles(b1)=0;}}
// Long-cell particles are re-binned against the new grid (and stragglers collected back).
if(use_removed_negative_particles_in_long_cells)
Transfer_Particles_In_Long_Cells(new_grid,removed_negative_particles,new_removed_negative_particles,*removed_negative_particles_in_long_cells);
if(use_removed_positive_particles_in_long_cells)
Transfer_Particles_In_Long_Cells(new_grid,removed_positive_particles,new_removed_positive_particles,*removed_positive_particles_in_long_cells);
// Swap the rebuilt arrays into place, then free whatever remained in the old arrays.
ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>::Exchange_Arrays(negative_particles,new_negative_particles);
ARRAY<PARTICLE_LEVELSET_PARTICLES<TV>*>::Exchange_Arrays(positive_particles,new_positive_particles);
ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*>::Exchange_Arrays(removed_negative_particles,new_removed_negative_particles);
ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*>::Exchange_Arrays(removed_positive_particles,new_removed_positive_particles);
for(int b=1;b<=grid.number_of_blocks;b++){Free_Particle_And_Clear_Pointer(new_negative_particles(b));Free_Particle_And_Clear_Pointer(new_positive_particles(b));}
new_removed_negative_particles.Delete_Pointers_And_Clean_Memory();new_removed_positive_particles.Delete_Pointers_And_Clean_Memory();
}
//#####################################################################
// Function Transfer_Particles_In_Long_Cells
//#####################################################################
template<class T_GRID> void PARTICLE_LEVELSET_RLE<T_GRID>::
Transfer_Particles_In_Long_Cells(const T_GRID& new_grid,ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*>& particles,ARRAY<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>*>& new_particles,
PARTICLE_LEVELSET_REMOVED_PARTICLES<TV>& particles_in_long_cells)
{
// First pass: particles currently pooled in long cells that now land inside a short-cell block
// of new_grid are moved into that block's bin (allocated on demand).
for(int k=particles_in_long_cells.array_collection->Size();k>=1;k--){
T_BLOCK new_block(new_grid,particles_in_long_cells.X(k));if(!new_block) continue;int b=new_block.Block();
if(!new_particles(b)) new_particles(b)=Allocate_Particle<PARTICLE_LEVELSET_REMOVED_PARTICLES<TV> >();
Move_Particle(particles_in_long_cells,*new_particles(b),k);}
// Second pass: any leftover per-block bins of the OLD grid (blocks with no new counterpart)
// dump their contents back into the long-cell pool and are freed.
for(int b=1;b<=grid.number_of_blocks;b++)if(particles(b)){
particles_in_long_cells.array_collection->Take(*particles(b)->array_collection);
Free_Particle_And_Clear_Pointer(particles(b));}
}
//#####################################################################
template class PARTICLE_LEVELSET_RLE<RLE_GRID_2D<float> >;
template class PARTICLE_LEVELSET_RLE<RLE_GRID_3D<float> >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class PARTICLE_LEVELSET_RLE<RLE_GRID_2D<double> >;
template class PARTICLE_LEVELSET_RLE<RLE_GRID_3D<double> >;
#endif
#endif
|
# Factory for MediaGallery::ImageScratch records. The class is given
# explicitly, so FactoryBot does not infer it from the factory name.
FactoryBot.define do
  factory :image_scratch, class: MediaGallery::ImageScratch do
    # Polymorphic owner of the scratch image; defaults to a user record.
    association :ownable, factory: :user
    association :image_version, factory: :image_version
  end
end
|
#!/bin/bash
# -------------------------------------------------------------------------- #
# Copyright 2015-2017, CSIR Centre for High Performance Computing            #
# Author: David Macleod & Israel Tshililo                                    #
#                                                                            #
# Licensed under the Apache License, Version 2.0 (the "License"); you may    #
# not use this file except in compliance with the License. You may obtain    #
# a copy of the License at                                                   #
#                                                                            #
# http://www.apache.org/licenses/LICENSE-2.0                                 #
#                                                                            #
# Unless required by applicable law or agreed to in writing, software        #
# distributed under the License is distributed on an "AS IS" BASIS,          #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
# See the License for the specific language governing permissions and        #
# limitations under the License.                                             #
#--------------------------------------------------------------------------- #
# Programs (or clears) the InfiniBand GUID of an SR-IOV virtual function so
# the VF's identity follows a VM's MAC address. Handles both ConnectX-4
# (mlx5) and older (mlx4) sysfs layouts.
#
# Arguments:
#   $1  MAC address to encode into the GUID, or the literal "clear_guid"
#   $2  VF index (0-based)
#   $3  HCA device number (mlx4_$3 / mlx5_$3)
#   $4  HCA port number
#   $5  PCI address of the VF (mellanox_address)
source $(dirname $0)/../kvmrc
mac_address=$1
# NOTE(review): $[ ... ] is deprecated shell arithmetic (prefer $(( ... ))),
# and vf_pos is never used below — confirm whether it can be dropped.
vf_pos=$[$2 + 1]
port_pos=$2
HCA_DEV=$3
HCA_PORT=$4
mellanox_address=$5
## CHECK if the vf is a Connectx-4 device
# Strip the PCI domain prefix, then look the device up in lspci output.
mHCA_addr=`echo $mellanox_address | cut -c 6-`
vHCA_type=`lspci | grep "$mHCA_addr" | grep -o 'ConnectX-4'`
if [ "$vHCA_type" == "ConnectX-4" ]; then
## Programm the GUIDS and VF maps for the mlx5 driver
if [ "$mac_address" == "clear_guid" ]; then
# Clear the VF's Port GUID
echo "Follow" > /sys/class/infiniband/mlx5_$HCA_DEV/device/sriov/$port_pos/policy
# Get the value of the Port GUID to be cleared
vf_mac=$(cat /sys/class/infiniband/mlx5_$HCA_DEV/device/sriov/$port_pos/port)
strip_mac=`echo "$vf_mac" | sed "s/://g" | tr '[:upper:]' '[:lower:]'`
vf_guid="0x$strip_mac"
## Update opensm partitions table
# Remove the GUID from the subnet manager's partition config via SSH.
discard=`su - oneadmin -c "ssh $SM_HOST -t sudo $(dirname $0)/wr_sm_pkeys.sh $mac_address $vf_guid"`
else
#Program the GUID with the IP encoded MAC: using mlx5 driver
echo "00:00:$mac_address" > /sys/class/infiniband/mlx5_$HCA_DEV/device/sriov/$port_pos/port
echo "Follow" > /sys/class/infiniband/mlx5_$HCA_DEV/device/sriov/$port_pos/policy
fi
else
## Programm the GUIDS and VF maps for the mlx4 driver
## Get the guids and VF maps needed for the mlx4 driver
# The gid_idx directory is keyed by PCI address; admin_guids is not —
# presumably matching the mlx4 sysfs layout. TODO confirm on target kernel.
guids=`ls /sys/class/infiniband/mlx4_$HCA_DEV/iov/$mellanox_address/ports/$HCA_PORT/gid_idx`
primary_guid=`echo $guids | cut -d ' ' -f1`
vf_map=`cat /sys/class/infiniband/mlx4_$HCA_DEV/iov/$mellanox_address/ports/$HCA_PORT/gid_idx/$primary_guid`
if [ "$mac_address" == "clear_guid" ]; then
#Clear the GUID so that it may be assigned to another VF
echo "0xffffffffffffffff" > /sys/class/infiniband/mlx4_$HCA_DEV/iov/ports/$HCA_PORT/admin_guids/$vf_map
else
#Program the GUID with the IP encoded MAC: using mlx4 driver
strip_mac=`echo "$mac_address" | sed "s/://g" | tr '[:upper:]' '[:lower:]'`
guid="0x$strip_mac"
echo "$guid" > /sys/class/infiniband/mlx4_$HCA_DEV/iov/ports/$HCA_PORT/admin_guids/$vf_map
fi
fi
exit 0
|
// @flow
// Sample comment
/**
* @flow
*/
/**
* Sample comment
*/
|
<reponame>ZicklePop/dash-pi
// Unit tests for the dateAsObject util.
import dateAsObject from '../utils/dateAsObject'
// Fixture date; no time component is given, so the Date defaults to
// local midnight. The expected object says hours: 12 — presumably the
// util renders hour 0 as 12 in 12-hour style. TODO confirm in the util.
const mockDateEpoc = new Date('Jan 19 1990')
const mockDateObj = {
  day: 19,
  hours: 12,
  minutes: '00', // minutes are zero-padded strings, not numbers
  month: 'Jan'
}
describe('util - dateAsObject', () => {
  it('returns null when no date passed in', () => {
    expect(dateAsObject()).toBeNull()
  })
  it('should return parsed object', () => {
    expect(dateAsObject(mockDateEpoc)).toEqual(mockDateObj)
  })
})
|
'use strict';
// Mocha tests for the ADODB Connection wrapper. They require Windows with
// the Microsoft Jet OLEDB provider and the bundled Northwind sample MDB.
const assert = require('assert');
const path = require('path');
const mdbPath = path.resolve(__dirname + '/media/Northwind2003.mdb');
const connStr = 'Provider=Microsoft.Jet.OLEDB.4.0;Data Source=' + mdbPath;
describe('Connection', function() {
  // parseDateFn parses dates correctly for every supported format template.
  it('parseDateFn правильно парсит даты', function() {
    const parseDateFn = require('../connection/speedup/parseDateFn');
    let d1, d2;
    d1 = parseDateFn('yyyy-MM-dd', 'H:m:s')('2018-01-05 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    d1 = parseDateFn('dd-MM-yyyy', 'H:m:s')('05-01-2018 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    d1 = parseDateFn('dd/MM/yyyy', 'H:m:s')('05/01/2018 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    d1 = parseDateFn('dd.MM.yyyy', 'H:m:s')('05.01.2018 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    d1 = parseDateFn('MM.dd.yyyy', 'H:m:s')('01.05.2018 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    d1 = parseDateFn('MM-dd-yyyy', 'H:m:s')('01-05-2018 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    d1 = parseDateFn('MM/dd/yyyy', 'H:m:s')('01/05/2018 12:34:56').toString();
    d2 = new Date('2018-01-05 12:34:56').toString();
    assert.equal(d1, d2);
    // Backslash-separated dates are intentionally left unparsed.
    d1 = parseDateFn('MM\\dd\\yyyy', 'H:m:s')('01\\05\\2018 12:34:56').toString();
    d2 = '01\\05\\2018 12:34:56'; // can`t parse this format
    assert.equal(d1, d2);
  });
  // Connection can be created and destroyed.
  it('Connection создается и уничтожается', function() {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.connect((err, connection) => {
      if (err) return console.error(err.message);
      connection.end();
    });
  });
  // SQL query returning integer, string, float and boolean values.
  // Note: Jet represents TRUE as -1 and FALSE as 0.
  it('Правильно выполняется SQL-запрос с integer, string, float, boolean', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.query(
      'SELECT 2*2 AS intValue, "string" AS strValue, 3.14151926 AS floatValue, ' +
      '1=1 AS trueValue, 1=0 AS falseValue;',
      (err, data) => {
        if (err) return done(err);
        try {
          assert.deepEqual(data, [
            {floatValue: 3.14151926, intValue: 4, strValue: 'string', trueValue: -1, falseValue: 0}
          ]);
        } catch (err) {
          connection.end();
          return done(err);
        }
        connection.end();
        done(null);
      }
    );
  });
  // SQL query returning a boolean column, including field metadata.
  it('Правильно выполняется SQL-запрос с boolean', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.query(
      'SELECT ProductID, Discontinued FROM Products WHERE ProductID IN (1, 5);',
      (err, data, fields) => {
        if (err) return done(err);
        try {
          assert.deepEqual(fields, [
            {Name: 'ProductID', Type: 3, Precision: 10, NumericScale: 255},
            {
              Name: 'Discontinued',
              Type: 11,
              Precision: 255,
              NumericScale: 255
            }
          ]);
          assert.deepEqual(data, [{ProductID: 1, Discontinued: false}, {ProductID: 5, Discontinued: true}]);
        } catch (err) {
          connection.end();
          return done(err);
        }
        connection.end();
        done(null);
      }
    );
  });
  // SQL query with a datetime literal (#...# is Jet's date syntax).
  it('Правильно выполняется SQL-запрос с datetime', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.query('SELECT #2018-01-01 00:00:00# AS dateValue;', (err, data) => {
      if (err) return done(err);
      try {
        assert.deepEqual(data[0]['dateValue'].getTime(), new Date('2018-01-01 00:00:00').getTime());
      } catch (err) {
        connection.end();
        return done(err);
      }
      connection.end();
      done(null);
    });
  });
  // SQL query producing NULLs of each type via a RIGHT JOIN.
  it('Правильно выполняется SQL-запрос с null', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.query(
      // If a query returns the value of a boolean EXPRESSION, the field type
      // is adSmallInt (2), not adBoolean (11). But if it returns a boolean
      // COLUMN, the field type is adBoolean (11).
      //
      // A numeric field with a NULL value comes back as 0.
      // A text field with a NULL value comes back as an empty string.
      //
      // Field types: https://msdn.microsoft.com/ru-ru/library/ms675318(v=vs.85).aspx
      'SELECT cr.CustomerID, cr.EmployeeID, ord.OrderID AS NumericNull, ord.OrderDate AS DateNull, ord.ShipName AS StringNull, IIF(ord.OrderID IS NULL, FALSE, TRUE) AS booleanValue FROM Orders ord RIGHT JOIN (SELECT CustomerID, EmployeeID FROM Customers, Employees) cr ON (ord.CustomerID = cr.Customers.CustomerID AND ord.EmployeeID = cr.EmployeeID) WHERE cr.CustomerID="ALFKI" AND cr.EmployeeID IN (1, 2) ORDER BY cr.CustomerID, cr.EmployeeID;',
      (err, data, fields) => {
        if (err) return done(err);
        try {
          assert.deepEqual(data, [
            {
              CustomerID: 'ALFKI',
              EmployeeID: 1,
              NumericNull: 10952,
              DateNull: new Date('1998-03-16 00:00:00'),
              StringNull: '<NAME>',
              booleanValue: -1
            },
            {
              CustomerID: 'ALFKI',
              EmployeeID: 1,
              NumericNull: 10835,
              DateNull: new Date('1998-01-15 00:00:00'),
              StringNull: 'Alfreds Futterkiste',
              booleanValue: -1
            },
            {
              CustomerID: 'ALFKI',
              EmployeeID: 2,
              NumericNull: 0,
              DateNull: null,
              StringNull: '',
              booleanValue: 0
            }
          ]);
        } catch (err) {
          connection.end();
          return done(err);
        }
        connection.end();
        done(null);
      }
    );
  });
  // Syntax errors in SQL are surfaced as provider error messages.
  it('Правильно обрабатываются синтаксические ошибки в SQL-запросе', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.query('syntax error', err => {
      if (err) {
        //console.log(err.message);
        if (err.message.indexOf('Microsoft JET Database Engine') >= 0) {
          done(null);
        } else {
          done(err);
        }
      } else {
        done(new Error());
      }
      connection.end();
    });
  });
  // query() also accepts a path to a .sql file instead of SQL text.
  it('Правильно выполняется SQL-запрос из файла', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    let filepath = path.join(__dirname, 'media/sql.sql');
    connection.query(filepath, (err, data) => {
      if (err) {
        done(err);
      } else {
        let fErr = false;
        try {
          assert.deepEqual(data, [{Ok: 'Ok'}]);
        } catch (err) {
          fErr = true;
          done(err);
        }
        if (!fErr) done(null);
      }
      connection.end();
    });
  });
  // Named :parameter substitution works for int, float, string and Date.
  it('Правильно выполняется подстановка именованных параметров', function(done) {
    const Connection = require('../connection/connection');
    let connection = new Connection(connStr);
    connection.query(
      'SELECT :intValue AS intValue, :floatValue AS floatValue, :stringValue AS stringValue, :dateValue AS DateValue',
      {intValue: 42, floatValue: Math.PI, stringValue: 'arghhhhh', dateValue: new Date('2018-01-01 00:00:00')},
      (err, data) => {
        if (err) return done(err);
        let fErr = false;
        try {
          assert.deepEqual(data, [
            {
              DateValue: new Date('2018-01-01 00:00:00'),
              floatValue: Math.PI,
              intValue: 42,
              stringValue: 'arghhhhh'
            }
          ]);
        } catch (err) {
          fErr = true;
          done(err);
        }
        if (!fErr) done(null);
        connection.end();
      }
    );
  });
});
|
<filename>codes/src/main/java/org/glamey/training/codes/strings/LongestPalindromicSubstring.java
package org.glamey.training.codes.strings;
import org.glamey.training.codes.Utils;
/**
* 给定一个字符串 s,找到 s 中最长的回文子串。你可以假设 s 的最大长度为 1000。
* <p>
* 示例 1:
* 输入: "babad"
* 输出: "bab"
* 注意: "aba" 也是一个有效答案。
* <p>
* 示例 2:
* 输入: "cbbd"
* 输出: "bb"
* 通过次数282,046提交次数922,997
* <p>
* 来源:力扣(LeetCode)
* 链接:https://leetcode-cn.com/problems/longest-palindromic-substring
* 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
*/
public class LongestPalindromicSubstring {

    public static void main(String[] args) {
        System.out.println(byComplex("babad"));
        System.out.println(byComplex("cbbd"));
        System.out.println(bySpread("babad"));
        System.out.println(bySpread("cbbd"));
        System.out.println(bySplit("babad"));
        System.out.println(bySplit("cbbd"));
    }

    /*************************************【暴力循环方式,遍历所有的字符串,然后判定每个字符串是否为回文】
     * *****************************************************/
    /**
     * Brute force: test every substring for palindromicity.
     * Time complexity O(N^3).
     *
     * @param source input string (may be null or empty)
     * @return the longest palindromic substring of {@code source}
     */
    public static String byComplex(String source) {
        if (Utils.isBlank(source) || source.length() < 2) {
            return source;
        }
        int start = 0, maxLen = 0;
        char[] chars = source.toCharArray();
        for (int i = 0; i < chars.length; i++) {
            for (int j = i + 1; j < chars.length; j++) {
                // Length check first so the O(N) palindrome test only runs
                // for candidates that could actually improve the answer.
                if (maxLen < j - i + 1 && isPalindrome(chars, i, j)) {
                    maxLen = j - i + 1;
                    start = i;
                }
            }
        }
        return source.substring(start, start + maxLen);
    }

    /** Returns true iff chars[left..right] reads the same in both directions. */
    private static boolean isPalindrome(char[] chars, int left, int right) {
        while (left <= right) {
            if (chars[left] != chars[right]) {
                return false;
            }
            left++;
            right--;
        }
        return true;
    }

    /**************************************[中心往外扩充的方式]****************************************************/
    /**
     * Center expansion: try every character (odd length) and every gap
     * (even length) as a palindrome center. O(N^2) time, O(1) extra space.
     *
     * @param source input string (may be null or empty)
     * @return the longest palindromic substring of {@code source}
     */
    public static String bySpread(String source) {
        if (Utils.isBlank(source) || source.length() < 2) {
            return source;
        }
        int maxLen = 1;
        String res = source.substring(0, 1);
        for (int i = 0; i < source.length(); i++) {
            // Same index twice -> odd-length palindrome centered on a char.
            String oddString = contentSpread(source, i, i);
            // Adjacent indices -> even-length palindrome centered on a gap.
            String evenString = contentSpread(source, i, i + 1);
            String maxString = oddString.length() > evenString.length() ? oddString : evenString;
            if (maxString.length() > maxLen) {
                maxLen = maxString.length();
                res = maxString;
            }
        }
        return res;
    }

    /**
     * Expands outwards from (left, right) while the characters match and
     * returns the widest palindrome found around that center.
     */
    private static String contentSpread(String source, int left, int right) {
        int i = left, j = right, len = source.length();
        while (i >= 0 && j < len) {
            if (source.charAt(i) != source.charAt(j)) {
                break;
            }
            i--;
            j++;
        }
        return source.substring(i + 1, j); // include startIndex, exclude endIndex
    }

    /***************************************【每个字符前后插入特殊字符的方式,这样不用判定是否为偶数个字符】
     * ***************************************************/
    /**
     * '#'-interleaved center expansion: expanding "abc" to "#a#b#c#" makes
     * every palindrome odd-length, so only one expansion per center is needed.
     * O(N^2) time.
     *
     * BUG FIX: the previous version returned new String(chars, start, maxLen)
     * where start was the CENTER index in the expanded array and maxLen the
     * EXPANDED length — the result contained '#' characters and could throw
     * StringIndexOutOfBoundsException (e.g. for "cbbd"). The arm length and
     * center are now mapped back to source-string coordinates.
     *
     * @param source input string (may be null or empty)
     * @return the longest palindromic substring of {@code source}
     */
    public static String bySplit(String source) {
        if (Utils.isBlank(source) || source.length() < 2) {
            return source;
        }
        char[] chars = generateBySplit(source);
        int bestLen = 0;    // palindrome length measured in the ORIGINAL string
        int bestCenter = 0; // center index in the expanded '#' array
        for (int i = 0; i < chars.length; i++) {
            int len = palindromeLength(chars, i);
            if (len > bestLen) {
                bestLen = len;
                bestCenter = i;
            }
        }
        // In the expanded array a palindrome centered at c with arm length a
        // spans [c - a, c + a]; mapped back to the source string its start is
        // (c - a) / 2 and its length is a.
        int start = (bestCenter - bestLen) / 2;
        return source.substring(start, start + bestLen);
    }

    /**
     * Expands around {@code index} in the '#'-interleaved array and returns
     * the arm length, which equals the palindrome's length in the source
     * string (e.g. "#b#a#b#" centered on 'a' has arm 3 -> "bab").
     */
    private static int palindromeLength(char[] chars, int index) {
        int i = index, j = index;
        int count = 0;
        while (i >= 0 && j < chars.length) {
            if (chars[i--] != chars[j++]) {
                break;
            }
            count++;
        }
        return count - 1; // count includes the center itself, so arm = count - 1
    }

    /** Builds the interleaved array: "abc" -> ['#','a','#','b','#','c','#']. */
    private static char[] generateBySplit(String source) {
        char[] chars = new char[source.length() * 2 + 1];
        int index = 0;
        for (int i = 0; i < source.length(); i++) {
            chars[index++] = '#';
            chars[index++] = source.charAt(i);
        }
        chars[index++] = '#';
        return chars;
    }
}
|
// Playlist consumed by the Audio component.
import { ISong } from "./Audio";

// Each entry's src is a path served from the app's public /audio directory.
const songList: ISong[] = [
  {
    src: "/audio/mogul.mp3",
    title: "MOGUL",
    artist: "Saint Marshall (Inspired by <NAME> & <NAME>)"
  },
  {
    src: "/audio/sunshine.mp3",
    title: "Can you feel the sunshine?",
    artist: "Sonic R"
  },
  {
    src: "/audio/countwhat.mp3",
    title: "Count What You Have Now",
    artist: "Vantage"
  },
  {
    src: "/audio/DigitalBaptism.mp3",
    title: "Digital Baptism",
    artist: "Falconite"
  },
  {
    src: "/audio/popcorncastle.mp3",
    title: "Popcorn Castle",
    artist: "<NAME>"
  },
  {
    src: "/audio/landing.mp3",
    title: "We're Finally Landing",
    artist: "HOME"
  },
  {
    src: "/audio/CANINE.mp3",
    title: "CANINE",
    artist: "Dojo"
  },
];
export default songList;
<filename>sys/admin/py/ping2_advanced.py
#! /usr/bin/env python3
# -*-coding:utf-8 -*-
# @Time : 2019/06/16 16:44:29
# @Author : che
# @Email : <EMAIL>
# 多队列和多线程池, 会把活动的结果加入到arping队列
import re
from threading import Thread
import subprocess
from queue import Queue
# Worker pool sizes for the two pipeline stages.
num_ping_threads = 3
num_arp_threads = 3
# pinger workers consume in_queue and feed responsive IPs into out_queue,
# where arping workers pick them up.
in_queue = Queue()
out_queue = Queue()
# Scan the whole 192.168.1.0/24: hosts .1 through .254.
ips = ["192.168.1.%d" % ip for ip in range(1, 255)]
def pinger(i, iq, oq):
    """Ping worker: take IPs from ``iq`` and push responsive ones onto ``oq``.

    Runs forever, so it must be started as a daemon thread.

    :param i: worker index (only useful for debug output)
    :param iq: queue of IP strings to probe
    :param oq: queue that receives IPs which answered the ping
    """
    while True:
        ip = iq.get()
        # subprocess.DEVNULL instead of open("/dev/null", "w"): the old code
        # opened (and leaked) a fresh file object on every single ping.
        ret = subprocess.call(
            "ping -c1 %s " % ip,
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.STDOUT,
        )
        if ret == 0:
            oq.put(ip)
        # Non-responders are simply dropped.
        iq.task_done()
# Matches any token containing a colon; MAC addresses are the only such
# tokens in arping's output that we care about. Hoisted to module level so
# it is compiled once instead of once per IP.
_MAC_TOKEN = re.compile(":")


def arping(i, oq):
    """ARP worker: print the MAC address for each IP taken from ``oq``.

    Runs forever, so it must be started as a daemon thread.

    :param i: worker index (unused except for symmetry with pinger)
    :param oq: queue of responsive IP strings produced by pinger
    """
    while True:
        ip = oq.get()
        p = subprocess.Popen(
            "arping -I wlp3s0 -c5 %s" % ip, shell=True, stdout=subprocess.PIPE
        )
        out = p.stdout.read().decode()
        p.wait()  # reap the child so it does not linger as a zombie
        # First colon-containing token in the output is taken as the MAC.
        macaddr = None
        for item in out.split():
            if _MAC_TOKEN.search(item):
                macaddr = item
                break
        print("IP Address: %s | Mac Address: %s" % (ip, macaddr))
        oq.task_done()
def main():
    """Start both worker pools, enqueue all IPs and wait for completion."""
    # Ping worker pool. daemon=True replaces the setDaemon() call, which
    # has been deprecated since Python 3.10.
    for i in range(num_ping_threads):
        worker = Thread(target=pinger, args=(i, in_queue, out_queue), daemon=True)
        worker.start()
    # Arping worker pool.
    for i in range(num_arp_threads):
        worker = Thread(target=arping, args=(i, out_queue), daemon=True)
        worker.start()
    # Enqueue the work; the two stages hand results over via the queues.
    for ip in ips:
        in_queue.put(ip)
    print("Main Thread Waiting")
    in_queue.join()
    out_queue.join()
    print("Done")


if __name__ == "__main__":
    # Guarded so importing this module no longer triggers a network scan.
    main()
|
# Launch the CraftBukkit Minecraft server with a 1 GB maximum Java heap.
java -Xmx1G -jar craftbukkit.jar
# Copyright (c) 2012 Bingo Entreprenøren AS
# Copyright (c) 2012 Teknobingo Scandinavia AS
# Copyright (c) 2012 <NAME>
# Copyright (c) 2012 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Configure Rails Environment
ENV["RAILS_ENV"] = "test"
# Boot the dummy host application that embeds this engine for testing.
require File.expand_path("../dummy/config/environment.rb", __FILE__)
ActiveRecord::Migrator.migrations_paths = [File.expand_path("../../test/dummy/db/migrate", __FILE__)]
require "rails/test_help"
require 'mocha/mini_test'
# Filter out Minitest backtrace while allowing backtrace from other libraries
# to be shown.
Minitest.backtrace_filter = Minitest::BacktraceFilter.new
# Load support files
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
# Load fixtures from the engine
if ActiveSupport::TestCase.method_defined?(:fixture_path=)
  ActiveSupport::TestCase.fixture_path = File.expand_path("../fixtures", __FILE__)
end
class ActionController::TestCase
  # Signs the controller in as the user named after +role+, creating that
  # user on first use, and mirrors it into User.current.
  def login_as(role = :guest)
    User.current = @controller.send(:current_user=, User.find_or_create_by(name: role.to_s))
  end
end
<filename>oilisoft/src/main/java/frontend/labels/clickmenus/NodeClickMenuItem.java
package frontend.labels.clickmenus;
import frontend.labels.NodeLabel;
import javax.swing.*;
/** Custom JMenuItem for click menus
* @author <NAME>
* @since 1.0
* @version 1.0
*/
/**
 * Menu entry used inside the node click menus. Every item remembers the
 * {@link ClickMenuAction} it should trigger and the {@link NodeLabel} the
 * menu was opened on, so listeners can recover both from the clicked item.
 *
 * @author <NAME>
 * @since 1.0
 * @version 1.0
 */
public class NodeClickMenuItem extends JMenuItem {

    /** Action to execute when this item is selected. */
    private ClickMenuAction menuAction;

    /** Label of the node on which the click menu was invoked. */
    private NodeLabel sourceLabel;

    /**
     * Creates a NodeClickMenuItem that displays {@code text} and carries
     * the given action and originating node label.
     *
     * @param text the text which has to be displayed
     * @param nodeLabel the NodeLabel the Entity(Right/Left)ClickMenu was invoked on
     * @param action the associated action
     */
    public NodeClickMenuItem(String text, NodeLabel nodeLabel, ClickMenuAction action) {
        super(text);
        this.menuAction = action;
        this.sourceLabel = nodeLabel;
    }

    /** @return the action bound to this menu item */
    public ClickMenuAction getClickMenuAction() {
        return menuAction;
    }

    /** @return the node label the click menu was opened on */
    public NodeLabel getNodeLabel() {
        return sourceLabel;
    }
}
|
#!/bin/bash
# Run the iOS smoke suite: the "ios" profile supplies device settings,
# "html_report" adds an HTML formatter, and -t @smoke restricts the run
# to scenarios tagged @smoke.
echo "===== RUNNING SMOKE SUITE IOS ====================="
cucumber -p ios -p html_report -t @smoke
|
#!/bin/sh
# Power off lab machines pc19 down through pc0 over ssh, one at a time.
x=19
while [ "$x" -ge 0 ]
do
    echo "stopping pc$x"
    ssh pc$x 'sudo init 0'
    x=$((x - 1))
done
|
<reponame>felipebaloneker/Practice<gh_stars>0
/**
 * Checks whether three positive numbers all end in the same last digit.
 *
 * Fixes over the original: explicit `false` for non-positive inputs (the
 * old code fell through and returned `undefined` — both falsy, so callers
 * are unaffected), strict equality instead of `==`, and the redundant
 * third comparison removed (equality is transitive).
 *
 * @param {number} x
 * @param {number} y
 * @param {number} z
 * @returns {boolean} true iff x, y, z are all positive and share a last digit
 */
function number(x, y, z) {
  if (x <= 0 || y <= 0 || z <= 0) {
    return false;
  }
  return x % 10 === y % 10 && y % 10 === z % 10;
}

console.log(number(100, 200, 300));
console.log(number(43, 63, 23));
console.log(number(211, 13, 46));
const Discord = require("discord.js");
const fetch = require("node-fetch");
module.exports = {
name: "wiki",
cooldown: 8000,
category: "info",
uimage:'',
usage: "`wiki <query>`",
description: "Returns the article from Wikipedia",
run: async (client, message, args) => {
const body = await fetch(
`https://en.wikipedia.org/api/rest_v1/page/summary/${encodeURIComponent(
args.join(" ")
)}`
).then(res => res.json().catch(() => {}));
if (!body)
return message.channel.sendmessage.channel.send({
embed: {
color: "RANDOM",
title: "❌ Error Page Not Found."
}
});
if (body.title && body.title === "Not found.")
return message.channel.send({
embed: {
color: "RANDOM",
title: "❌ Error Page Not Found."
}
});
const embed = new Discord.MessageEmbed()
.setTitle(`🌐 ${body.title} `)
.addField(
"More Info: ",
`**[Click Here!](${body.content_urls.desktop.page})**`,
true
)
.setDescription(`** ${body.extract}**`)
.setColor(`RANDOM`)
.setTimestamp();
if (body.thumbnail) embed.setThumbnail(body.thumbnail.source);
message.channel.send(embed);
}
}; |
<reponame>mplushnikov/doctor-jim
package de.plushnikov.doctorjim.javaparser;
import de.plushnikov.doctorjim.ElementPosition;
import java.util.Collection;
import java.util.HashSet;
import java.util.TreeSet;
/**
* Baseclass of generated JavaParser.
* This class provides functionality for collection of import-information during the parser run.
*
* @author <NAME>
* @version $Id: $
*/
public abstract class AbstractJavaParser {
/**
* Main package of the class
*/
private ElementPosition mPackage;
/**
* All imports from the file
*/
private Collection<ElementPosition> mImports = new HashSet<ElementPosition>();
/**
* All local defined types
*/
private Collection<String> mLocalTypes = new HashSet<String>();
/**
* All type elements which should be processed
*/
private Collection<String> mTypes = new TreeSet<String>();
///////////////////////////////////////////////////////////////////////////
/**
* Sets informations about the package declaration
*
* @param pPackage package name
* @param pStart start position Token
* @param pEnd end position Token
*/
public void setPackage(String pPackage, Token pStart, Token pEnd) {
mPackage = new ElementPosition(pPackage, pStart, pEnd);
}
/**
* Adds new import entry
*
* @param pImport name of the import
* @param pStatic true if this is an static import
* @param pImportAll true if this is a star import
* @param pStart start position Token
* @param pEnd end position Token
*/
protected void addImport(String pImport, boolean pStatic, boolean pImportAll, Token pStart, Token pEnd) {
StringBuilder lImport = new StringBuilder(50);
if (pStatic) {
lImport.append("static ");
}
lImport.append(pImport);
if (pImportAll) {
lImport.append(".*");
}
mImports.add(new ElementPosition(lImport.toString(), pStart, pEnd));
}
/**
* Adds new type
*
* @param pType a {@link java.lang.String} object.
*/
protected void addType(String pType) {
mTypes.add(pType);
}
/**
* <p>addAnnotation</p>
*
* @param pToken a {@link de.plushnikov.doctorjim.javaparser.Token} object.
*/
protected void addAnnotation(Token pToken) {
addType(pToken.image);
}
/**
* <p>addThrows</p>
*
* @param pToken a {@link de.plushnikov.doctorjim.javaparser.Token} object.
*/
protected void addThrows(Token pToken) {
addType(pToken.image);
}
/**
* <p>addInternType</p>
*
* @param pToken a {@link de.plushnikov.doctorjim.javaparser.Token} object.
*/
protected void addInternType(Token pToken) {
final String lNameValue = pToken.image;
final String[] lParts = lNameValue.split("\\.");
if (lParts.length > 1) {
StringBuilder lTypeValue = new StringBuilder(lNameValue.length());
boolean lFound = false;
for (String lPart : lParts) {
lTypeValue.append(lPart);
// uses java conventions to revise type
if (lPart.matches("\\p{Lu}.*")) {
lFound = true;
break;
}
lTypeValue.append('.');
}
if (lFound && lTypeValue.indexOf(".") > -1) {
addType(lTypeValue.toString());
}
}
}
/**
 * <p>addClassOrInterfaceName</p>
 *
 * <p>Records a class or interface declared in this compilation unit as a local type.</p>
 *
 * @param pToken a {@link de.plushnikov.doctorjim.javaparser.Token} object.
 */
protected void addClassOrInterfaceName(Token pToken) {
mLocalTypes.add(pToken.image);
}
/**
 * <p>addEnumName</p>
 *
 * <p>Records an enum declared in this compilation unit as a local type.</p>
 *
 * @param pToken a {@link de.plushnikov.doctorjim.javaparser.Token} object.
 */
protected void addEnumName(Token pToken) {
mLocalTypes.add(pToken.image);
}
/**
 * <p>addAnnotationName</p>
 *
 * <p>Records an annotation type declared in this compilation unit as a local type.</p>
 *
 * @param pToken a {@link de.plushnikov.doctorjim.javaparser.Token} object.
 */
protected void addAnnotationName(Token pToken) {
mLocalTypes.add(pToken.image);
}
///////////////////////////////////////////////////////////////////////////
/**
 * Getter for the main package
 *
 * @return information about the main package (name plus token span);
 *         may be null if no package declaration was recorded — TODO confirm
 *         how the field is initialized
 */
public ElementPosition getPackage() {
return mPackage;
}
/**
 * Getter for all imports
 *
 * <p>Returns the live internal collection, not a defensive copy.</p>
 *
 * @return information about the imports
 */
public Collection<ElementPosition> getImports() {
return mImports;
}
/**
 * Getter for all local defined types
 *
 * <p>Returns the live internal collection, not a defensive copy.</p>
 *
 * @return collection of local defined types
 */
public Collection<String> getLocalTypes() {
return mLocalTypes;
}
/**
 * Getter for all Types
 *
 * <p>Returns the live internal collection, not a defensive copy.</p>
 *
 * @return information about all Types
 */
public Collection<String> getTypes() {
return mTypes;
}
}
|
<filename>src/main/java/org/la4j/inversion/NoPivotGaussInverter.java
/*
* Copyright 2011-2015, by <NAME> and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): -
*
*/
package org.la4j.inversion;
import org.la4j.Matrix;
/**
 * Matrix inverter based on in-place Gauss-Jordan elimination without row
 * pivoting. Only works for square matrices whose diagonal never becomes
 * (effectively) zero during the elimination; no row swapping is performed.
 */
public class NoPivotGaussInverter implements MatrixInverter {

    /** The matrix to invert; this class never modifies it. */
    private final Matrix matrix;

    public NoPivotGaussInverter(final Matrix matrix) {
        this.matrix = matrix;
    }

    /**
     * Computes the inverse by Gauss-Jordan elimination performed in place on
     * a copy of the source matrix (the identity augmentation is implicit).
     *
     * @return a new matrix holding the inverse
     * @throws IllegalArgumentException if the matrix is not square, or if a
     *         diagonal entry is (effectively) zero during elimination
     */
    @Override
    public Matrix inverse() {
        if (matrix.rows() != matrix.columns()) {
            throw new IllegalArgumentException("Wrong matrix size: " + "rows != columns");
        }

        final int size = matrix.rows();
        final Matrix result = matrix.copy();

        for (int k = 0; k < size; k++) {
            final double pivot = result.get(k, k);
            // NOTE(review): Double.MIN_VALUE is the smallest positive denormal
            // (~4.9e-324), so this test only rejects pivots that are exactly
            // zero or denormal — it is not a numerical-stability threshold.
            if (Math.abs(pivot) <= Double.MIN_VALUE) {
                throw new IllegalArgumentException(
                        "This matrix cannot be inverted with a non-pivoting Gauss elimination method.");
            }

            // Normalize row k; writing 1.0 into (k,k) first realizes the
            // implicit identity column of the in-place scheme.
            final double pivotInverse = 1.0 / pivot;
            result.set(k, k, 1.0);
            for (int column = 0; column < size; column++) {
                result.set(k, column, result.get(k, column) * pivotInverse);
            }

            // Eliminate column k from every other row.
            for (int row = 0; row < size; row++) {
                if (row == k) {
                    continue;
                }
                final double factor = result.get(row, k);
                result.set(row, k, 0.0);
                for (int column = 0; column < size; column++) {
                    result.set(row, column, result.get(row, column) - factor * result.get(k, column));
                }
            }
        }

        return result;
    }

    @Override
    public Matrix self() {
        return matrix;
    }
}
|
package cn.cerc.jbean.book;
// Ledger (account book) life-cycle contract, extending the enrollment interface.
public interface IBook extends IBookEnroll {
// Initialization (called only once)
public void init(IBookManage manage);
// Initialization (called again every month)
public void ready();
// Persist the ledger's updates to the database
public void save();
}
|
from django.urls import path
from . import views
# Control-panel routes for course administration.
urlpatterns = [
path("controlpanel/courses/", views.controlpanel_course_list, name="controlpanel_course_list"),
path("controlpanel/courses/<int:id>/delete/", views.controlpanel_course_delete, name="controlpanel_course_delete"),
# NOTE(review): the next two routes share the same name
# ("controlpanel_course_form"); reverse()/{% url %} resolves a name to the
# last matching pattern, so only one of them is reachable by name.
# Confirm this is intentional or give them distinct names.
path("controlpanel/courses/<int:id>/edit/", views.controlpanel_course_form, name="controlpanel_course_form"),
path("controlpanel/courses/create/", views.controlpanel_course_form, name="controlpanel_course_form"),
path("controlpanel/courses/<int:id>/<int:content>/", views.controlpanel_course_content, name="controlpanel_course_content"),
] |
package com.yin.springboot.user.center.domain;
import java.io.Serializable;
import javax.persistence.*;
import lombok.Data;
// JPA entity mapped to the "clientdetails" table: client registration data
// (credentials, scopes, grant types, token lifetimes).
// Lombok's @Data generates getters/setters/equals/hashCode/toString.
@Data
@Table(name = "clientdetails")
public class Clientdetails implements Serializable {
// primary key, mapped to camelCase column "appId"
@Id
@Column(name = "appId")
private String appid;
@Column(name = "resourceIds")
private String resourceids;
// client secret
@Column(name = "appSecret")
private String appsecret;
// backticks quote "scope", which is a reserved word in some databases
@Column(name = "`scope`")
private String scope;
@Column(name = "grantTypes")
private String granttypes;
@Column(name = "redirectUrl")
private String redirecturl;
@Column(name = "authorities")
private String authorities;
// token validity values -- presumably seconds; confirm against the auth server
@Column(name = "access_token_validity")
private Integer accessTokenValidity;
@Column(name = "refresh_token_validity")
private Integer refreshTokenValidity;
@Column(name = "additionalInformation")
private String additionalinformation;
@Column(name = "autoApproveScopes")
private String autoapprovescopes;
private static final long serialVersionUID = 1L;
} |
#!/bin/zsh
# Micro-benchmark: compares the cost of different ways to check whether a
# command exists in zsh. Each lookup method is repeated $N times and timed.
N=2000000
cmd=ls
# custom output format for zsh's `time` builtin
TIMEFMT=$'\nreal\t%E\nuser\t%U\nsys\t%S'
echo
echo "type:"
time (repeat $N {type $cmd &>/dev/null})
echo
echo "hash:"
time (repeat $N {hash $cmd &>/dev/null})
echo
echo "command -v:"
time (repeat $N {command -v $cmd &>/dev/null})
echo
echo "which:"
time (repeat $N {which $cmd &>/dev/null})
echo
echo '$+commands:'
# (( $+commands[$cmd] )) checks zsh's command hash table directly (no fork)
time (repeat $N { (( $+commands[$cmd] )) })
|
<gh_stars>0
# Advent of Code 2020, day 8: interpreter for a tiny acc/jmp/nop machine.
# NOTE(review): the path below uses a backslash and has no ".txt" extension;
# kept verbatim — confirm it matches the real input file (a forward slash
# would be needed on non-Windows systems).
with open("input_files\day08_input_mp_txt") as file:
    instructions = file.read().split("\n")

# part 1: run the program as-is and report the accumulator value just before
# any instruction would execute a second time (i.e. the infinite loop starts).
# Each instruction becomes [opcode, argument-string].
cleaned_instructions = [instruction.split(" ") for instruction in instructions]
passed_instructions = set()
accumulator = 0
current_position = 0
# (fix: the original repeated the `not in passed_instructions` membership test
# in a redundant nested `if` inside the loop — the loop condition already
# guarantees it, so the inner check has been removed)
while current_position not in passed_instructions:
    passed_instructions.add(current_position)
    if cleaned_instructions[current_position][0] == "acc":
        accumulator += int(cleaned_instructions[current_position][1])
        current_position += 1
    elif cleaned_instructions[current_position][0] == "jmp":
        current_position = current_position + int(cleaned_instructions[current_position][1])
    else:  # "nop"
        current_position += 1
print(accumulator)

# part 2: flip exactly one jmp<->nop and find the variant whose run reaches
# the last instruction instead of looping forever.
for i in range(0, len(instructions)-1):
    # set of instructions to keep track of possible infinite loops
    passed_instructions = set()
    accumulator = 0
    current_position = 0
    # switch instructions routine (acc instructions are left untouched, so
    # those iterations simply re-run the unmodified program)
    if cleaned_instructions[i][0] == "nop":
        cleaned_instructions[i][0] = "jmp"
    elif cleaned_instructions[i][0] == "jmp":
        cleaned_instructions[i][0] = "nop"
    # run until an instruction repeats (loop) or the last instruction has run
    while current_position not in passed_instructions and len(cleaned_instructions)-1 not in passed_instructions:
        passed_instructions.add(current_position)
        if cleaned_instructions[current_position][0] == "acc":
            accumulator += int(cleaned_instructions[current_position][1])
            current_position += 1
        elif cleaned_instructions[current_position][0] == "jmp":
            current_position = current_position + int(cleaned_instructions[current_position][1])
        else:
            current_position += 1
    # print solution and stop in case we executed the last instruction
    # (the flip is deliberately not reverted here — the loop ends anyway)
    if len(cleaned_instructions)-1 in passed_instructions:
        print("accumulator", accumulator)
        break
    # revert changes in instructions before next iteration starts
    if cleaned_instructions[i][0] == "nop":
        cleaned_instructions[i][0] = "jmp"
    elif cleaned_instructions[i][0] == "jmp":
        cleaned_instructions[i][0] = "nop"
<filename>src/test/java/th/ac/kmitl/it/prip/fractal/ParametersFromToTest.java
package th.ac.kmitl.it.prip.fractal;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
// Parameterized test verifying parsing of the "fromto <a>-<b>" option.
@RunWith(Parameterized.class)
public class ParametersFromToTest {
// raw option string handed to Parameters
private String inputParam;
// expected getFromIdx() value (converted to 0-based in the constructor)
private int expectedFrom;
// expected getToIdx() value, used as-is
private int expectedTo;
@Parameterized.Parameters
public static Collection<Object[]> setFromTo() {
// NOTE(review): the last case ("fromto 1 1" -> 0, -1) appears to expect a
// malformed value to fall back to defaults (-1/-1 after the constructor's
// conversion) — verify against the Parameters implementation.
return Arrays.asList(new Object[][] { { "fromto 1-1", 1, 1 }, { "fromto 1-5", 1, 5 }, { "fromto 5-13", 5, 13 },
{ "fromto 5-1", 5, 1 }, { "fromto 1 1", 0, -1 } });
}
public ParametersFromToTest(String inputParam, int expectedFrom, int expectedTo) {
this.inputParam = inputParam;
// the "from" index is 1-based on the command line but 0-based internally
this.expectedFrom = expectedFrom - 1;
this.expectedTo = expectedTo;
}
@Test
public void correctSetting() throws IOException {
Parameters parameters = new Parameters(new String[] { inputParam, "processname compress" });
assertEquals(this.expectedFrom, parameters.getFromIdx());
assertEquals(this.expectedTo, parameters.getToIdx());
}
}
|
<gh_stars>0
/**
 * Options that control how vertex-like shader stages (any stage that can
 * write BuiltInPosition) are emitted during cross-compilation.
 */
export class GLSLVertexOptions
{
// "Vertex-like shader" here is any shader stage that can write BuiltInPosition.
// GLSL: In vertex-like shaders, rewrite [0, w] depth (Vulkan/D3D style) to [-w, w] depth (GL style).
// MSL: In vertex-like shaders, rewrite [-w, w] depth (GL style) to [0, w] depth.
// HLSL: In vertex-like shaders, rewrite [-w, w] depth (GL style) to [0, w] depth.
fixup_clipspace: boolean = false;
// In vertex-like shaders, inverts gl_Position.y or equivalent.
flip_vert_y: boolean = false;
// GLSL only, for HLSL version of this option, see CompilerHLSL.
// If true, the backend will assume that InstanceIndex will need to apply
// a base instance offset. Set to false if you know you will never use base instance
// functionality as it might remove some internal uniforms.
support_nonzero_base_instance: boolean = true;
}
#!/bin/tcsh
#PBS -N sg_sg_input
#PBS -j oe
#PBS -q DEFAULT
#PBS -l select=1:ncpus=64:mpiprocs=1
# PBS batch job: runs the autosub step for one space-group input via the
# "shry" conda environment.
# Load the Intel oneAPI toolchain modules.
source /etc/profile.d/modules.csh
module purge
module load oneapi-intel/2021.1.1
module load oneapi/mpi/2021.1.1
# run single-threaded
setenv OMP_NUM_THREADS 1
# work from the directory the job was submitted from
cd ${PBS_O_WORKDIR}
#cat ${PBS_NODEFILE} > nodelist
#activate shry
setenv PATH "/home/nkousuke/application/anaconda3/bin:$PATH"
conda activate shry
#autosub
set sg=sg_input
# run autosub on input SG${sg}, redirecting stdout to out_autosub_SG${sg}
python ./autosub.py SG${sg}> out_autosub_SG${sg}
|
#!/usr/bin/env bash
# Brings up the QuantumLeap test stack with docker-compose, runs the reporter
# test suite, tears the stack down, and exits with pytest's status.
docker-compose build quantumleap-db-setup
docker-compose pull crate
docker-compose pull timescale
docker-compose pull redis
docker-compose up -d
# fixed wait for the containers to come up (no health-check polling)
sleep 20
# Set test QL config file
export QL_CONFIG='src/reporter/tests/ql-config.yml'
cd ../../../
pytest src/reporter/ \
--cov-report= --cov-config=.coveragerc --cov-append --cov=src/ \
--junitxml=test-results/junit-reporter.xml
# capture pytest's exit status before the directory change resets $?
r=$?
cd -
docker-compose down -v
# propagate the test result to the caller/CI
exit $r
|
<gh_stars>100-1000
export * from './common';
export * from './theme';
|
# Download and unpack the INRIA Aerial Image Labeling dataset.
sudo apt install p7zip-full
mkdir -p .data/inria_ail
# The archive is split into five 7z volumes; fetch them all.
# (--no-check-certificate works around the host's certificate issues.)
for part in 001 002 003 004 005; do
    wget --no-check-certificate "https://files.inria.fr/aerialimagelabeling/aerialimagelabeling.7z.${part}"
done
# Extracting the first volume automatically consumes the remaining ones.
7z x aerialimagelabeling.7z.001
# NOTE(review): this extracts into .data/, not the .data/inria_ail directory
# created above — confirm which destination is intended.
unzip NEW2-AerialImageDataset.zip -d .data/
# -i prompts before each removal, so this script expects an interactive run.
rm -i aerialimagelabeling.7z.*
rm -i NEW2-AerialImageDataset.zip
|
#!/bin/bash
function cronScheduleMoreOftenThan30Minutes() {
  # Takes an unexpanded cron schedule string; returns 0 (true) when its
  # minute field makes the job run more often than every 30 minutes.
  #
  # Bugfix: the previous `MINUTE=$(echo $1 | (read -a ARRAY; ...))` left $1
  # unquoted, so schedules such as "* * * * *" underwent pathname expansion
  # and the extracted minute field could end up being a filename. A quoted
  # here-string avoids globbing and word splitting of the argument.
  local SCHEDULE_FIELDS
  read -r -a SCHEDULE_FIELDS <<< "$1"
  local MINUTE=${SCHEDULE_FIELDS[0]}
  if [[ $MINUTE =~ ^(M|H|\*)\/([0-5]?[0-9])$ ]]; then
    # Match found for M/xx, H/xx or */xx
    # Check if xx is smaller than 30, which means this cronjob runs more often than every 30 minutes.
    local STEP=${BASH_REMATCH[2]}
    if [ $STEP -lt 30 ]; then
      return 0
    else
      return 1
    fi
  elif [[ $MINUTE =~ ^\*$ ]]; then
    # We are running every minute
    return 0
  else
    # all other cases are more often than 30 minutes
    return 1
  fi
}
function contains() {
  # Succeeds (returns 0) when $2 occurs as a whitespace-delimited word in $1.
  if [[ $1 =~ (^|[[:space:]])$2($|[[:space:]]) ]]; then
    return 0
  fi
  return 1
}
##############################################
### PREPARATION
##############################################
# Load path of docker-compose that should be used
set +x # reduce noise in build logs
DOCKER_COMPOSE_YAML=($(cat .lagoon.yml | shyaml get-value docker-compose-yaml))
DEPLOY_TYPE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.deploy-type default)
# Load all Services that are defined
COMPOSE_SERVICES=($(cat $DOCKER_COMPOSE_YAML | shyaml keys services))
# Default shared mariadb service broker
MARIADB_SHARED_DEFAULT_CLASS="lagoon-dbaas-mariadb-apb"
MONGODB_SHARED_DEFAULT_CLASS="lagoon-maas-mongodb-apb"
# Figure out which services should we handle
SERVICE_TYPES=()
IMAGES=()
NATIVE_CRONJOB_CLEANUP_ARRAY=()
DBAAS=()
declare -A MAP_DEPLOYMENT_SERVICETYPE_TO_IMAGENAME
declare -A MAP_SERVICE_TYPE_TO_COMPOSE_SERVICE
declare -A MAP_SERVICE_NAME_TO_IMAGENAME
declare -A MAP_SERVICE_NAME_TO_SERVICEBROKER_CLASS
declare -A MAP_SERVICE_NAME_TO_SERVICEBROKER_PLAN
declare -A MAP_SERVICE_NAME_TO_DBAAS_ENVIRONMENT
declare -A IMAGES_PULL
declare -A IMAGES_BUILD
declare -A IMAGE_HASHES
HELM_ARGUMENTS=()
. /kubectl-build-deploy/scripts/kubectl-get-cluster-capabilities.sh
for CAPABILITIES in "${CAPABILITIES[@]}"; do
HELM_ARGUMENTS+=(-a "${CAPABILITIES}")
done
set -x
set +x # reduce noise in build logs
# Allow the servicetype be overridden by the lagoon API
# This accepts colon separated values like so `SERVICE_NAME:SERVICE_TYPE_OVERRIDE`, and multiple overrides
# separated by commas
# Example 1: mariadb:mariadb-dbaas < tells any docker-compose services named mariadb to use the mariadb-dbaas service type
# Example 2: mariadb:mariadb-dbaas,nginx:nginx-persistent
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
LAGOON_SERVICE_TYPES=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_SERVICE_TYPES") | "\(.value)"'))
fi
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
TEMP_LAGOON_SERVICE_TYPES=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_SERVICE_TYPES") | "\(.value)"'))
if [ ! -z $TEMP_LAGOON_SERVICE_TYPES ]; then
LAGOON_SERVICE_TYPES=$TEMP_LAGOON_SERVICE_TYPES
fi
fi
# Allow the dbaas environment type to be overridden by the lagoon API
# This accepts colon separated values like so `SERVICE_NAME:DBAAS_ENVIRONMENT_TYPE`, and multiple overrides
# separated by commas
# Example 1: mariadb:production < tells any docker-compose services named mariadb to use the production dbaas environment type
# Example 2: mariadb:production,mariadb-test:development
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
LAGOON_DBAAS_ENVIRONMENT_TYPES=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_DBAAS_ENVIRONMENT_TYPES") | "\(.value)"'))
fi
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
TEMP_LAGOON_DBAAS_ENVIRONMENT_TYPES=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_DBAAS_ENVIRONMENT_TYPES") | "\(.value)"'))
if [ ! -z $TEMP_LAGOON_DBAAS_ENVIRONMENT_TYPES ]; then
LAGOON_DBAAS_ENVIRONMENT_TYPES=$TEMP_LAGOON_DBAAS_ENVIRONMENT_TYPES
fi
fi
set -x
for COMPOSE_SERVICE in "${COMPOSE_SERVICES[@]}"
do
# The name of the service can be overridden, if not we use the actual servicename
SERVICE_NAME=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.name default)
if [ "$SERVICE_NAME" == "default" ]; then
SERVICE_NAME=$COMPOSE_SERVICE
fi
# Load the servicetype. If it's "none" we will not care about this service at all
SERVICE_TYPE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.type custom)
# Allow the servicetype to be overriden by environment in .lagoon.yml
ENVIRONMENT_SERVICE_TYPE_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.types.$SERVICE_NAME false)
if [ ! $ENVIRONMENT_SERVICE_TYPE_OVERRIDE == "false" ]; then
SERVICE_TYPE=$ENVIRONMENT_SERVICE_TYPE_OVERRIDE
fi
if [ ! -z "$LAGOON_SERVICE_TYPES" ]; then
IFS=',' read -ra LAGOON_SERVICE_TYPES_SPLIT <<< "$LAGOON_SERVICE_TYPES"
for LAGOON_SERVICE_TYPE in "${LAGOON_SERVICE_TYPES_SPLIT[@]}"
do
IFS=':' read -ra LAGOON_SERVICE_TYPE_SPLIT <<< "$LAGOON_SERVICE_TYPE"
if [ "${LAGOON_SERVICE_TYPE_SPLIT[0]}" == "$SERVICE_NAME" ]; then
SERVICE_TYPE=${LAGOON_SERVICE_TYPE_SPLIT[1]}
fi
done
fi
# Previous versions of Lagoon used "python-ckandatapusher", this should be mapped to "python"
if [[ "$SERVICE_TYPE" == "python-ckandatapusher" ]]; then
SERVICE_TYPE="python"
fi
# "mariadb" is a meta service, which allows lagoon to decide itself which of the services to use:
# - mariadb-single (a single mariadb pod)
# - mariadb-dbaas (use the dbaas shared operator)
if [ "$SERVICE_TYPE" == "mariadb" ]; then
# if there is already a service existing with the service_name we assume that for this project there has been a
# mariadb-single deployed (probably from the past where there was no mariadb-shared yet, or mariadb-dbaas) and use that one
if kubectl --insecure-skip-tls-verify -n ${NAMESPACE} get service "$SERVICE_NAME" &> /dev/null; then
SERVICE_TYPE="mariadb-single"
# check if this cluster supports the default one, if not we assume that this cluster is not capable of shared mariadbs and we use a mariadb-single
# real basic check to see if the mariadbconsumer exists as a kind
elif [[ "${CAPABILITIES[@]}" =~ "mariadb.amazee.io/v1/MariaDBConsumer" ]]; then
SERVICE_TYPE="mariadb-dbaas"
else
SERVICE_TYPE="mariadb-single"
fi
fi
# Previous versions of Lagoon supported "mariadb-shared", this has been superseeded by "mariadb-dbaas"
if [[ "$SERVICE_TYPE" == "mariadb-shared" ]]; then
SERVICE_TYPE="mariadb-dbaas"
fi
if [[ "$SERVICE_TYPE" == "mariadb-dbaas" ]]; then
# Default plan is the enviroment type
DBAAS_ENVIRONMENT=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.mariadb-dbaas\\.environment "${ENVIRONMENT_TYPE}")
# Allow the dbaas shared servicebroker plan to be overriden by environment in .lagoon.yml
ENVIRONMENT_DBAAS_ENVIRONMENT_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.overrides.$SERVICE_NAME.mariadb-dbaas\\.environment false)
if [ ! $DBAAS_ENVIRONMENT_OVERRIDE == "false" ]; then
DBAAS_ENVIRONMENT=$ENVIRONMENT_DBAAS_ENVIRONMENT_OVERRIDE
fi
# If we have a dbaas environment type override in the api, consume it here
if [ ! -z "$LAGOON_DBAAS_ENVIRONMENT_TYPES" ]; then
IFS=',' read -ra LAGOON_DBAAS_ENVIRONMENT_TYPES_SPLIT <<< "$LAGOON_DBAAS_ENVIRONMENT_TYPES"
for LAGOON_DBAAS_ENVIRONMENT_TYPE in "${LAGOON_DBAAS_ENVIRONMENT_TYPES_SPLIT[@]}"
do
IFS=':' read -ra LAGOON_DBAAS_ENVIRONMENT_TYPE_SPLIT <<< "$LAGOON_DBAAS_ENVIRONMENT_TYPE"
if [ "${LAGOON_DBAAS_ENVIRONMENT_TYPE_SPLIT[0]}" == "$SERVICE_NAME" ]; then
DBAAS_ENVIRONMENT=${LAGOON_DBAAS_ENVIRONMENT_TYPE_SPLIT[1]}
fi
done
fi
MAP_SERVICE_NAME_TO_DBAAS_ENVIRONMENT["${SERVICE_NAME}"]="${DBAAS_ENVIRONMENT}"
fi
# "postgres" is a meta service, which allows lagoon to decide itself which of the services to use:
# - postgres-single (a single postgres pod)
# - postgres-dbaas (use the dbaas shared operator)
if [ "$SERVICE_TYPE" == "postgres" ]; then
# if there is already a service existing with the service_name we assume that for this project there has been a
# postgres-single deployed (probably from the past where there was no postgres-shared yet, or postgres-dbaas) and use that one
if kubectl --insecure-skip-tls-verify -n ${NAMESPACE} get service "$SERVICE_NAME" &> /dev/null; then
SERVICE_TYPE="postgres-single"
# heck if this cluster supports the default one, if not we assume that this cluster is not capable of shared PostgreSQL and we use a postgres-single
# real basic check to see if the postgreSQLConsumer exists as a kind
elif [[ "${CAPABILITIES[@]}" =~ "postgres.amazee.io/v1/PostgreSQLConsumer" ]]; then
SERVICE_TYPE="postgres-dbaas"
else
SERVICE_TYPE="postgres-single"
fi
fi
# Previous versions of Lagoon supported "postgres-shared", this has been superseeded by "postgres-dbaas"
if [[ "$SERVICE_TYPE" == "postgres-shared" ]]; then
SERVICE_TYPE="postgres-dbaas"
fi
if [[ "$SERVICE_TYPE" == "postgres-dbaas" ]]; then
# Default plan is the enviroment type
DBAAS_ENVIRONMENT=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.postgres-dbaas\\.environment "${ENVIRONMENT_TYPE}")
# Allow the dbaas shared servicebroker plan to be overriden by environment in .lagoon.yml
ENVIRONMENT_DBAAS_ENVIRONMENT_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.overrides.$SERVICE_NAME.postgres-dbaas\\.environment false)
if [ ! $DBAAS_ENVIRONMENT_OVERRIDE == "false" ]; then
DBAAS_ENVIRONMENT=$ENVIRONMENT_DBAAS_ENVIRONMENT_OVERRIDE
fi
MAP_SERVICE_NAME_TO_DBAAS_ENVIRONMENT["${SERVICE_NAME}"]="${DBAAS_ENVIRONMENT}"
fi
# "mongo" is a meta service, which allows lagoon to decide itself which of the services to use:
# - mongodb-single (a single mongodb pod)
# - mongodb-dbaas (use the dbaas shared operator)
if [ "$SERVICE_TYPE" == "mongo" ]; then
# if there is already a service existing with the service_name we assume that for this project there has been a
# mongodb-single deployed (probably from the past where there was no mongodb-shared yet, or mongodb-dbaas) and use that one
if kubectl --insecure-skip-tls-verify -n ${NAMESPACE} get service "$SERVICE_NAME" &> /dev/null; then
SERVICE_TYPE="mongodb-single"
# heck if this cluster supports the default one, if not we assume that this cluster is not capable of shared MongoDB and we use a mongodb-single
# real basic check to see if the MongoDBConsumer exists as a kind
elif [[ "${CAPABILITIES[@]}" =~ "mongodb.amazee.io/v1/MongoDBConsumer" ]]; then
SERVICE_TYPE="mongodb-dbaas"
else
SERVICE_TYPE="mongodb-single"
fi
fi
# Previous versions of Lagoon supported "mongo-shared", this has been superseeded by "mongodb-dbaas"
if [[ "$SERVICE_TYPE" == "mongo-shared" ]]; then
SERVICE_TYPE="mongodb-dbaas"
fi
if [[ "$SERVICE_TYPE" == "mongodb-dbaas" ]]; then
# Default plan is the enviroment type
DBAAS_ENVIRONMENT=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.mongodb-dbaas\\.environment "${ENVIRONMENT_TYPE}")
# Allow the dbaas shared servicebroker plan to be overriden by environment in .lagoon.yml
ENVIRONMENT_DBAAS_ENVIRONMENT_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.overrides.$SERVICE_NAME.mongodb-dbaas\\.environment false)
if [ ! $DBAAS_ENVIRONMENT_OVERRIDE == "false" ]; then
DBAAS_ENVIRONMENT=$ENVIRONMENT_DBAAS_ENVIRONMENT_OVERRIDE
fi
# If we have a dbaas environment type override in the api, consume it here
if [ ! -z "$LAGOON_DBAAS_ENVIRONMENT_TYPES" ]; then
IFS=',' read -ra LAGOON_DBAAS_ENVIRONMENT_TYPES_SPLIT <<< "$LAGOON_DBAAS_ENVIRONMENT_TYPES"
for LAGOON_DBAAS_ENVIRONMENT_TYPE in "${LAGOON_DBAAS_ENVIRONMENT_TYPES_SPLIT[@]}"
do
IFS=':' read -ra LAGOON_DBAAS_ENVIRONMENT_TYPE_SPLIT <<< "$LAGOON_DBAAS_ENVIRONMENT_TYPE"
if [ "${LAGOON_DBAAS_ENVIRONMENT_TYPE_SPLIT[0]}" == "$SERVICE_NAME" ]; then
DBAAS_ENVIRONMENT=${LAGOON_DBAAS_ENVIRONMENT_TYPE_SPLIT[1]}
fi
done
fi
MAP_SERVICE_NAME_TO_DBAAS_ENVIRONMENT["${SERVICE_NAME}"]="${DBAAS_ENVIRONMENT}"
fi
if [ "$SERVICE_TYPE" == "none" ]; then
continue
fi
# For DeploymentConfigs with multiple Services inside (like nginx-php), we allow to define the service type of within the
# deploymentconfig via lagoon.deployment.servicetype. If this is not set we use the Compose Service Name
DEPLOYMENT_SERVICETYPE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.deployment\\.servicetype default)
if [ "$DEPLOYMENT_SERVICETYPE" == "default" ]; then
DEPLOYMENT_SERVICETYPE=$COMPOSE_SERVICE
fi
# The ImageName is the same as the Name of the Docker Compose ServiceName
IMAGE_NAME=$COMPOSE_SERVICE
# Do not handle images for shared services
if [[ "$SERVICE_TYPE" != "mariadb-dbaas" ]] &&
[[ "$SERVICE_TYPE" != "mariadb-shared" ]] &&
[[ "$SERVICE_TYPE" != "postgres-shared" ]] &&
[[ "$SERVICE_TYPE" != "postgres-dbaas" ]] &&
[[ "$SERVICE_TYPE" != "mongodb-dbaas" ]] &&
[[ "$SERVICE_TYPE" != "mongodb-shared" ]]; then
# Generate List of Images to build
IMAGES+=("${IMAGE_NAME}")
fi
# Map Deployment ServiceType to the ImageName
MAP_DEPLOYMENT_SERVICETYPE_TO_IMAGENAME["${SERVICE_NAME}:${DEPLOYMENT_SERVICETYPE}"]="${IMAGE_NAME}"
# Create an array with all Service Names and Types if it does not exist yet
if [[ ! " ${SERVICE_TYPES[@]} " =~ " ${SERVICE_NAME}:${SERVICE_TYPE} " ]]; then
SERVICE_TYPES+=("${SERVICE_NAME}:${SERVICE_TYPE}")
fi
# ServiceName and Type to Original Service Name Mapping, but only once per Service name and Type,
# as we have original services that appear twice (like in the case of nginx-php)
if [[ ! "${MAP_SERVICE_TYPE_TO_COMPOSE_SERVICE["${SERVICE_NAME}:${SERVICE_TYPE}"]+isset}" ]]; then
MAP_SERVICE_TYPE_TO_COMPOSE_SERVICE["${SERVICE_NAME}:${SERVICE_TYPE}"]="${COMPOSE_SERVICE}"
fi
# ServiceName to ImageName mapping, but only once as we have original services that appear twice (like in the case of nginx-php)
# these will be handled via MAP_DEPLOYMENT_SERVICETYPE_TO_IMAGENAME
if [[ ! "${MAP_SERVICE_NAME_TO_IMAGENAME["${SERVICE_NAME}"]+isset}" ]]; then
MAP_SERVICE_NAME_TO_IMAGENAME["${SERVICE_NAME}"]="${IMAGE_NAME}"
fi
done
##############################################
### BUILD IMAGES
##############################################
# we only need to build images for pullrequests and branches
if [[ "$BUILD_TYPE" == "pullrequest" || "$BUILD_TYPE" == "branch" ]]; then
BUILD_ARGS=()
set +x # reduce noise in build logs
# Get the pre-rollout and post-rollout vars
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
LAGOON_PREROLLOUT_DISABLED=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_PREROLLOUT_DISABLED") | "\(.value)"'))
LAGOON_POSTROLLOUT_DISABLED=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_POSTROLLOUT_DISABLED") | "\(.value)"'))
fi
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
LAGOON_PREROLLOUT_DISABLED=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_PREROLLOUT_DISABLED") | "\(.value)"'))
LAGOON_POSTROLLOUT_DISABLED=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_POSTROLLOUT_DISABLED") | "\(.value)"'))
fi
set -x
set +x # reduce noise in build logs
# Add environment variables from lagoon API as build args
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
echo "LAGOON_PROJECT_VARIABLES are available from the API"
# multiline/spaced variables seem to break when being added from the API.
# this changes the way it works to create the variable in a similar way to how they are injected below
LAGOON_ENV_VARS=$(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.scope == "build" or .scope == "global") | "\(.name)"')
for LAGOON_ENV_VAR in $LAGOON_ENV_VARS
do
BUILD_ARGS+=(--build-arg $(echo $LAGOON_ENV_VAR)="$(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.scope == "build" or .scope == "global") | select(.name == "'$LAGOON_ENV_VAR'") | "\(.value)"')")
done
fi
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
echo "LAGOON_ENVIRONMENT_VARIABLES are available from the API"
# multiline/spaced variables seem to break when being added from the API.
# this changes the way it works to create the variable in a similar way to how they are injected below
LAGOON_ENV_VARS=$(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.scope == "build" or .scope == "global") | "\(.name)"')
for LAGOON_ENV_VAR in $LAGOON_ENV_VARS
do
BUILD_ARGS+=(--build-arg $(echo $LAGOON_ENV_VAR)="$(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.scope == "build" or .scope == "global") | select(.name == "'$LAGOON_ENV_VAR'") | "\(.value)"')")
done
fi
set -x
BUILD_ARGS+=(--build-arg IMAGE_REPO="${CI_OVERRIDE_IMAGE_REPO}")
BUILD_ARGS+=(--build-arg LAGOON_PROJECT="${PROJECT}")
BUILD_ARGS+=(--build-arg LAGOON_ENVIRONMENT="${ENVIRONMENT}")
BUILD_ARGS+=(--build-arg LAGOON_ENVIRONMENT_TYPE="${ENVIRONMENT_TYPE}")
BUILD_ARGS+=(--build-arg LAGOON_BUILD_TYPE="${BUILD_TYPE}")
BUILD_ARGS+=(--build-arg LAGOON_GIT_SOURCE_REPOSITORY="${SOURCE_REPOSITORY}")
set +x
BUILD_ARGS+=(--build-arg LAGOON_SSH_PRIVATE_KEY="${SSH_PRIVATE_KEY}")
set -x
if [ "$BUILD_TYPE" == "branch" ]; then
BUILD_ARGS+=(--build-arg LAGOON_GIT_SHA="${LAGOON_GIT_SHA}")
BUILD_ARGS+=(--build-arg LAGOON_GIT_BRANCH="${BRANCH}")
fi
if [ "$BUILD_TYPE" == "pullrequest" ]; then
BUILD_ARGS+=(--build-arg LAGOON_GIT_SHA="${LAGOON_GIT_SHA}")
BUILD_ARGS+=(--build-arg LAGOON_PR_HEAD_BRANCH="${PR_HEAD_BRANCH}")
BUILD_ARGS+=(--build-arg LAGOON_PR_HEAD_SHA="${PR_HEAD_SHA}")
BUILD_ARGS+=(--build-arg LAGOON_PR_BASE_BRANCH="${PR_BASE_BRANCH}")
BUILD_ARGS+=(--build-arg LAGOON_PR_BASE_SHA="${PR_BASE_SHA}")
BUILD_ARGS+=(--build-arg LAGOON_PR_TITLE="${PR_TITLE}")
BUILD_ARGS+=(--build-arg LAGOON_PR_NUMBER="${PR_NUMBER}")
fi
for IMAGE_NAME in "${IMAGES[@]}"
do
DOCKERFILE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$IMAGE_NAME.build.dockerfile false)
# allow to overwrite build dockerfile for this environment and service
ENVIRONMENT_DOCKERFILE_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.overrides.$IMAGE_NAME.build.dockerfile false)
if [ ! $ENVIRONMENT_DOCKERFILE_OVERRIDE == "false" ]; then
DOCKERFILE=$ENVIRONMENT_DOCKERFILE_OVERRIDE
fi
if [ $DOCKERFILE == "false" ]; then
# No Dockerfile defined, assuming to download the Image directly
PULL_IMAGE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$IMAGE_NAME.image false)
if [ $PULL_IMAGE == "false" ]; then
echo "No Dockerfile or Image for service ${IMAGE_NAME} defined"; exit 1;
fi
# allow to overwrite image that we pull
OVERRIDE_IMAGE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$IMAGE_NAME.labels.lagoon\\.image false)
# allow to overwrite image that we pull for this environment and service
ENVIRONMENT_IMAGE_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.overrides.$IMAGE_NAME.image false)
if [ ! $ENVIRONMENT_IMAGE_OVERRIDE == "false" ]; then
OVERRIDE_IMAGE=$ENVIRONMENT_IMAGE_OVERRIDE
fi
if [ ! $OVERRIDE_IMAGE == "false" ]; then
# expand environment variables from ${OVERRIDE_IMAGE}
PULL_IMAGE=$(echo "${OVERRIDE_IMAGE}" | envsubst)
fi
# if the image just is an image name (like "alpine") we prefix it with `libary/` as the imagecache does not understand
# the magic `alpine` images
if [[ ! "$PULL_IMAGE" =~ "/" ]]; then
PULL_IMAGE="library/$PULL_IMAGE"
fi
# Add the images we should pull to the IMAGES_PULL array, they will later be tagged from dockerhub
IMAGES_PULL["${IMAGE_NAME}"]="${PULL_IMAGE}"
else
# Dockerfile defined, load the context and build it
# We need the Image Name uppercase sometimes, so we create that here
IMAGE_NAME_UPPERCASE=$(echo "$IMAGE_NAME" | tr '[:lower:]' '[:upper:]')
# To prevent clashes of ImageNames during parallel builds, we give all Images a Temporary name
TEMPORARY_IMAGE_NAME="${NAMESPACE}-${IMAGE_NAME}"
BUILD_CONTEXT=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$IMAGE_NAME.build.context .)
# allow to overwrite build context for this environment and service
ENVIRONMENT_BUILD_CONTEXT_OVERRIDE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.overrides.$IMAGE_NAME.build.context false)
if [ ! $ENVIRONMENT_BUILD_CONTEXT_OVERRIDE == "false" ]; then
BUILD_CONTEXT=$ENVIRONMENT_BUILD_CONTEXT_OVERRIDE
fi
if [ ! -f $BUILD_CONTEXT/$DOCKERFILE ]; then
echo "defined Dockerfile $DOCKERFILE for service $IMAGE_NAME not found"; exit 1;
fi
. /kubectl-build-deploy/scripts/exec-build.sh
# Keep a list of the images we have built, as we need to push them to the OpenShift Registry later
IMAGES_BUILD["${IMAGE_NAME}"]="${TEMPORARY_IMAGE_NAME}"
# adding the build image to the list of arguments passed into the next image builds
BUILD_ARGS+=(--build-arg ${IMAGE_NAME_UPPERCASE}_IMAGE=${TEMPORARY_IMAGE_NAME})
fi
done
fi
##############################################
### RUN PRE-ROLLOUT tasks defined in .lagoon.yml
##############################################
# Pre-rollout tasks run before the new deployment is rolled out.
# They can be disabled per-build via LAGOON_PREROLLOUT_DISABLED=true.
if [ "${LAGOON_PREROLLOUT_DISABLED}" != "true" ]; then
  COUNTER=0
  # Iterate tasks.pre-rollout.0, .1, ... until shyaml finds no further entry.
  while [ -n "$(cat .lagoon.yml | shyaml keys tasks.pre-rollout.$COUNTER 2> /dev/null)" ]
  do
    # The single key under each list entry names the task type (only "run" is supported).
    TASK_TYPE=$(cat .lagoon.yml | shyaml keys tasks.pre-rollout.$COUNTER)
    echo $TASK_TYPE
    case "$TASK_TYPE" in
      run)
        # Collect the task definition; the sourced exec-pre-tasks-run.sh
        # consumes COMMAND, SERVICE_NAME, CONTAINER and SHELL from this shell.
        COMMAND=$(cat .lagoon.yml | shyaml get-value tasks.pre-rollout.$COUNTER.$TASK_TYPE.command)
        SERVICE_NAME=$(cat .lagoon.yml | shyaml get-value tasks.pre-rollout.$COUNTER.$TASK_TYPE.service)
        CONTAINER=$(cat .lagoon.yml | shyaml get-value tasks.pre-rollout.$COUNTER.$TASK_TYPE.container false)
        # NOTE(review): this overwrites the inherited $SHELL environment variable
        # for the remainder of the build — confirm that is intended.
        SHELL=$(cat .lagoon.yml | shyaml get-value tasks.pre-rollout.$COUNTER.$TASK_TYPE.shell sh)
        . /kubectl-build-deploy/scripts/exec-pre-tasks-run.sh
        ;;
      *)
        # Unknown task types abort the build rather than being skipped silently.
        echo "Task Type ${TASK_TYPE} not implemented"; exit 1;
    esac
    let COUNTER=COUNTER+1
  done
else
  echo "pre-rollout tasks are currently disabled LAGOON_PREROLLOUT_DISABLED is set to true"
fi
##############################################
### CREATE OPENSHIFT SERVICES, ROUTES and SERVICEBROKERS
##############################################
# All generated kubernetes YAML for services/ingresses is collected in this
# folder and applied in a later step.
YAML_FOLDER="/kubectl-build-deploy/lagoon/services-routes"
mkdir -p $YAML_FOLDER

# BC for routes.insecure, which is now called routes.autogenerate.insecure
BC_ROUTES_AUTOGENERATE_INSECURE=$(cat .lagoon.yml | shyaml get-value routes.insecure false)
# Quoted + `!=`: the previous unquoted `[ ! $VAR == "false" ]` form errors out
# ("unary operator expected") when the value is empty or contains whitespace.
if [ "$BC_ROUTES_AUTOGENERATE_INSECURE" != "false" ]; then
  echo "=== routes.insecure is now defined in routes.autogenerate.insecure, please update your .lagoon.yml file"
  ROUTES_AUTOGENERATE_INSECURE=$BC_ROUTES_AUTOGENERATE_INSECURE
else
  # By default we allow insecure traffic on autogenerate routes
  ROUTES_AUTOGENERATE_INSECURE=$(cat .lagoon.yml | shyaml get-value routes.autogenerate.insecure Allow)
fi
# Decide whether autogenerated routes are enabled for this deployment.
# Each lookup lowercases the YAML value so comparisons are case-insensitive;
# `set -o pipefail` makes the command substitution fail if shyaml fails.
ROUTES_AUTOGENERATE_ENABLED=$(set -o pipefail; shyaml get-value routes.autogenerate.enabled true < .lagoon.yml | tr '[:upper:]' '[:lower:]')
# Pull requests may opt in/out separately; the default follows the global flag.
ROUTES_AUTOGENERATE_ALLOW_PRS=$(set -o pipefail; shyaml get-value routes.autogenerate.allowPullrequests $ROUTES_AUTOGENERATE_ENABLED < .lagoon.yml | tr '[:upper:]' '[:lower:]')
if [[ "$TYPE" == "pullrequest" ]]; then
  if [[ "$ROUTES_AUTOGENERATE_ALLOW_PRS" == "true" ]]; then
    ROUTES_AUTOGENERATE_ENABLED=true
  fi
fi
## fail silently if the key autogenerateRoutes doesn't exist and default to whatever ROUTES_AUTOGENERATE_ENABLED is set to
ROUTES_AUTOGENERATE_BRANCH=$(set -o pipefail; shyaml -q get-value environments.${BRANCH//./\\.}.autogenerateRoutes $ROUTES_AUTOGENERATE_ENABLED < .lagoon.yml | tr '[:upper:]' '[:lower:]')
if [[ "$ROUTES_AUTOGENERATE_BRANCH" == "true" ]]; then
  ROUTES_AUTOGENERATE_ENABLED=true
fi
# Collect any configured prefixes for autogenerated routes (e.g. "www.").
ROUTES_AUTOGENERATE_PREFIXES=$(yq r -C .lagoon.yml routes.autogenerate.prefixes.*)

# Build the shared helm values file consumed by every chart templated below.
touch /kubectl-build-deploy/values.yaml
yq write -i -- /kubectl-build-deploy/values.yaml 'project' $PROJECT
yq write -i -- /kubectl-build-deploy/values.yaml 'environment' $ENVIRONMENT
yq write -i -- /kubectl-build-deploy/values.yaml 'environmentType' $ENVIRONMENT_TYPE
yq write -i -- /kubectl-build-deploy/values.yaml 'namespace' $NAMESPACE
yq write -i -- /kubectl-build-deploy/values.yaml 'gitSha' $LAGOON_GIT_SHA
yq write -i -- /kubectl-build-deploy/values.yaml 'buildType' $BUILD_TYPE
yq write -i -- /kubectl-build-deploy/values.yaml 'routesAutogenerateInsecure' $ROUTES_AUTOGENERATE_INSECURE
yq write -i -- /kubectl-build-deploy/values.yaml 'routesAutogenerateEnabled' $ROUTES_AUTOGENERATE_ENABLED
yq write -i -- /kubectl-build-deploy/values.yaml 'routesAutogenerateSuffix' $ROUTER_URL
yq write -i -- /kubectl-build-deploy/values.yaml 'routesAutogenerateShortSuffix' $SHORT_ROUTER_URL
# '[+]' appends each prefix as a new list entry.
for i in $ROUTES_AUTOGENERATE_PREFIXES; do yq write -i -- /kubectl-build-deploy/values.yaml 'routesAutogeneratePrefixes[+]' $i; done
yq write -i -- /kubectl-build-deploy/values.yaml 'kubernetes' $KUBERNETES
yq write -i -- /kubectl-build-deploy/values.yaml 'lagoonVersion' $LAGOON_VERSION

# Append the registry pull secrets as a YAML list. The trailing backslashes keep
# the echo -e payload on one logical line; the \n sequences emit the newlines.
echo -e "\
imagePullSecrets:\n\
" >> /kubectl-build-deploy/values.yaml
for REGISTRY_SECRET in "${REGISTRY_SECRETS[@]}"
do
  echo -e "\
- name: "${REGISTRY_SECRET}"\n\
" >> /kubectl-build-deploy/values.yaml
done

# values.env carries the same data as key=value pairs for consumers that
# cannot read YAML.
echo -e "\
LAGOON_PROJECT=${PROJECT}\n\
LAGOON_ENVIRONMENT=${ENVIRONMENT}\n\
LAGOON_ENVIRONMENT_TYPE=${ENVIRONMENT_TYPE}\n\
LAGOON_GIT_SHA=${LAGOON_GIT_SHA}\n\
LAGOON_KUBERNETES=${KUBERNETES}\n\
" >> /kubectl-build-deploy/values.env

# DEPRECATED: will be removed with Lagoon 3.0.0
# LAGOON_GIT_SAFE_BRANCH is pointing to the environment name, therefore it is also filled if this environment
# is created by a PR or Promote workflow. This is technically wrong, therefore it will be removed
echo -e "\
LAGOON_GIT_SAFE_BRANCH=${ENVIRONMENT}\n\
" >> /kubectl-build-deploy/values.env

# Branch builds additionally expose the git branch name.
if [ "$BUILD_TYPE" == "branch" ]; then
  yq write -i -- /kubectl-build-deploy/values.yaml 'branch' $BRANCH
  echo -e "\
LAGOON_GIT_BRANCH=${BRANCH}\n\
" >> /kubectl-build-deploy/values.env
fi

# Pull-request builds expose head/base branch, title and number.
if [ "$BUILD_TYPE" == "pullrequest" ]; then
  yq write -i -- /kubectl-build-deploy/values.yaml 'prHeadBranch' "$PR_HEAD_BRANCH"
  yq write -i -- /kubectl-build-deploy/values.yaml 'prBaseBranch' "$PR_BASE_BRANCH"
  yq write -i -- /kubectl-build-deploy/values.yaml 'prTitle' "$PR_TITLE"
  yq write -i -- /kubectl-build-deploy/values.yaml 'prNumber' "$PR_NUMBER"
  echo -e "\
LAGOON_PR_HEAD_BRANCH=${PR_HEAD_BRANCH}\n\
LAGOON_PR_BASE_BRANCH=${PR_BASE_BRANCH}\n\
LAGOON_PR_TITLE=${PR_TITLE}\n\
LAGOON_PR_NUMBER=${PR_NUMBER}\n\
" >> /kubectl-build-deploy/values.env
fi
# Template the per-service kubernetes resources (service, autogenerated ingress,
# dbaas consumer) for every "name:type" pair collected earlier in SERVICE_TYPES.
for SERVICE_TYPES_ENTRY in "${SERVICE_TYPES[@]}"
do
  echo "=== BEGIN route processing for service ${SERVICE_TYPES_ENTRY} ==="
  IFS=':' read -ra SERVICE_TYPES_ENTRY_SPLIT <<< "$SERVICE_TYPES_ENTRY"
  TEMPLATE_PARAMETERS=()
  SERVICE_NAME=${SERVICE_TYPES_ENTRY_SPLIT[0]}
  SERVICE_TYPE=${SERVICE_TYPES_ENTRY_SPLIT[1]}
  # Per-service values file; only the dbaas template below writes into it.
  touch /kubectl-build-deploy/${SERVICE_NAME}-values.yaml
  HELM_SERVICE_TEMPLATE="templates/service.yaml"
  if [ -f /kubectl-build-deploy/helmcharts/${SERVICE_TYPE}/$HELM_SERVICE_TEMPLATE ]; then
    cat /kubectl-build-deploy/values.yaml
    helm template ${SERVICE_NAME} /kubectl-build-deploy/helmcharts/${SERVICE_TYPE} -s $HELM_SERVICE_TEMPLATE -f /kubectl-build-deploy/values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${SERVICE_NAME}.yaml
  fi
  if [ $ROUTES_AUTOGENERATE_ENABLED == "true" ]; then
    HELM_INGRESS_TEMPLATE="templates/ingress.yaml"
    if [ -f /kubectl-build-deploy/helmcharts/${SERVICE_TYPE}/$HELM_INGRESS_TEMPLATE ]; then
      # The very first generated route is set as MAIN_GENERATED_ROUTE
      if [ -z "${MAIN_GENERATED_ROUTE+x}" ]; then
        MAIN_GENERATED_ROUTE=$SERVICE_NAME
      fi
      # NOTE(review): this truncates (>) the same ${SERVICE_NAME}.yaml that the
      # service template above just wrote, discarding that output — confirm
      # whether append (>>) or a distinct filename was intended.
      helm template ${SERVICE_NAME} /kubectl-build-deploy/helmcharts/${SERVICE_TYPE} -s $HELM_INGRESS_TEMPLATE -f /kubectl-build-deploy/values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${SERVICE_NAME}.yaml
    fi
  fi
  HELM_DBAAS_TEMPLATE="templates/dbaas.yaml"
  if [ -f /kubectl-build-deploy/helmcharts/${SERVICE_TYPE}/$HELM_DBAAS_TEMPLATE ]; then
    # Load the requested class and plan for this service
    DBAAS_ENVIRONMENT="${MAP_SERVICE_NAME_TO_DBAAS_ENVIRONMENT["${SERVICE_NAME}"]}"
    yq write -i -- /kubectl-build-deploy/${SERVICE_NAME}-values.yaml 'environment' $DBAAS_ENVIRONMENT
    # NOTE(review): same `>` truncation concern as above for services that have
    # both a service/ingress template and a dbaas template.
    helm template ${SERVICE_NAME} /kubectl-build-deploy/helmcharts/${SERVICE_TYPE} -s $HELM_DBAAS_TEMPLATE -f /kubectl-build-deploy/values.yaml -f /kubectl-build-deploy/${SERVICE_NAME}-values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${SERVICE_NAME}.yaml
    DBAAS+=("${SERVICE_NAME}:${SERVICE_TYPE}")
  fi
done
TEMPLATE_PARAMETERS=()
##############################################
### CUSTOM FASTLY API SECRETS .lagoon.yml
##############################################
# if a customer is using their own fastly configuration, then they can define their api token and platform tls configuration ID in the .lagoon.yml file
# this will get created as a `kind: Secret` in kubernetes so that created ingresses will be able to use this secret to talk to the fastly api.
#
# in this example, the customer needs to add a build envvar called `FASTLY_API_TOKEN` and then populates the .lagoon.yml file with something like this
#
# fastly:
# api-secrets:
# - name: customer
# apiTokenVariableName: FASTLY_API_TOKEN
# platformTLSConfiguration: A1bcEdFgH12eD242Sds
#
# then the build process will attempt to check the lagoon variables for one called `FASTLY_API_TOKEN` and will use the value of this variable when creating the
# `kind: Secret` in kubernetes
#
# support for multiple api-secrets is possible in the instance that a customer uses 2 separate services in different accounts in the one project
## any fastly api secrets will be prefixed with this, so that we always add this to whatever the customer provides
FASTLY_API_SECRET_PREFIX="fastly-api-"
FASTLY_API_SECRETS_COUNTER=0
FASTLY_API_SECRETS=()
if [ -n "$(cat .lagoon.yml | shyaml keys fastly.api-secrets.$FASTLY_API_SECRETS_COUNTER 2> /dev/null)" ]; then
  while [ -n "$(cat .lagoon.yml | shyaml get-value fastly.api-secrets.$FASTLY_API_SECRETS_COUNTER 2> /dev/null)" ]; do
    # Read the raw (unprefixed) name first so the empty-name guard can actually
    # fire: previously the prefix was concatenated before the -z check, which
    # made the check impossible to trigger even when no name was set.
    FASTLY_API_SECRET_NAME_SUFFIX=$(cat .lagoon.yml | shyaml get-value fastly.api-secrets.$FASTLY_API_SECRETS_COUNTER.name 2> /dev/null)
    if [ -z "$FASTLY_API_SECRET_NAME_SUFFIX" ]; then
      echo -e "A fastly api secret was defined in the .lagoon.yml file, but no name could be found in the .lagoon.yml\n\nPlease check if the name has been set correctly."
      exit 1
    fi
    FASTLY_API_SECRET_NAME=$FASTLY_API_SECRET_PREFIX$FASTLY_API_SECRET_NAME_SUFFIX
    FASTLY_API_TOKEN_VALUE=$(cat .lagoon.yml | shyaml get-value fastly.api-secrets.$FASTLY_API_SECRETS_COUNTER.apiTokenVariableName false)
    if [[ $FASTLY_API_TOKEN_VALUE == "false" ]]; then
      echo "No 'apiTokenVariableName' defined for fastly secret $FASTLY_API_SECRET_NAME"; exit 1;
    fi
    # Resolve the actual token from the Lagoon API build variables:
    # project variables first, environment variables override them.
    # (The previous re-test of $FASTLY_API_TOKEN_VALUE here was redundant —
    # we already exited above when it was "false" — and unquoted, so it could
    # crash on multi-word values.)
    FASTLY_API_TOKEN=""
    if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
      FASTLY_API_TOKEN=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.scope == "build" and .name == "'$FASTLY_API_TOKEN_VALUE'") | "\(.value)"'))
    fi
    if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
      TEMP_FASTLY_API_TOKEN=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.scope == "build" and .name == "'$FASTLY_API_TOKEN_VALUE'") | "\(.value)"'))
      if [ ! -z "$TEMP_FASTLY_API_TOKEN" ]; then
        FASTLY_API_TOKEN=$TEMP_FASTLY_API_TOKEN
      fi
    fi
    if [ -z "$FASTLY_API_TOKEN" ]; then
      echo -e "A fastly api secret was defined in the .lagoon.yml file, but no token could be found in the Lagoon API matching the variable name provided\n\nPlease check if the token has been set correctly."
      exit 1
    fi
    FASTLY_API_PLATFORMTLS_CONFIGURATION=$(cat .lagoon.yml | shyaml get-value fastly.api-secrets.$FASTLY_API_SECRETS_COUNTER.platformTLSConfiguration "")
    if [ -z "$FASTLY_API_PLATFORMTLS_CONFIGURATION" ]; then
      echo -e "A fastly api secret was defined in the .lagoon.yml file, but no platform tls configuration id could be found in the .lagoon.yml\n\nPlease check if the platform tls configuration id has been set correctly."
      exit 1
    fi
    # run the script to create the secrets (sourced so it can extend FASTLY_API_SECRETS)
    . /kubectl-build-deploy/scripts/exec-fastly-api-secrets.sh
    let FASTLY_API_SECRETS_COUNTER=FASTLY_API_SECRETS_COUNTER+1
  done
fi
# FASTLY API SECRETS FROM LAGOON API VARIABLE
# Allow for defining fastly api secrets using lagoon api variables
# This accepts colon separated values like so `SECRET_NAME:FASTLY_API_TOKEN:FASTLY_PLATFORMTLS_CONFIGURATION_ID`, and multiple overrides
# separated by commas
# Example 1: examplecom:x1s8asfafasf7ssf:fa23rsdgsdgas
# ^^^ will create a kubernetes secret called `$FASTLY_API_SECRET_PREFIX-examplecom` with 2 data fields (one for api token, the other for platform tls id)
# populated with `x1s8asfafasf7ssf` and `fa23rsdgsdgas` for whichever field it should be
# and the name will get created with the prefix defined in `FASTLY_API_SECRET_PREFIX`
# Example 2: examplecom:x1s8asfafasf7ssf:fa23rsdgsdgas,example2com:fa23rsdgsdgas:x1s8asfafasf7ssf,example3com:fa23rsdgsdgas:x1s8asfafasf7ssf:example3com
# Look up LAGOON_FASTLY_API_SECRETS in the Lagoon API project variables,
# allowing an environment-level variable of the same name to override it.
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
  LAGOON_FASTLY_API_SECRETS=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_FASTLY_API_SECRETS") | "\(.value)"'))
fi
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
  TEMP_LAGOON_FASTLY_API_SECRETS=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_FASTLY_API_SECRETS") | "\(.value)"'))
  # Quoted: the previous unquoted test errored with "too many arguments"
  # whenever the jq result word-split into multiple fields.
  if [ ! -z "$TEMP_LAGOON_FASTLY_API_SECRETS" ]; then
    LAGOON_FASTLY_API_SECRETS=$TEMP_LAGOON_FASTLY_API_SECRETS
  fi
fi
# Create one fastly api secret per comma-separated NAME:TOKEN:PLATFORMTLS_ID entry.
if [ ! -z "$LAGOON_FASTLY_API_SECRETS" ]; then
  IFS=',' read -ra LAGOON_FASTLY_API_SECRETS_SPLIT <<< "$LAGOON_FASTLY_API_SECRETS"
  for LAGOON_FASTLY_API_SECRETS_DATA in "${LAGOON_FASTLY_API_SECRETS_SPLIT[@]}"
  do
    IFS=':' read -ra LAGOON_FASTLY_API_SECRET_SPLIT <<< "$LAGOON_FASTLY_API_SECRETS_DATA"
    # All three fields are mandatory; a malformed entry aborts the build.
    if [ -z "${LAGOON_FASTLY_API_SECRET_SPLIT[0]}" ] || [ -z "${LAGOON_FASTLY_API_SECRET_SPLIT[1]}" ] || [ -z "${LAGOON_FASTLY_API_SECRET_SPLIT[2]}" ]; then
      echo -e "An override was defined in the lagoon API with LAGOON_FASTLY_API_SECRETS but was not structured correctly, the format should be NAME:FASTLY_API_TOKEN:FASTLY_PLATFORMTLS_CONFIGURATION_ID and comma separated for multiples"
      exit 1
    fi
    # the fastly api secret name will be created with the prefix that is defined above
    FASTLY_API_SECRET_NAME=$FASTLY_API_SECRET_PREFIX${LAGOON_FASTLY_API_SECRET_SPLIT[0]}
    FASTLY_API_TOKEN=${LAGOON_FASTLY_API_SECRET_SPLIT[1]}
    FASTLY_API_PLATFORMTLS_CONFIGURATION=${LAGOON_FASTLY_API_SECRET_SPLIT[2]}
    # run the script to create the secrets (sourced; consumes the three variables above)
    . /kubectl-build-deploy/scripts/exec-fastly-api-secrets.sh
  done
fi
# FASTLY SERVICE ID PER INGRESS OVERRIDE FROM LAGOON API VARIABLE
# Allow the fastly serviceid for specific ingress to be overridden by the lagoon API
# This accepts colon separated values like so `INGRESS_DOMAIN:FASTLY_SERVICE_ID:WATCH_STATUS:SECRET_NAME(OPTIONAL)`, and multiple overrides
# separated by commas
# Example 1: www.example.com:x1s8asfafasf7ssf:true
# ^^^ tells the ingress creation to use the service id x1s8asfafasf7ssf for ingress www.example.com, with the watch status of true
# Example 2: www.example.com:x1s8asfafasf7ssf:true,www.not-example.com:fa23rsdgsdgas:false
# ^^^ same as above, but also tells the ingress creation to use the service id fa23rsdgsdgas for ingress www.not-example.com, with the watch status of false
# Example 3: www.example.com:x1s8asfafasf7ssf:true:examplecom
# ^^^ tells the ingress creation to use the service id x1s8asfafasf7ssf for ingress www.example.com, with the watch status of true
# but it will also be annotated to be told to use the secret named `examplecom` that could be defined elsewhere
# Look up LAGOON_FASTLY_SERVICE_IDS in the Lagoon API project variables,
# allowing an environment-level variable of the same name to override it.
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
  LAGOON_FASTLY_SERVICE_IDS=($(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_FASTLY_SERVICE_IDS") | "\(.value)"'))
fi
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
  TEMP_LAGOON_FASTLY_SERVICE_IDS=($(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_FASTLY_SERVICE_IDS") | "\(.value)"'))
  # Quoted: the previous unquoted test errored with "too many arguments"
  # whenever the jq result word-split into multiple fields.
  if [ ! -z "$TEMP_LAGOON_FASTLY_SERVICE_IDS" ]; then
    LAGOON_FASTLY_SERVICE_IDS=$TEMP_LAGOON_FASTLY_SERVICE_IDS
  fi
fi
##############################################
### CUSTOM ROUTES FROM .lagoon.yml
##############################################
# Counter into the per-service route lists; reused by the active/standby
# production-route blocks below.
ROUTES_SERVICE_COUNTER=0
# we need to check for production routes for active/standby if they are defined, as these will get migrated between environments as required
if [ "${ENVIRONMENT_TYPE}" == "production" ]; then
if [ "${BRANCH//./\\.}" == "${ACTIVE_ENVIRONMENT}" ]; then
if [ -n "$(cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; then
while [ -n "$(cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; do
ROUTES_SERVICE=$(cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER)
ROUTE_DOMAIN_COUNTER=0
while [ -n "$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER 2> /dev/null)" ]; do
# Routes can either be a key (when the have additional settings) or just a value
if cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER &> /dev/null; then
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
# Route Domains include dots, which need to be esacped via `\.` in order to use them within shyaml
ROUTE_DOMAIN_ESCAPED=$(cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER | sed 's/\./\\./g')
ROUTE_TLS_ACME=$(set -o pipefail; cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.tls-acme true | tr '[:upper:]' '[:lower:]')
ROUTE_MIGRATE=$(set -o pipefail; cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.migrate true | tr '[:upper:]' '[:lower:]')
ROUTE_INSECURE=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.insecure Redirect)
ROUTE_HSTS=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.hsts null)
MONITORING_PATH=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.monitoring-path "/")
ROUTE_ANNOTATIONS=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.annotations {})
# get the fastly configuration values from .lagoon.yml
if cat .lagoon.yml | shyaml keys production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly &> /dev/null; then
ROUTE_FASTLY_SERVICE_ID=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.service-id "")
ROUTE_FASTLY_SERVICE_API_SECRET=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.api-secret-name "")
ROUTE_FASTLY_SERVICE_WATCH=$(set -o pipefail; cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.watch false | tr '[:upper:]' '[:lower:]')
else
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
else
# Only a value given, assuming some defaults
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml get-value production_routes.active.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
ROUTE_TLS_ACME=true
ROUTE_MIGRATE=true
ROUTE_INSECURE=Redirect
ROUTE_HSTS=null
MONITORING_PATH="/"
ROUTE_ANNOTATIONS="{}"
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
# work out if there are any lagoon api variable overrides for the annotations that are being added
. /kubectl-build-deploy/scripts/exec-fastly-annotations.sh
# if we get any other populated service id overrides in any of the steps in exec-fastly-annotations.sh
# make it available to the ingress creation here by overriding what may be defined in the lagoon.yml
if [ ! -z "$LAGOON_FASTLY_SERVICE_ID" ]; then
ROUTE_FASTLY_SERVICE_ID=$LAGOON_FASTLY_SERVICE_ID
ROUTE_FASTLY_SERVICE_WATCH=$LAGOON_FASTLY_SERVICE_WATCH
if [ ! -z $LAGOON_FASTLY_SERVICE_API_SECRET ]; then
ROUTE_FASTLY_SERVICE_API_SECRET=$LAGOON_FASTLY_SERVICE_API_SECRET
fi
fi
FASTLY_ARGS=()
if [ ! -z "$ROUTE_FASTLY_SERVICE_ID" ]; then
FASTLY_ARGS+=(--set fastly.serviceId=${ROUTE_FASTLY_SERVICE_ID})
if [ ! -z "$ROUTE_FASTLY_SERVICE_API_SECRET" ]; then
if contains $FASTLY_API_SECRETS "${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET}"; then
FASTLY_ARGS+=(--set fastly.apiSecretName=${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET})
else
echo "$ROUTE_FASTLY_SERVICE_API_SECRET requested, but not found in .lagoon.yml file"; exit 1;
fi
fi
fi
touch /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
echo "$ROUTE_ANNOTATIONS" | yq p - annotations > /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
# ${ROUTE_DOMAIN} is used as a helm release name which be max 53 characters long.
# So we need some logic to make sure it's always max 53 characters
if [[ ${#ROUTE_DOMAIN} -gt 53 ]] ; then
# Trim the route domain to 47 characters, and add an 5 character hash of the domain at the end
# this gives a total of 53 characters
INGRESS_NAME=${ROUTE_DOMAIN:0:47}-$(echo ${ROUTE_DOMAIN} | md5sum | cut -f 1 -d " " | cut -c 1-5)
else
INGRESS_NAME=${ROUTE_DOMAIN}
fi
# The very first found route is set as MAIN_CUSTOM_ROUTE
if [ -z "${MAIN_CUSTOM_ROUTE+x}" ]; then
MAIN_CUSTOM_ROUTE=$INGRESS_NAME
# if we are in production we enabled monitoring for the main custom route
if [ "${ENVIRONMENT_TYPE}" == "production" ]; then
MONITORING_ENABLED="true"
fi
fi
ROUTE_SERVICE=$ROUTES_SERVICE
cat /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
helm template ${INGRESS_NAME} \
/kubectl-build-deploy/helmcharts/custom-ingress \
--set host="${ROUTE_DOMAIN}" \
--set service="${ROUTE_SERVICE}" \
--set tls_acme="${ROUTE_TLS_ACME}" \
--set insecure="${ROUTE_INSECURE}" \
--set hsts="${ROUTE_HSTS}" \
--set routeMigrate="${ROUTE_MIGRATE}" \
--set ingressmonitorcontroller.enabled="${MONITORING_ENABLED}" \
--set ingressmonitorcontroller.path="${MONITORING_PATH}" \
--set ingressmonitorcontroller.alertContacts="${MONITORING_ALERTCONTACT}" \
--set ingressmonitorcontroller.statuspageId="${MONITORING_STATUSPAGEID}" \
"${FASTLY_ARGS[@]}" --set fastly.watch="${ROUTE_FASTLY_SERVICE_WATCH}" \
-f /kubectl-build-deploy/values.yaml -f /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${ROUTE_DOMAIN}.yaml
MONITORING_ENABLED="false" # disabling a possible enabled monitoring again
let ROUTE_DOMAIN_COUNTER=ROUTE_DOMAIN_COUNTER+1
done
let ROUTES_SERVICE_COUNTER=ROUTES_SERVICE_COUNTER+1
done
fi
fi
if [ "${BRANCH//./\\.}" == "${STANDBY_ENVIRONMENT}" ]; then
if [ -n "$(cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; then
while [ -n "$(cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; do
ROUTES_SERVICE=$(cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER)
ROUTE_DOMAIN_COUNTER=0
while [ -n "$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER 2> /dev/null)" ]; do
# Routes can either be a key (when the have additional settings) or just a value
if cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER &> /dev/null; then
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
# Route Domains include dots, which need to be esacped via `\.` in order to use them within shyaml
ROUTE_DOMAIN_ESCAPED=$(cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER | sed 's/\./\\./g')
ROUTE_TLS_ACME=$(set -o pipefail; cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.tls-acme true | tr '[:upper:]' '[:lower:]')
ROUTE_MIGRATE=$(set -o pipefail; cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.migrate true | tr '[:upper:]' '[:lower:]')
ROUTE_INSECURE=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.insecure Redirect)
ROUTE_HSTS=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.hsts null)
MONITORING_PATH=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.monitoring-path "/")
ROUTE_ANNOTATIONS=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.annotations {})
# get the fastly configuration values from .lagoon.yml
if cat .lagoon.yml | shyaml keys production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly &> /dev/null; then
ROUTE_FASTLY_SERVICE_ID=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.service-id "")
ROUTE_FASTLY_SERVICE_API_SECRET=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.api-secret-name "")
ROUTE_FASTLY_SERVICE_WATCH=$(set -o pipefail; cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.watch false | tr '[:upper:]' '[:lower:]')
else
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
else
# Only a value given, assuming some defaults
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml get-value production_routes.standby.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
ROUTE_TLS_ACME=true
ROUTE_MIGRATE=true
ROUTE_INSECURE=Redirect
ROUTE_HSTS=null
MONITORING_PATH="/"
ROUTE_ANNOTATIONS="{}"
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
# work out if there are any lagoon api variable overrides for the annotations that are being added
. /kubectl-build-deploy/scripts/exec-fastly-annotations.sh
# if we get any other populated service id overrides in any of the steps in exec-fastly-annotations.sh
# make it available to the ingress creation here by overriding what may be defined in the lagoon.yml
if [ ! -z "$LAGOON_FASTLY_SERVICE_ID" ]; then
ROUTE_FASTLY_SERVICE_ID=$LAGOON_FASTLY_SERVICE_ID
ROUTE_FASTLY_SERVICE_WATCH=$LAGOON_FASTLY_SERVICE_WATCH
if [ ! -z $LAGOON_FASTLY_SERVICE_API_SECRET ]; then
ROUTE_FASTLY_SERVICE_API_SECRET=$LAGOON_FASTLY_SERVICE_API_SECRET
fi
fi
# Create the fastly values required
FASTLY_ARGS=()
if [ ! -z "$ROUTE_FASTLY_SERVICE_ID" ]; then
FASTLY_ARGS+=(--set fastly.serviceId=${ROUTE_FASTLY_SERVICE_ID})
if [ ! -z "$ROUTE_FASTLY_SERVICE_API_SECRET" ]; then
if contains $FASTLY_API_SECRETS "${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET}"; then
FASTLY_ARGS+=(--set fastly.apiSecretName=${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET})
else
echo "$ROUTE_FASTLY_SERVICE_API_SECRET requested, but not found in .lagoon.yml file"; exit 1;
fi
fi
ROUTE_FASTLY_SERVICE_WATCH=true
fi
touch /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
echo "$ROUTE_ANNOTATIONS" | yq p - annotations > /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
# ${ROUTE_DOMAIN} is used as a helm release name which be max 53 characters long.
# So we need some logic to make sure it's always max 53 characters
if [[ ${#ROUTE_DOMAIN} -gt 53 ]] ; then
# Trim the route domain to 47 characters, and add an 5 character hash of the domain at the end
# this gives a total of 53 characters
INGRESS_NAME=${ROUTE_DOMAIN:0:47}-$(echo ${ROUTE_DOMAIN} | md5sum | cut -f 1 -d " " | cut -c 1-5)
else
INGRESS_NAME=${ROUTE_DOMAIN}
fi
# The very first found route is set as MAIN_CUSTOM_ROUTE
if [ -z "${MAIN_CUSTOM_ROUTE+x}" ]; then
MAIN_CUSTOM_ROUTE=$INGRESS_NAME
# if we are in production we enabled monitoring for the main custom route
if [ "${ENVIRONMENT_TYPE}" == "production" ]; then
MONITORING_ENABLED="true"
fi
fi
ROUTE_SERVICE=$ROUTES_SERVICE
cat /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
helm template ${INGRESS_NAME} \
/kubectl-build-deploy/helmcharts/custom-ingress \
--set host="${ROUTE_DOMAIN}" \
--set service="${ROUTE_SERVICE}" \
--set tls_acme="${ROUTE_TLS_ACME}" \
--set insecure="${ROUTE_INSECURE}" \
--set hsts="${ROUTE_HSTS}" \
--set routeMigrate="${ROUTE_MIGRATE}" \
--set ingressmonitorcontroller.enabled="${MONITORING_ENABLED}" \
--set ingressmonitorcontroller.path="${MONITORING_PATH}" \
--set ingressmonitorcontroller.alertContacts="${MONITORING_ALERTCONTACT}" \
--set ingressmonitorcontroller.statuspageId="${MONITORING_STATUSPAGEID}" \
"${FASTLY_ARGS[@]}" --set fastly.watch="${ROUTE_FASTLY_SERVICE_WATCH}" \
-f /kubectl-build-deploy/values.yaml -f /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${ROUTE_DOMAIN}.yaml
MONITORING_ENABLED="false" # disabling a possible enabled monitoring again
let ROUTE_DOMAIN_COUNTER=ROUTE_DOMAIN_COUNTER+1
done
let ROUTES_SERVICE_COUNTER=ROUTES_SERVICE_COUNTER+1
done
fi
fi
fi
MONITORING_ENABLED="false" # monitoring is by default disabled, it will be enabled for the first route again
# Two while loops as we have multiple services that want routes and each service has multiple routes
ROUTES_SERVICE_COUNTER=0
# The `if` selects between two .lagoon.yml layouts: route keys nested under
# `${PROJECT}.environments.*` (handled here) or plain `environments.*` (the `else` branch below).
if [ -n "$(cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; then
while [ -n "$(cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; do
ROUTES_SERVICE=$(cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER)
ROUTE_DOMAIN_COUNTER=0
while [ -n "$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER 2> /dev/null)" ]; do
# Routes can either be a key (when they have additional settings) or just a value
if cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER &> /dev/null; then
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
# Route Domains include dots, which need to be escaped via `\.` in order to use them within shyaml
ROUTE_DOMAIN_ESCAPED=$(cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER | sed 's/\./\\./g')
# Per-route settings, each with a default as the last shyaml argument; `set -o pipefail`
# makes a failing shyaml lookup visible through the pipe, `tr` lowercases boolean values.
ROUTE_TLS_ACME=$(set -o pipefail; cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.tls-acme true | tr '[:upper:]' '[:lower:]')
ROUTE_MIGRATE=$(set -o pipefail; cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.migrate false | tr '[:upper:]' '[:lower:]')
ROUTE_INSECURE=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.insecure Redirect)
ROUTE_HSTS=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.hsts null)
MONITORING_PATH=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.monitoring-path "/")
ROUTE_ANNOTATIONS=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.annotations {})
# get the fastly configuration values from .lagoon.yml
if cat .lagoon.yml | shyaml keys ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly &> /dev/null; then
# BUGFIX: the `get-value` action was missing here, so shyaml treated the key path as its
# action and failed, leaving the service id unset (compare the api-secret-name lookup below)
ROUTE_FASTLY_SERVICE_ID=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.service-id "")
ROUTE_FASTLY_SERVICE_API_SECRET=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.api-secret-name "")
ROUTE_FASTLY_SERVICE_WATCH=$(set -o pipefail; cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.watch false | tr '[:upper:]' '[:lower:]')
else
# no fastly block for this route: reset to empty defaults so values from a previous
# loop iteration cannot leak into this route
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
else
# Only a value given, assuming some defaults
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml get-value ${PROJECT}.environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
ROUTE_TLS_ACME=true
ROUTE_MIGRATE=false
ROUTE_INSECURE=Redirect
ROUTE_HSTS=null
MONITORING_PATH="/"
ROUTE_ANNOTATIONS="{}"
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
# work out if there are any lagoon api variable overrides for the annotations that are being added
. /kubectl-build-deploy/scripts/exec-fastly-annotations.sh
# if we get any other populated service id overrides in any of the steps in exec-fastly-annotations.sh
# make it available to the ingress creation here by overriding what may be defined in the lagoon.yml
if [ ! -z "$LAGOON_FASTLY_SERVICE_ID" ]; then
ROUTE_FASTLY_SERVICE_ID=$LAGOON_FASTLY_SERVICE_ID
ROUTE_FASTLY_SERVICE_WATCH=$LAGOON_FASTLY_SERVICE_WATCH
# NOTE(review): $LAGOON_FASTLY_SERVICE_API_SECRET is unquoted here — if it could ever
# contain whitespace this test breaks; consider quoting like the test above
if [ ! -z $LAGOON_FASTLY_SERVICE_API_SECRET ]; then
ROUTE_FASTLY_SERVICE_API_SECRET=$LAGOON_FASTLY_SERVICE_API_SECRET
fi
fi
# Create the fastly values required
FASTLY_ARGS=()
if [ ! -z "$ROUTE_FASTLY_SERVICE_ID" ]; then
FASTLY_ARGS+=(--set fastly.serviceId=${ROUTE_FASTLY_SERVICE_ID})
if [ ! -z "$ROUTE_FASTLY_SERVICE_API_SECRET" ]; then
# the named secret must have been provisioned earlier (FASTLY_API_SECRETS); abort the build otherwise
if contains $FASTLY_API_SECRETS "${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET}"; then
FASTLY_ARGS+=(--set fastly.apiSecretName=${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET})
else
echo "$ROUTE_FASTLY_SERVICE_API_SECRET requested, but not found in .lagoon.yml file"; exit 1;
fi
fi
# a service id always implies watching by the fastly controller
ROUTE_FASTLY_SERVICE_WATCH=true
fi
# ${ROUTE_DOMAIN} is used as a helm release name which can be max 53 characters long.
# So we need some logic to make sure it's always max 53 characters
if [[ ${#ROUTE_DOMAIN} -gt 53 ]] ; then
# Trim the route domain to 47 characters, and add an 5 character hash of the domain at the end
# this gives a total of 53 characters
INGRESS_NAME=${ROUTE_DOMAIN:0:47}-$(echo ${ROUTE_DOMAIN} | md5sum | cut -f 1 -d " " | cut -c 1-5)
else
INGRESS_NAME=${ROUTE_DOMAIN}
fi
# NOTE(review): the `touch` is redundant — the `>` redirect on the next line already
# creates (and truncates) the file
touch /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
echo "$ROUTE_ANNOTATIONS" | yq p - annotations > /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
# The very first found route is set as MAIN_CUSTOM_ROUTE
if [ -z "${MAIN_CUSTOM_ROUTE+x}" ]; then
MAIN_CUSTOM_ROUTE=$INGRESS_NAME
# if we are in production we enabled monitoring for the main custom route
if [ "${ENVIRONMENT_TYPE}" == "production" ]; then
MONITORING_ENABLED="true"
fi
fi
ROUTE_SERVICE=$ROUTES_SERVICE
# echo the per-route values file into the build log for traceability
cat /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
# Render the custom-ingress chart for this route into the YAML folder; applied later in one go
helm template ${INGRESS_NAME} \
/kubectl-build-deploy/helmcharts/custom-ingress \
--set host="${ROUTE_DOMAIN}" \
--set service="${ROUTE_SERVICE}" \
--set tls_acme="${ROUTE_TLS_ACME}" \
--set insecure="${ROUTE_INSECURE}" \
--set hsts="${ROUTE_HSTS}" \
--set routeMigrate="${ROUTE_MIGRATE}" \
--set ingressmonitorcontroller.enabled="${MONITORING_ENABLED}" \
--set ingressmonitorcontroller.path="${MONITORING_PATH}" \
--set ingressmonitorcontroller.alertContacts="${MONITORING_ALERTCONTACT}" \
--set ingressmonitorcontroller.statuspageId="${MONITORING_STATUSPAGEID}" \
"${FASTLY_ARGS[@]}" --set fastly.watch="${ROUTE_FASTLY_SERVICE_WATCH}" \
-f /kubectl-build-deploy/values.yaml -f /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${ROUTE_DOMAIN}.yaml
MONITORING_ENABLED="false" # disabling a possible enabled monitoring again
let ROUTE_DOMAIN_COUNTER=ROUTE_DOMAIN_COUNTER+1
done
let ROUTES_SERVICE_COUNTER=ROUTES_SERVICE_COUNTER+1
done
else
# Same route processing as above, but for .lagoon.yml files whose routes live under
# plain `environments.*` (no project name prefix)
while [ -n "$(cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER 2> /dev/null)" ]; do
ROUTES_SERVICE=$(cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER)
ROUTE_DOMAIN_COUNTER=0
while [ -n "$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER 2> /dev/null)" ]; do
# Routes can either be a key (when they have additional settings) or just a value
if cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER &> /dev/null; then
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
# Route Domains include dots, which need to be escaped via `\.` in order to use them within shyaml
ROUTE_DOMAIN_ESCAPED=$(cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER | sed 's/\./\\./g')
ROUTE_TLS_ACME=$(set -o pipefail; cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.tls-acme true | tr '[:upper:]' '[:lower:]')
ROUTE_MIGRATE=$(set -o pipefail; cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.migrate false | tr '[:upper:]' '[:lower:]')
ROUTE_INSECURE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.insecure Redirect)
ROUTE_HSTS=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.hsts null)
MONITORING_PATH=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.monitoring-path "/")
ROUTE_ANNOTATIONS=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.annotations {})
# get the fastly configuration values from .lagoon.yml
if cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly &> /dev/null; then
# BUGFIX: the `get-value` action was missing here, so shyaml treated the key path as its
# action and failed, leaving the service id unset (compare the api-secret-name lookup below)
ROUTE_FASTLY_SERVICE_ID=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.service-id "")
ROUTE_FASTLY_SERVICE_API_SECRET=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.api-secret-name "")
ROUTE_FASTLY_SERVICE_WATCH=$(set -o pipefail; cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER.$ROUTE_DOMAIN_ESCAPED.fastly.watch false | tr '[:upper:]' '[:lower:]')
else
# no fastly block for this route: reset to empty defaults so values from a previous
# loop iteration cannot leak into this route
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
else
# Only a value given, assuming some defaults
ROUTE_DOMAIN=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.routes.$ROUTES_SERVICE_COUNTER.$ROUTES_SERVICE.$ROUTE_DOMAIN_COUNTER)
ROUTE_TLS_ACME=true
ROUTE_MIGRATE=false
ROUTE_INSECURE=Redirect
ROUTE_HSTS=null
MONITORING_PATH="/"
ROUTE_ANNOTATIONS="{}"
ROUTE_FASTLY_SERVICE_ID=""
ROUTE_FASTLY_SERVICE_API_SECRET=""
ROUTE_FASTLY_SERVICE_WATCH=false
fi
# work out if there are any lagoon api variable overrides for the annotations that are being added
. /kubectl-build-deploy/scripts/exec-fastly-annotations.sh
# if we get any other populated service id overrides in any of the steps in exec-fastly-annotations.sh
# make it available to the ingress creation here by overriding what may be defined in the lagoon.yml
if [ ! -z "$LAGOON_FASTLY_SERVICE_ID" ]; then
ROUTE_FASTLY_SERVICE_ID=$LAGOON_FASTLY_SERVICE_ID
ROUTE_FASTLY_SERVICE_WATCH=$LAGOON_FASTLY_SERVICE_WATCH
# NOTE(review): $LAGOON_FASTLY_SERVICE_API_SECRET is unquoted here — consider quoting
if [ ! -z $LAGOON_FASTLY_SERVICE_API_SECRET ]; then
ROUTE_FASTLY_SERVICE_API_SECRET=$LAGOON_FASTLY_SERVICE_API_SECRET
fi
fi
# Create the fastly values required
FASTLY_ARGS=()
if [ ! -z "$ROUTE_FASTLY_SERVICE_ID" ]; then
FASTLY_ARGS+=(--set fastly.serviceId=${ROUTE_FASTLY_SERVICE_ID})
if [ ! -z "$ROUTE_FASTLY_SERVICE_API_SECRET" ]; then
# the named secret must have been provisioned earlier (FASTLY_API_SECRETS); abort the build otherwise
if contains $FASTLY_API_SECRETS "${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET}"; then
FASTLY_ARGS+=(--set fastly.apiSecretName=${FASTLY_API_SECRET_PREFIX}${ROUTE_FASTLY_SERVICE_API_SECRET})
else
echo "$ROUTE_FASTLY_SERVICE_API_SECRET requested, but not found in .lagoon.yml file"; exit 1;
fi
fi
# a service id always implies watching by the fastly controller
ROUTE_FASTLY_SERVICE_WATCH=true
fi
# NOTE(review): the `touch` is redundant — the `>` redirect on the next line already
# creates (and truncates) the file
touch /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
echo "$ROUTE_ANNOTATIONS" | yq p - annotations > /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
# ${ROUTE_DOMAIN} is used as a helm release name which can be max 53 characters long.
# So we need some logic to make sure it's always max 53 characters
if [[ ${#ROUTE_DOMAIN} -gt 53 ]] ; then
# Trim the route domain to 47 characters, and add an 5 character hash of the domain at the end
# this gives a total of 53 characters
INGRESS_NAME=${ROUTE_DOMAIN:0:47}-$(echo ${ROUTE_DOMAIN} | md5sum | cut -f 1 -d " " | cut -c 1-5)
else
INGRESS_NAME=${ROUTE_DOMAIN}
fi
# The very first found route is set as MAIN_CUSTOM_ROUTE
if [ -z "${MAIN_CUSTOM_ROUTE+x}" ]; then
MAIN_CUSTOM_ROUTE=$INGRESS_NAME
# if we are in production we enabled monitoring for the main custom route
if [ "${ENVIRONMENT_TYPE}" == "production" ]; then
MONITORING_ENABLED="true"
fi
fi
ROUTE_SERVICE=$ROUTES_SERVICE
# echo the per-route values file into the build log for traceability
cat /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml
# Render the custom-ingress chart for this route into the YAML folder; applied later in one go
helm template ${INGRESS_NAME} \
/kubectl-build-deploy/helmcharts/custom-ingress \
--set host="${ROUTE_DOMAIN}" \
--set service="${ROUTE_SERVICE}" \
--set tls_acme="${ROUTE_TLS_ACME}" \
--set insecure="${ROUTE_INSECURE}" \
--set hsts="${ROUTE_HSTS}" \
--set routeMigrate="${ROUTE_MIGRATE}" \
--set ingressmonitorcontroller.enabled="${MONITORING_ENABLED}" \
--set ingressmonitorcontroller.path="${MONITORING_PATH}" \
--set ingressmonitorcontroller.alertContacts="${MONITORING_ALERTCONTACT}" \
--set ingressmonitorcontroller.statuspageId="${MONITORING_STATUSPAGEID}" \
"${FASTLY_ARGS[@]}" --set fastly.watch="${ROUTE_FASTLY_SERVICE_WATCH}" \
-f /kubectl-build-deploy/values.yaml -f /kubectl-build-deploy/${ROUTE_DOMAIN}-values.yaml "${HELM_ARGUMENTS[@]}" > $YAML_FOLDER/${ROUTE_DOMAIN}.yaml
MONITORING_ENABLED="false" # disabling a possible enabled monitoring again
let ROUTE_DOMAIN_COUNTER=ROUTE_DOMAIN_COUNTER+1
done
let ROUTES_SERVICE_COUNTER=ROUTES_SERVICE_COUNTER+1
done
fi
# If k8up is supported by this cluster we create the schedule definition
if [[ "${CAPABILITIES[@]}" =~ "backup.appuio.ch/v1alpha1/Schedule" ]]; then
if ! kubectl --insecure-skip-tls-verify -n ${NAMESPACE} get secret baas-repo-pw &> /dev/null; then
# Create baas-repo-pw secret based on the project secret; set +x keeps the secret
# material out of the (xtrace) build log
set +x
kubectl --insecure-skip-tls-verify -n ${NAMESPACE} create secret generic baas-repo-pw --from-literal=repo-pw=$(echo -n "$PROJECT_SECRET-BAAS-REPO-PW" | sha256sum | cut -d " " -f 1)
set -x
fi
TEMPLATE_PARAMETERS=()
# Check for custom baas bucket name (LAGOON_BAAS_BUCKET_NAME project variable wins over the default)
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
BAAS_BUCKET_NAME=$(echo $LAGOON_PROJECT_VARIABLES | jq -r '.[] | select(.name == "LAGOON_BAAS_BUCKET_NAME") | "\(.value)"')
fi
if [ -z "$BAAS_BUCKET_NAME" ]; then
BAAS_BUCKET_NAME=baas-${PROJECT}
fi
# Pull in .lagoon.yml variables
PRODUCTION_MONTHLY_BACKUP_RETENTION=$(cat .lagoon.yml | shyaml get-value backup-retention.production.monthly "")
PRODUCTION_WEEKLY_BACKUP_RETENTION=$(cat .lagoon.yml | shyaml get-value backup-retention.production.weekly "")
PRODUCTION_DAILY_BACKUP_RETENTION=$(cat .lagoon.yml | shyaml get-value backup-retention.production.daily "")
# Set template parameters for retention values (prefer .lagoon.yml values over supplied defaults
# after ensuring they are valid integers via the "-eq self-comparison" trick, which fails for non-numbers)
if [ ! -z "$PRODUCTION_MONTHLY_BACKUP_RETENTION" ] && [ "$PRODUCTION_MONTHLY_BACKUP_RETENTION" -eq "$PRODUCTION_MONTHLY_BACKUP_RETENTION" ] && [ "$ENVIRONMENT_TYPE" = 'production' ]; then
MONTHLY_BACKUP_RETENTION=${PRODUCTION_MONTHLY_BACKUP_RETENTION}
else
MONTHLY_BACKUP_RETENTION=${MONTHLY_BACKUP_DEFAULT_RETENTION}
fi
if [ ! -z "$PRODUCTION_WEEKLY_BACKUP_RETENTION" ] && [ "$PRODUCTION_WEEKLY_BACKUP_RETENTION" -eq "$PRODUCTION_WEEKLY_BACKUP_RETENTION" ] && [ "$ENVIRONMENT_TYPE" = 'production' ]; then
WEEKLY_BACKUP_RETENTION=${PRODUCTION_WEEKLY_BACKUP_RETENTION}
else
WEEKLY_BACKUP_RETENTION=${WEEKLY_BACKUP_DEFAULT_RETENTION}
fi
if [ ! -z "$PRODUCTION_DAILY_BACKUP_RETENTION" ] && [ "$PRODUCTION_DAILY_BACKUP_RETENTION" -eq "$PRODUCTION_DAILY_BACKUP_RETENTION" ] && [ "$ENVIRONMENT_TYPE" = 'production' ]; then
DAILY_BACKUP_RETENTION=${PRODUCTION_DAILY_BACKUP_RETENTION}
else
DAILY_BACKUP_RETENTION=${DAILY_BACKUP_DEFAULT_RETENTION}
fi
# Run Backups every day at 2200-0200
BACKUP_SCHEDULE=$( /kubectl-build-deploy/scripts/convert-crontab.sh "${NAMESPACE}" "M H(22-2) * * *")
if [ ! -z "$K8UP_WEEKLY_RANDOM_FEATURE_FLAG" ] && [ "$K8UP_WEEKLY_RANDOM_FEATURE_FLAG" = 'enabled' ]; then
# Let the controller deduplicate checks (will run weekly at a random time throughout the week)
CHECK_SCHEDULE="@weekly-random"
else
# Run Checks on Sunday at 0300-0600
CHECK_SCHEDULE=$( /kubectl-build-deploy/scripts/convert-crontab.sh "${NAMESPACE}" "M H(3-6) * * 0")
fi
if [ ! -z "$K8UP_WEEKLY_RANDOM_FEATURE_FLAG" ] && [ "$K8UP_WEEKLY_RANDOM_FEATURE_FLAG" = 'enabled' ]; then
# Let the controller deduplicate prunes (will run weekly at a random time throughout the week)
PRUNE_SCHEDULE="@weekly-random"
else
# Run Prune on Saturday at 0300-0600
PRUNE_SCHEDULE=$( /kubectl-build-deploy/scripts/convert-crontab.sh "${NAMESPACE}" "M H(3-6) * * 6")
fi
# NOTE(review): OPENSHIFT_TEMPLATE looks like a leftover from the OpenShift-template era —
# nothing in this section uses it; confirm before removing
OPENSHIFT_TEMPLATE="/kubectl-build-deploy/openshift-templates/backup-schedule.yml"
# FIX: the output redirect used to sit in the middle of this command (after baasBucketName),
# which is legal in bash but misleading — moved to the end so the retention flags are
# visibly part of the same invocation
helm template k8up-lagoon-backup-schedule /kubectl-build-deploy/helmcharts/k8up-schedule \
-f /kubectl-build-deploy/values.yaml \
--set backup.schedule="${BACKUP_SCHEDULE}" \
--set check.schedule="${CHECK_SCHEDULE}" \
--set prune.schedule="${PRUNE_SCHEDULE}" "${HELM_ARGUMENTS[@]}" \
--set baasBucketName="${BAAS_BUCKET_NAME}" \
--set prune.retention.keepMonthly=$MONTHLY_BACKUP_RETENTION \
--set prune.retention.keepWeekly=$WEEKLY_BACKUP_RETENTION \
--set prune.retention.keepDaily=$DAILY_BACKUP_RETENTION > $YAML_FOLDER/k8up-lagoon-backup-schedule.yaml
fi
# Apply the rendered route manifests, but only when the folder actually contains any;
# each file is echoed to the build log first so the applied YAML is traceable.
if [ -n "$(ls -A $YAML_FOLDER/)" ]; then
  find $YAML_FOLDER -type f -exec cat {} \;
  kubectl apply --insecure-skip-tls-verify -n ${NAMESPACE} -f $YAML_FOLDER/
fi
##############################################
### PROJECT WIDE ENV VARIABLES
##############################################
# If we have a custom route, we use that as main route
if [ "$MAIN_CUSTOM_ROUTE" ]; then
MAIN_ROUTE_NAME=$MAIN_CUSTOM_ROUTE
# no custom route, we use the first generated route
elif [ "$MAIN_GENERATED_ROUTE" ]; then
MAIN_ROUTE_NAME=$MAIN_GENERATED_ROUTE
fi
# Load the found main routes with correct schema (https:// when the ingress has a tls block, http:// otherwise)
if [ "$MAIN_ROUTE_NAME" ]; then
ROUTE=$(kubectl -n ${NAMESPACE} get ingress "$MAIN_ROUTE_NAME" -o=go-template --template='{{if .spec.tls}}https://{{else}}http://{{end}}{{(index .spec.rules 0).host}}')
fi
# Load all routes with correct schema and comma separated, excluding acme challenge exposer ingresses
ROUTES=$(kubectl -n ${NAMESPACE} get ingress --sort-by='{.metadata.name}' -l "acme.openshift.io/exposer!=true" -o=go-template --template='{{range $indexItems, $ingress := .items}}{{if $indexItems}},{{end}}{{$tls := .spec.tls}}{{range $indexRule, $rule := .spec.rules}}{{if $indexRule}},{{end}}{{if $tls}}https://{{else}}http://{{end}}{{.host}}{{end}}{{end}}')
# Active / Standby routes
ACTIVE_ROUTES=""
STANDBY_ROUTES=""
if [ ! -z "${STANDBY_ENVIRONMENT}" ]; then
# NOTE(review): ACTIVE_ROUTES and STANDBY_ROUTES use the identical label selector
# (dioscuri.amazee.io/migrate=true) and so always produce the same list — confirm
# whether the standby query should use a different selector
ACTIVE_ROUTES=$(kubectl -n ${NAMESPACE} get ingress --sort-by='{.metadata.name}' -l "dioscuri.amazee.io/migrate=true" -o=go-template --template='{{range $indexItems, $ingress := .items}}{{if $indexItems}},{{end}}{{$tls := .spec.tls}}{{range $indexRule, $rule := .spec.rules}}{{if $indexRule}},{{end}}{{if $tls}}https://{{else}}http://{{end}}{{.host}}{{end}}{{end}}')
STANDBY_ROUTES=$(kubectl -n ${NAMESPACE} get ingress --sort-by='{.metadata.name}' -l "dioscuri.amazee.io/migrate=true" -o=go-template --template='{{range $indexItems, $ingress := .items}}{{if $indexItems}},{{end}}{{$tls := .spec.tls}}{{range $indexRule, $rule := .spec.rules}}{{if $indexRule}},{{end}}{{if $tls}}https://{{else}}http://{{end}}{{.host}}{{end}}{{end}}')
fi
# Get list of autogenerated routes
AUTOGENERATED_ROUTES=$(kubectl -n ${NAMESPACE} get ingress --sort-by='{.metadata.name}' -l "lagoon.sh/autogenerated=true" -o=go-template --template='{{range $indexItems, $ingress := .items}}{{if $indexItems}},{{end}}{{$tls := .spec.tls}}{{range $indexRule, $rule := .spec.rules}}{{if $indexRule}},{{end}}{{if $tls}}https://{{else}}http://{{end}}{{.host}}{{end}}{{end}}')
# Persist routes to the shared helm values file and to the env file that feeds the lagoon-env configmap
yq write -i -- /kubectl-build-deploy/values.yaml 'route' "$ROUTE"
yq write -i -- /kubectl-build-deploy/values.yaml 'routes' "$ROUTES"
yq write -i -- /kubectl-build-deploy/values.yaml 'autogeneratedRoutes' "$AUTOGENERATED_ROUTES"
echo -e "\
LAGOON_ROUTE=${ROUTE}\n\
LAGOON_ROUTES=${ROUTES}\n\
LAGOON_AUTOGENERATED_ROUTES=${AUTOGENERATED_ROUTES}\n\
" >> /kubectl-build-deploy/values.env
# Generate a Config Map with project wide env variables
# NOTE(review): plain `--dry-run` is deprecated in newer kubectl; `--dry-run=client` is the replacement
kubectl -n ${NAMESPACE} create configmap lagoon-env -o yaml --dry-run --from-env-file=/kubectl-build-deploy/values.env | kubectl apply -n ${NAMESPACE} -f -
set +x # reduce noise in build logs (also keeps variable values out of the xtrace output)
# Add environment variables from lagoon API
if [ ! -z "$LAGOON_PROJECT_VARIABLES" ]; then
# only variables scoped "runtime" or "global" end up in the lagoon-env configmap
HAS_PROJECT_RUNTIME_VARS=$(echo $LAGOON_PROJECT_VARIABLES | jq -r 'map( select(.scope == "runtime" or .scope == "global") )')
if [ ! "$HAS_PROJECT_RUNTIME_VARS" = "[]" ]; then
# merge the name/value pairs into the configmap's data via a strategic merge patch
kubectl patch --insecure-skip-tls-verify \
-n ${NAMESPACE} \
configmap lagoon-env \
-p "{\"data\":$(echo $LAGOON_PROJECT_VARIABLES | jq -r 'map( select(.scope == "runtime" or .scope == "global") ) | map( { (.name) : .value } ) | add | tostring')}"
fi
fi
# environment-level variables are applied after project-level ones, so they win on key collisions
if [ ! -z "$LAGOON_ENVIRONMENT_VARIABLES" ]; then
HAS_ENVIRONMENT_RUNTIME_VARS=$(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r 'map( select(.scope == "runtime" or .scope == "global") )')
if [ ! "$HAS_ENVIRONMENT_RUNTIME_VARS" = "[]" ]; then
kubectl patch --insecure-skip-tls-verify \
-n ${NAMESPACE} \
configmap lagoon-env \
-p "{\"data\":$(echo $LAGOON_ENVIRONMENT_VARIABLES | jq -r 'map( select(.scope == "runtime" or .scope == "global") ) | map( { (.name) : .value } ) | add | tostring')}"
fi
fi
set -x
# expose pull request metadata to the environment; PR_TITLE goes through `jq -R` to be JSON-escaped
if [ "$BUILD_TYPE" == "pullrequest" ]; then
kubectl patch --insecure-skip-tls-verify \
-n ${NAMESPACE} \
configmap lagoon-env \
-p "{\"data\":{\"LAGOON_PR_HEAD_BRANCH\":\"${PR_HEAD_BRANCH}\", \"LAGOON_PR_BASE_BRANCH\":\"${PR_BASE_BRANCH}\", \"LAGOON_PR_TITLE\":$(echo $PR_TITLE | jq -R)}}"
fi
# Provision each DBaaS consumer collected earlier; every entry has the form "name:type".
for DBAAS_ENTRY in "${DBAAS[@]}"
do
  # split "name:type" into its two halves
  IFS=':' read -ra DBAAS_ENTRY_SPLIT <<< "$DBAAS_ENTRY"
  SERVICE_NAME=${DBAAS_ENTRY_SPLIT[0]}
  SERVICE_TYPE=${DBAAS_ENTRY_SPLIT[1]}
  # the sourced provisioning scripts read SERVICE_NAME / SERVICE_NAME_UPPERCASE from the environment
  SERVICE_NAME_UPPERCASE=$(echo "$SERVICE_NAME" | tr '[:lower:]' '[:upper:]' | tr '-' '_')
  # dispatch on the DBaaS flavour; anything unknown aborts the build
  if [ "$SERVICE_TYPE" == "mariadb-dbaas" ]; then
    . /kubectl-build-deploy/scripts/exec-kubectl-mariadb-dbaas.sh
  elif [ "$SERVICE_TYPE" == "postgres-dbaas" ]; then
    . /kubectl-build-deploy/scripts/exec-kubectl-postgres-dbaas.sh
  elif [ "$SERVICE_TYPE" == "mongodb-dbaas" ]; then
    . /kubectl-build-deploy/scripts/exec-kubectl-mongodb-dbaas.sh
  else
    echo "DBAAS Type ${SERVICE_TYPE} not implemented"; exit 1;
  fi
done
##############################################
### REDEPLOY DEPLOYMENTS IF CONFIG MAP CHANGES
##############################################
# Hash the lagoon-env configmap's data section; writing the hash into values.yaml means
# the rendered deployment manifests change whenever the configmap does, which in turn
# triggers a rollout even when nothing else in the deployment changed.
CONFIG_MAP_SHA=$(kubectl --insecure-skip-tls-verify -n ${NAMESPACE} get configmap lagoon-env -o yaml | shyaml get-value data | sha256sum | cut -d " " -f 1)
yq write -i -- /kubectl-build-deploy/values.yaml 'configMapSha' $CONFIG_MAP_SHA
##############################################
### PUSH IMAGES TO OPENSHIFT REGISTRY
##############################################
if [ "$BUILD_TYPE" == "pullrequest" ] || [ "$BUILD_TYPE" == "branch" ]; then
# All images that should be pulled are copied to the harbor registry
for IMAGE_NAME in "${!IMAGES_PULL[@]}"
do
PULL_IMAGE="${IMAGES_PULL[${IMAGE_NAME}]}"
# Try to handle private registries first
if [ $PRIVATE_REGISTRY_COUNTER -gt 0 ]; then
if [ $PRIVATE_EXTERNAL_REGISTRY ]; then
EXTERNAL_REGISTRY=0
for EXTERNAL_REGISTRY_URL in "${PRIVATE_REGISTRY_URLS[@]}"
do
# strip off "http://" or "https://" from registry url if present
bare_url="${EXTERNAL_REGISTRY_URL#http://}"
# BUGFIX: this used to strip from EXTERNAL_REGISTRY_URL again, which discarded the
# http:// strip of the previous line; strip the https:// prefix from bare_url instead
bare_url="${bare_url#https://}"
# Test registry to see if image is from an external registry or just private docker hub
# NOTE(review): this matches "bare_url starts with PULL_IMAGE"; intuitively the image
# reference should start with the registry host, i.e. the operands look reversed — confirm
case $bare_url in
"$PULL_IMAGE"*)
EXTERNAL_REGISTRY=1
;;
esac
done
# If this image is hosted in an external registry, pull it from there
if [ $EXTERNAL_REGISTRY -eq 1 ]; then
skopeo copy --dest-tls-verify=false docker://${PULL_IMAGE} docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest}
# If this image is not from an external registry, but docker hub creds were supplied, pull it straight from Docker Hub
elif [ $PRIVATE_DOCKER_HUB_REGISTRY -eq 1 ]; then
skopeo copy --dest-tls-verify=false docker://${PULL_IMAGE} docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest}
# If image not from an external registry and no docker hub creds were supplied, pull image from the imagecache
else
skopeo copy --dest-tls-verify=false docker://${IMAGECACHE_REGISTRY}/${PULL_IMAGE} docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest}
fi
# If the private registry counter is 1 and no external registry was listed, we know a private docker hub was specified
else
skopeo copy --dest-tls-verify=false docker://${PULL_IMAGE} docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest}
fi
# If no private registries, use the imagecache
else
skopeo copy --dest-tls-verify=false docker://${IMAGECACHE_REGISTRY}/${PULL_IMAGE} docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest}
fi
# record the copied image's digest-pinned reference for the deployment templates
IMAGE_HASHES[${IMAGE_NAME}]=$(skopeo inspect docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest} --tls-verify=false | jq ".Name + \"@\" + .Digest" -r)
done
for IMAGE_NAME in "${!IMAGES_BUILD[@]}"
do
# Before the push the temporary name is resolved to the future tag with the registry in the image name
TEMPORARY_IMAGE_NAME="${IMAGES_BUILD[${IMAGE_NAME}]}"
# This will actually not push any images and instead just add them to the file /kubectl-build-deploy/lagoon/push
. /kubectl-build-deploy/scripts/exec-push-parallel.sh
done
# If we have Images to Push to the OpenRegistry, let's do so
if [ -f /kubectl-build-deploy/lagoon/push ]; then
parallel --retries 4 < /kubectl-build-deploy/lagoon/push
fi
# load the image hashes for just pushed Images
for IMAGE_NAME in "${!IMAGES_BUILD[@]}"
do
JQ_QUERY=(jq -r ".[]|select(test(\"${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}\"))")
IMAGE_HASHES[${IMAGE_NAME}]=$(docker inspect ${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest} --format '{{json .RepoDigests}}' | "${JQ_QUERY[@]}")
done
elif [ "$BUILD_TYPE" == "promote" ]; then
for IMAGE_NAME in "${IMAGES[@]}"
do
. /kubectl-build-deploy/scripts/exec-kubernetes-promote.sh
IMAGE_HASHES[${IMAGE_NAME}]=$(skopeo inspect docker://${REGISTRY}/${PROJECT}/${ENVIRONMENT}/${IMAGE_NAME}:${IMAGE_TAG:-latest} --tls-verify=false | jq ".Name + \"@\" + .Digest" -r)
done
fi
##############################################
### CREATE PVC, DEPLOYMENTS AND CRONJOBS
##############################################
# All manifests rendered below are collected here and applied in a single `kubectl apply` afterwards
YAML_FOLDER="/kubectl-build-deploy/lagoon/deploymentconfigs-pvcs-cronjobs-backups"
mkdir -p $YAML_FOLDER
for SERVICE_TYPES_ENTRY in "${SERVICE_TYPES[@]}"
do
# each entry has the form "name:type"
IFS=':' read -ra SERVICE_TYPES_ENTRY_SPLIT <<< "$SERVICE_TYPES_ENTRY"
SERVICE_NAME=${SERVICE_TYPES_ENTRY_SPLIT[0]}
SERVICE_TYPE=${SERVICE_TYPES_ENTRY_SPLIT[1]}
# resolve the digest-pinned image for this service (filled in the push section above)
SERVICE_NAME_IMAGE="${MAP_SERVICE_NAME_TO_IMAGENAME[${SERVICE_NAME}]}"
SERVICE_NAME_IMAGE_HASH="${IMAGE_HASHES[${SERVICE_NAME_IMAGE}]}"
SERVICE_NAME_UPPERCASE=$(echo "$SERVICE_NAME" | tr '[:lower:]' '[:upper:]')
COMPOSE_SERVICE=${MAP_SERVICE_TYPE_TO_COMPOSE_SERVICE["${SERVICE_TYPES_ENTRY}"]}
# Some Templates need additional Parameters, like where persistent storage can be found.
HELM_SET_VALUES=()
# PERSISTENT_STORAGE_CLASS=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.persistent\\.class false)
# if [ ! $PERSISTENT_STORAGE_CLASS == "false" ]; then
#   TEMPLATE_PARAMETERS+=(-p PERSISTENT_STORAGE_CLASS="${PERSISTENT_STORAGE_CLASS}")
# fi
# pick up persistent storage configuration from docker-compose labels ("false" = label absent)
PERSISTENT_STORAGE_SIZE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.persistent\\.size false)
if [ ! $PERSISTENT_STORAGE_SIZE == "false" ]; then
HELM_SET_VALUES+=(--set "persistentStorage.size=${PERSISTENT_STORAGE_SIZE}")
fi
PERSISTENT_STORAGE_PATH=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.persistent false)
if [ ! $PERSISTENT_STORAGE_PATH == "false" ]; then
HELM_SET_VALUES+=(--set "persistentStorage.path=${PERSISTENT_STORAGE_PATH}")
# the claim name defaults to the service name unless the label overrides it
PERSISTENT_STORAGE_NAME=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.persistent\\.name false)
if [ ! $PERSISTENT_STORAGE_NAME == "false" ]; then
HELM_SET_VALUES+=(--set "persistentStorage.name=${PERSISTENT_STORAGE_NAME}")
else
HELM_SET_VALUES+=(--set "persistentStorage.name=${SERVICE_NAME}")
fi
fi
# TODO: we don't need this anymore
# DEPLOYMENT_STRATEGY=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.$COMPOSE_SERVICE.labels.lagoon\\.deployment\\.strategy false)
# if [ ! $DEPLOYMENT_STRATEGY == "false" ]; then
#   TEMPLATE_PARAMETERS+=(-p DEPLOYMENT_STRATEGY="${DEPLOYMENT_STRATEGY}")
# fi
CRONJOB_COUNTER=0
CRONJOBS_ARRAY_INSIDE_POD=() #crons run inside an existing pod more frequently than every 15 minutes
while [ -n "$(cat .lagoon.yml | shyaml keys environments.${BRANCH//./\\.}.cronjobs.$CRONJOB_COUNTER 2> /dev/null)" ]
do
CRONJOB_SERVICE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.cronjobs.$CRONJOB_COUNTER.service)
# Only implement the cronjob for the services we are currently handling
if [ $CRONJOB_SERVICE == $SERVICE_NAME ]; then
# sanitize the cronjob name to alphanumerics and dashes, dropping a leading dash
CRONJOB_NAME=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.cronjobs.$CRONJOB_COUNTER.name | sed "s/[^[:alnum:]-]/-/g" | sed "s/^-//g")
CRONJOB_SCHEDULE_RAW=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.cronjobs.$CRONJOB_COUNTER.schedule)
# Convert the Cronjob Schedule for additional features and better spread
CRONJOB_SCHEDULE=$( /kubectl-build-deploy/scripts/convert-crontab.sh "${NAMESPACE}" "$CRONJOB_SCHEDULE_RAW")
CRONJOB_COMMAND=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.cronjobs.$CRONJOB_COUNTER.command)
if cronScheduleMoreOftenThan30Minutes "$CRONJOB_SCHEDULE_RAW" ; then
# If this cronjob is more often than 30 minutes, we run the cronjob inside the pod itself
CRONJOBS_ARRAY_INSIDE_POD+=("${CRONJOB_SCHEDULE} ${CRONJOB_COMMAND}")
else
# This cronjob runs less often than every 30 minutes, we create a kubernetes native cronjob for it.
# Add this cronjob to the native cleanup array, this will remove native cronjobs at the end of this script
NATIVE_CRONJOB_CLEANUP_ARRAY+=($(echo "cronjob-${SERVICE_NAME}-${CRONJOB_NAME}" | awk '{print tolower($0)}'))
# kubectl stores this cronjob name lowercased (hence the ${CRONJOB_NAME,,} lowercasing below)
# if [ ! -f $OPENSHIFT_TEMPLATE ]; then
#   echo "No cronjob support for service '${SERVICE_NAME}' with type '${SERVICE_TYPE}', please contact the Lagoon maintainers to implement cronjob support"; exit 1;
# else
yq write -i -- /kubectl-build-deploy/${SERVICE_NAME}-values.yaml "nativeCronjobs.${CRONJOB_NAME,,}.schedule" "$CRONJOB_SCHEDULE"
yq write -i -- /kubectl-build-deploy/${SERVICE_NAME}-values.yaml "nativeCronjobs.${CRONJOB_NAME,,}.command" "$CRONJOB_COMMAND"
# fi
fi
fi
let CRONJOB_COUNTER=CRONJOB_COUNTER+1
done
# if there are cronjobs running inside pods, add them to the deploymentconfig.
if [[ ${#CRONJOBS_ARRAY_INSIDE_POD[@]} -ge 1 ]]; then
yq write -i -- /kubectl-build-deploy/${SERVICE_NAME}-values.yaml 'inPodCronjobs' "$(printf '%s\n' "${CRONJOBS_ARRAY_INSIDE_POD[@]}")"
else
# write an explicit empty string (tagged !!str so yq does not emit a YAML null)
yq write -i --tag '!!str' -- /kubectl-build-deploy/${SERVICE_NAME}-values.yaml 'inPodCronjobs' ''
fi
# renders the service's PVCs/deployments/cronjobs using the values assembled above
. /kubectl-build-deploy/scripts/exec-kubectl-resources-with-images.sh
done
##############################################
### APPLY RESOURCES
##############################################
# Apply every generated manifest in $YAML_FOLDER in one kubectl call
# (skipped entirely when no manifests were generated).
if [ "$(ls -A $YAML_FOLDER/)" ]; then
  if [ "$CI" == "true" ]; then
    # During CI tests of Lagoon itself we only have a single compute node, so we change podAntiAffinity to podAffinity
    find $YAML_FOLDER -type f -print0 | xargs -0 sed -i s/podAntiAffinity/podAffinity/g
    # During CI tests of Lagoon itself we only have a single compute node, so we change ReadWriteMany to ReadWriteOnce
    find $YAML_FOLDER -type f -print0 | xargs -0 sed -i s/ReadWriteMany/ReadWriteOnce/g
  fi
  # Dump all manifests into the build log before applying (helps debug failed applies).
  find $YAML_FOLDER -type f -exec cat {} \;
  kubectl apply --insecure-skip-tls-verify -n ${NAMESPACE} -f $YAML_FOLDER/
fi
##############################################
### WAIT FOR POST-ROLLOUT TO BE FINISHED
##############################################
# For each "name:type" service entry, monitor its rollout unless it is a
# DBaaS type (no deployment of our own to watch) or rollouts are disabled.
for SERVICE_TYPES_ENTRY in "${SERVICE_TYPES[@]}"
do
  IFS=':' read -ra SERVICE_TYPES_ENTRY_SPLIT <<< "$SERVICE_TYPES_ENTRY"
  SERVICE_NAME=${SERVICE_TYPES_ENTRY_SPLIT[0]}
  SERVICE_TYPE=${SERVICE_TYPES_ENTRY_SPLIT[1]}
  SERVICE_ROLLOUT_TYPE=$(cat $DOCKER_COMPOSE_YAML | shyaml get-value services.${SERVICE_NAME}.labels.lagoon\\.rollout deployment)
  # Allow the rollout type to be overriden by environment in .lagoon.yml
  ENVIRONMENT_SERVICE_ROLLOUT_TYPE=$(cat .lagoon.yml | shyaml get-value environments.${BRANCH//./\\.}.rollouts.${SERVICE_NAME} false)
  if [ ! $ENVIRONMENT_SERVICE_ROLLOUT_TYPE == "false" ]; then
    SERVICE_ROLLOUT_TYPE=$ENVIRONMENT_SERVICE_ROLLOUT_TYPE
  fi
  if [ $SERVICE_TYPE == "mariadb-dbaas" ]; then
    echo "nothing to monitor for $SERVICE_TYPE"
  elif [ $SERVICE_TYPE == "postgres-dbaas" ]; then
    echo "nothing to monitor for $SERVICE_TYPE"
  elif [ $SERVICE_TYPE == "mongodb-dbaas" ]; then
    echo "nothing to monitor for $SERVICE_TYPE"
  elif [ ! $SERVICE_ROLLOUT_TYPE == "false" ]; then
    . /kubectl-build-deploy/scripts/exec-monitor-deploy.sh
  fi
done
##############################################
### CLEANUP NATIVE CRONJOBS which have been removed from .lagoon.yml or modified to run more frequently than every 30 minutes
##############################################
# Delete any native cronjob currently in the namespace that is no longer
# listed in NATIVE_CRONJOB_CLEANUP_ARRAY (built earlier in this script).
CURRENT_CRONJOBS=$(kubectl -n ${NAMESPACE} get cronjobs --no-headers | cut -d " " -f 1 | xargs)
IFS=' ' read -a SPLIT_CURRENT_CRONJOBS <<< $CURRENT_CRONJOBS
for SINGLE_NATIVE_CRONJOB in ${SPLIT_CURRENT_CRONJOBS[@]}
do
  # \< \> match the cronjob name as a whole word inside the array text.
  re="\<$SINGLE_NATIVE_CRONJOB\>"
  text=$( IFS=' '; echo "${NATIVE_CRONJOB_CLEANUP_ARRAY[*]}")
  if [[ "$text" =~ $re ]]; then
    #echo "Single cron found: ${SINGLE_NATIVE_CRONJOB}"
    continue
  else
    #echo "Single cron missing: ${SINGLE_NATIVE_CRONJOB}"
    kubectl --insecure-skip-tls-verify -n ${NAMESPACE} delete cronjob ${SINGLE_NATIVE_CRONJOB}
  fi
done
##############################################
### RUN POST-ROLLOUT tasks defined in .lagoon.yml
##############################################
# if we have LAGOON_POSTROLLOUT_DISABLED set, don't try to run any post-rollout tasks
if [ "${LAGOON_POSTROLLOUT_DISABLED}" != "true" ]; then
  COUNTER=0
  # Iterate over tasks.post-rollout.0, .1, ... until shyaml finds no more keys.
  while [ -n "$(cat .lagoon.yml | shyaml keys tasks.post-rollout.$COUNTER 2> /dev/null)" ]
  do
    TASK_TYPE=$(cat .lagoon.yml | shyaml keys tasks.post-rollout.$COUNTER)
    echo $TASK_TYPE
    case "$TASK_TYPE" in
      run)
        COMMAND=$(cat .lagoon.yml | shyaml get-value tasks.post-rollout.$COUNTER.$TASK_TYPE.command)
        SERVICE_NAME=$(cat .lagoon.yml | shyaml get-value tasks.post-rollout.$COUNTER.$TASK_TYPE.service)
        CONTAINER=$(cat .lagoon.yml | shyaml get-value tasks.post-rollout.$COUNTER.$TASK_TYPE.container false)
        SHELL=$(cat .lagoon.yml | shyaml get-value tasks.post-rollout.$COUNTER.$TASK_TYPE.shell sh)
        . /kubectl-build-deploy/scripts/exec-tasks-run.sh
        ;;
      *)
        echo "Task Type ${TASK_TYPE} not implemented"; exit 1;
    esac
    let COUNTER=COUNTER+1
  done
else
  echo "post-rollout tasks are currently disabled LAGOON_POSTROLLOUT_DISABLED is set to true"
fi
##############################################
### PUSH the latest .lagoon.yml into lagoon-yaml configmap
##############################################
if kubectl --insecure-skip-tls-verify -n ${NAMESPACE} get configmap lagoon-yaml &> /dev/null; then
  # replace it
  kubectl --insecure-skip-tls-verify -n ${NAMESPACE} create configmap lagoon-yaml --from-file=.lagoon.yml -o yaml --dry-run | kubectl replace -f -
else
  # create it
  kubectl --insecure-skip-tls-verify -n ${NAMESPACE} create configmap lagoon-yaml --from-file=.lagoon.yml
fi
|
#####################################################################
#####################################################################
# script: scratch.sh
# author: Lincoln Harris
# date: 7.16.18
#
#####################################################################
#####################################################################
#!/bin/bash
# Rename paired-end FASTQ files in each sample directory so the sample
# name is embedded in the file name:
#   <dir>/*R1_001.fastq -> <dir>/<dir>_R1_001.fastq  (and likewise R2).
for dir in *; do
    # Skip anything that is not a directory (the bare "*" glob also
    # matches plain files, which made the original mv calls fail).
    [ -d "$dir" ] || continue
    echo "$dir"
    # Quote $dir so directory names containing spaces survive word splitting.
    mv ./"$dir"/*R1_001.fastq ./"$dir"/"${dir}"_R1_001.fastq
    mv ./"$dir"/*R2_001.fastq ./"$dir"/"${dir}"_R2_001.fastq
done
#####################################################################
#####################################################################
|
import React from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { SykmeldingUtdrag as SykmeldingUtdragForArbeidstakere, sykmelding as sykmeldingPt } from '@navikt/digisyfo-npm';
import { settErOppdelt } from '../../utils/settErOppdelt';
import { ARBEIDSTAKERE, SELVSTENDIGE_OG_FRILANSERE, ARBEIDSLEDIG, BEHANDLINGSDAGER, ANNET_ARBEIDSFORHOLD } from '../../enums/soknadtyper';
import SykmeldingUtdragForSelvstendige from '../../soknad-selvstendig-frilanser/sykmelding-utdrag/SykmeldingUtdragForSelvstendige';
import SykmeldingUtdragForArbeidsledige from '../../soknad-arbeidsledig/sykmelding-utdrag/SykmeldingUtdragForArbeidsledige';
import { soknadPt } from '../../prop-types/soknadProptype';
import SykmeldingUtdragForBehandlingsdager from '../../soknad-behandlingsdager/sykmelding-utdrag/SykmeldingUtdragForBehandlingsdager';
const Utdrag = ({ sykmelding, soknad, erApen, erOppdelt }) => {
if (!sykmelding) {
return null;
}
switch (soknad.soknadstype) {
case ARBEIDSTAKERE:
return (
<SykmeldingUtdragForArbeidstakere
erApen={erApen}
sykmelding={sykmelding}
sykepengesoknad={{ _erOppdelt: erOppdelt }}
/>
);
case SELVSTENDIGE_OG_FRILANSERE:
return (
<SykmeldingUtdragForSelvstendige
erApen={erApen}
sykmelding={sykmelding}
erOppdelt={erOppdelt}
/>
);
case ARBEIDSLEDIG:
case ANNET_ARBEIDSFORHOLD:
return (
<SykmeldingUtdragForArbeidsledige
erApen={erApen}
sykmelding={sykmelding}
erOppdelt={erOppdelt}
/>
);
case BEHANDLINGSDAGER:
return (
<SykmeldingUtdragForBehandlingsdager
erApen={erApen}
sykmelding={sykmelding}
soknad={soknad}
erOppdelt={erOppdelt}
/>
);
default:
return null;
}
};
Utdrag.propTypes = {
sykmelding: sykmeldingPt,
soknad: soknadPt,
erApen: PropTypes.bool,
erOppdelt: PropTypes.bool,
};
const mapStateToProps = (state, ownProps) => {
const sykmelding = state.dineSykmeldinger.data.find((sykmld) => {
return sykmld.id === ownProps.soknad.sykmeldingId;
});
return {
sykmelding,
erOppdelt: settErOppdelt(ownProps.soknad, sykmelding)._erOppdelt,
};
};
const SykmeldingUtdrag = connect(mapStateToProps)(Utdrag);
export default SykmeldingUtdrag;
|
import RPi.GPIO as GPIO
import time
def blink_led(pin, frequency, duration):
    """Blink an LED connected to a BCM GPIO pin.

    Args:
        pin: BCM pin number driving the LED.
        frequency: blink frequency in Hz; must be > 0.
        duration: total blink time in seconds.

    Raises:
        ValueError: if frequency is not positive (the original code
            divided by zero, or looped with a non-decreasing duration).
    """
    if frequency <= 0:
        raise ValueError("frequency must be > 0")
    GPIO.setmode(GPIO.BCM)  # Set the pin numbering mode to BCM
    GPIO.setup(pin, GPIO.OUT)  # Set the pin as an output
    half_period = 1.0 / (2 * frequency)  # hoisted loop invariant
    try:
        while duration > 0:
            GPIO.output(pin, GPIO.HIGH)  # Turn the LED on
            time.sleep(half_period)  # Wait for half the period
            GPIO.output(pin, GPIO.LOW)  # Turn the LED off
            time.sleep(half_period)  # Wait for half the period
            duration -= 1.0 / frequency  # One full period elapsed
    finally:
        # NOTE: GPIO.cleanup() resets *all* channels, not just `pin`.
        GPIO.cleanup()  # Clean up the GPIO settings when done

# Example usage
blink_led(18, 1, 10)  # Blinks the LED connected to GPIO pin 18 at 1 Hz for 10 seconds
#! /bin/bash
# SLURM batch script: REXI FD scalability benchmark run
# (rexi-m=2048, 14 MPI ranks, 1 OpenMP thread each) on cluster mpp2.
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_06_scalability_rexi_fd_new_up_to_2048/run_rexi_fd_par_m2048_t001_n0128_r0014_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_06_scalability_rexi_fd_new_up_to_2048/run_rexi_fd_par_m2048_t001_n0128_r0014_a1.err
#SBATCH -J rexi_fd_par_m2048_t001_n0128_r0014_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=14
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=01:00:00
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Compact thread placement, one thread per run (OMP_NUM_THREADS=1).
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the toolchain (anaconda python 2.7, intel 16.0, intel MPI 5.1, gcc 5).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
# Run from the SWEET workspace root, three levels up from the benchmark dir.
cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_06_scalability_rexi_fd_new_up_to_2048
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"
time -p mpiexec.hydra -genv OMP_NUM_THREADS 1 -envall -ppn 28 -n 14 ./build/rexi_fd_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=2048 -C -5.0
<gh_stars>1-10
package neuralNetworks.objects;
import dataTypes.DoubleVector;
import java.util.List;
import java.util.stream.Stream;
/**
 * An immutable grouping of training patterns processed together as one
 * mini batch. Thin read-only wrapper around the underlying pattern list.
 */
public class MiniBatch {

    /** The training patterns making up this mini batch. */
    private final List<NetworkPattern> patterns;

    public MiniBatch(List<NetworkPattern> networkPatterns) {
        this.patterns = networkPatterns;
    }

    /** Number of patterns in this batch. */
    public int size() {
        return patterns.size();
    }

    /** Sequential stream over the batch's patterns. */
    public Stream<NetworkPattern> stream() {
        return patterns.stream();
    }

    /** Pattern at the given position. */
    public NetworkPattern get(int index) {
        return patterns.get(index);
    }

    /** Input vector of the pattern at the given position. */
    public DoubleVector getInput(int index) {
        return patterns.get(index).getInput();
    }

    /** Expected output vector of the pattern at the given position. */
    public DoubleVector getOutput(int index) {
        return patterns.get(index).getOutput();
    }
}
|
<filename>common-utils/common-util/src/test/java/com/atjl/util/number/NumberHelperTest.java
package com.atjl.util.number;
import org.junit.Test;
import java.util.HashMap;
import static junit.framework.TestCase.assertEquals;
/**
 * NumberUtil Tester.
 *
 * @author <Authors name>
 * @version 1.0
 */
public class NumberHelperTest {

    /**
     * Method: long2Int(long lnum)
     */
    @Test
    public void testLong2Int() throws Exception {
        // NOTE(review): placeholder body -- does not exercise long2Int at all.
        HashMap hm = new HashMap();
        hm.put("","");
    }

    /**
     * Method: filterDot(String number)
     * Verifies that everything from the decimal point on is truncated.
     */
    @Test
    public void testFilterDot() throws Exception {
        assertEquals("23", NumberUtil.filterDot("23"));
        assertEquals("1", NumberUtil.filterDot("1.23"));
        assertEquals("0", NumberUtil.filterDot("0.23"));
    }

    /**
     * Method: minus(T t1, T t2)
     */
    @Test
    public void testMinus() throws Exception {
        // TODO: empty stub -- not yet implemented.
    }

    /**
     * Method: incr(T t1)
     */
    @Test
    public void testIncr() throws Exception {
        // TODO: empty stub -- not yet implemented.
    }

    /**
     * Method: add(T t1, T t2)
     */
    @Test
    public void testAdd() throws Exception {
        // TODO: empty stub -- not yet implemented.
    }

    /**
     * Method: div(T t1, T t2)
     */
    @Test
    public void testDiv() throws Exception {
        // TODO: empty stub -- not yet implemented.
    }

    /**
     * Method: mod(T t1, T t2)
     */
    @Test
    public void testMod() throws Exception {
        // TODO: empty stub -- not yet implemented.
    }

    /**
     * Method: getNumber(Class<T> cls, int i)
     */
    @Test
    public void testGetNumber() throws Exception {
        // TODO: empty stub -- not yet implemented.
    }
}
|
using System;
using System.Data;
using System.Data.SqlClient;
namespace DatabaseReader
{
class Program
{
static void Main(string[] args)
{
string connectionString = "Data Source=YourServer;Initial Catalog=YourDatabase;Integrated Security=True";
string query = "SELECT * FROM YourTable";
using (SqlConnection connection = new SqlConnection(connectionString))
{
SqlCommand command = new SqlCommand(query, connection);
connection.Open();
using (SqlDataReader reader = command.ExecuteReader())
{
while (reader.Read())
{
Console.WriteLine($"Column1: {reader["Column1"]}, Column2: {reader["Column2"]}, ...");
}
}
}
}
}
} |
<reponame>opit7/fhem-docker<filename>fhemExtended/core/RPiSensors/max6675.py
#!/usr/bin/env python
# Copyright 2014 IIJ Innovation Institute Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY IIJ INNOVATION INSTITUTE INC. ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL IIJ INNOVATION INSTITUTE INC. OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Copyright 2014 <NAME>. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''A Python class to access MAX6675 based temperature sensor. The
spidev module (https://github.com/doceme/py-spidev) is required.
Example:
import spidev
import max6675
sensor = max6675.Max6675(0, 0)
print sensor.temperature
'''
import sensorbase
import spidev
import time
class Max6675(sensorbase.SensorBase):
    # NOTE: this module is written for Python 2 (print statements in the
    # __main__ block below); keep additions Python 2 compatible.

    def __init__(self, bus = None, client = None):
        '''Initializes the sensor.

        bus: The SPI bus.
        client: The identifier of the client.
        '''
        assert(bus is not None)
        assert(client is not None)
        super(Max6675, self).__init__(self._update_sensor_data)
        self._bus = bus
        self._client = client
        self._temperature = None
        self._handle = spidev.SpiDev(self._bus, self._client)

    def __del__(self):
        # Guard with hasattr: __init__ may have failed before _handle was set.
        if hasattr(self, '_handle'):
            self._handle.close()

    @property
    def temperature(self):
        '''Returns a temperature value.  Returns None if no valid value is
        set yet.
        '''
        # _update() (from SensorBase) refreshes the cached reading via
        # _update_sensor_data when the cache lifetime has expired.
        self._update()
        return (self._temperature)

    def _update_sensor_data(self):
        # Read the 16-bit raw frame over SPI; dropping the low 3 bits and
        # scaling by 0.25 yields degrees C -- assumes MAX6675 frame layout,
        # TODO confirm against the datasheet/wiring.
        vals = self._handle.readbytes(2)
        self._temperature = ((vals[0] << 8 | vals[1]) >> 3) * 0.25
if __name__ == '__main__':
    # Demo: read the sensor ten times with caching disabled (0s) and then
    # with a 5-second cache lifetime. Python 2 print statements.
    import spidev
    sensor = Max6675(0, 0)
    for cache in [0, 5]:
        print '**********'
        print 'Cache lifetime is %d' % cache
        sensor.cache_lifetime = cache
        for c in range(10):
            print sensor.temperature
|
#include <vector>
#include <stack>
#include <limits>
namespace elang {
namespace {
typedef std::vector<int> IntVector;
// Stack that also tracks its current minimum in O(1), using a second
// stack whose top always holds the minimum of the values in dataStack.
class MinStack {
private:
    std::stack<int> dataStack;  // all pushed values
    std::stack<int> minStack;   // non-strictly decreasing minima

public:
    // Push a value; also record it as the new minimum when it does not
    // exceed the current one ("<=" so duplicate minima pop correctly).
    void push_back(int val) {
        dataStack.push(val);
        if (minStack.empty() || val <= minStack.top()) {
            minStack.push(val);
        }
    }

    // Pop the most recent value. Popping an empty stack was undefined
    // behaviour (std::stack::top/pop on empty); it is now a safe no-op.
    void pop_back() {
        if (dataStack.empty()) {
            return;
        }
        if (dataStack.top() == minStack.top()) {
            minStack.pop();
        }
        dataStack.pop();
    }

    // Current minimum. Precondition: the stack is non-empty.
    int get_min() {
        return minStack.top();
    }
};
}
} |
#!/bin/bash
# shellcheck disable=SC2086
set -xeo pipefail

# Run `cargo check` once per feature flag of the algebra crate, in the
# same order as before. A failing check does not abort the run; the exit
# code of the last failure is remembered and returned at the end.
retval=0

cd algebra

FEATURES="parallel fft n_fold bls12_377 bls12_381 edwards_bls12 edwards_sw6 jubjub sw6 mnt4_753 mnt6_298 mnt6_753 bn_382 tweedle full"

for feature in $FEATURES; do
    cargo $CARGOARGS check --features "$feature" || retval="$?"
done

exit "$retval"
<reponame>pulsar-chem/BPModule
import pulsar as psr
def load_ref_system():
    """ Returns toluene as found in the IQMol fragment library.

        All credit to https://github.com/nutjunkie/IQmol
    """
    # XYZ-style geometry: element symbol followed by x/y/z coordinates
    # (7 carbons + 8 hydrogens = toluene, C7H8).
    return psr.make_system("""
        C          1.2264      0.0427      0.0670
        C          1.0031     -1.3293      0.0600
        C         -0.2945     -1.8256     -0.0060
        C         -1.3704     -0.9461     -0.0646
        C         -1.1511      0.4266     -0.0578
        C          0.1497      0.9292      0.0066
        C          0.3871      2.3956     -0.0022
        H          2.2495      0.4310      0.1211
        H          1.8510     -2.0202      0.1071
        H         -0.4688     -2.9062     -0.0109
        H         -2.3926     -1.3347     -0.1157
        H         -2.0006      1.1172     -0.1021
        H          0.5024      2.7582     -1.0330
        H          1.2994      2.6647      0.5466
        H         -0.4475      2.9470      0.4506
        """)
|
<filename>src/app/components/EditorCont/RangeSelectDialog.tsx<gh_stars>0
import React, { useState } from "react";
import {makeStyles} from '@material-ui/core/styles';
import WrapWithDialog from "../WrapWithDialog";
import Button from "@material-ui/core/Button";
import TextField from '@material-ui/core/TextField';
// Styles for the range-selection dialog.
const useStyles = makeStyles({
    wrapper: {
        display: "flex",
        flexDirection: "column",
        gap: "20px",
        outline: "none",
        maxHeight: "80%",
        padding: "10px",
    },
    rangeFieldCont: {
        display: "flex",
        gap: "10px",
        outline: "none",
        padding: "20px"
    },
    btn: {
        backgroundColor: "#012069",
        color: "white",
        "&:hover": {
            backgroundColor: "#012069",
            opacity: 0.8
        }
    }
})

// Props contract for RangeSelectDialog; the current* values seed the
// two text inputs when the dialog opens.
type RangeSelectDialogProps = {
    showModal: boolean,
    handleClose: any,
    handleBack: Function,
    onRangeConfirm: Function,
    currInputMinRange: string,
    currInputMaxRange: string,
}
/**
 * Dialog prompting the user for an integer range.
 *
 * The minimum must be a non-negative integer and the maximum an integer;
 * the confirm button only renders while both inputs are valid. Confirming
 * calls props.onRangeConfirm(min, max) with the raw string values.
 */
export default function RangeSelectDialog(props: RangeSelectDialogProps) {
    const classes = useStyles();

    const [inputMinRange, setInputMinRange] = useState(props.currInputMinRange);
    // Bug fix: the maximum was previously seeded from currInputMinRange.
    const [inputMaxRange, setInputMaxRange] = useState(props.currInputMaxRange);

    const onInputMinRange = (event: any) => {
        const newInputMin = event && event.target && event.target.value ? event.target.value : "";
        setInputMinRange(newInputMin);
    }
    const onInputMaxRange = (event: any) => {
        const newInputMax = event && event.target && event.target.value ? event.target.value : "";
        setInputMaxRange(newInputMax);
    }

    // Validity checks, hoisted out of the JSX where they were duplicated.
    // NOTE(review): the check accepts 0 although the helper text says "> 0"
    // -- confirm which is intended.
    const minIsValid =
        Number.isInteger(Number.parseInt(inputMinRange)) &&
        Number.parseInt(inputMinRange) >= 0;
    const maxIsValid = Number.isInteger(Number.parseInt(inputMaxRange));

    return (
        <WrapWithDialog
            showModal={props.showModal}
            handleClose={props.handleClose}
            handleBack={props.handleBack}
            title={"Range Input"}
        >
            <div
                className={classes.wrapper}
            >
                <div
                    style={{
                        display: 'flex',
                        gap: '5px'
                    }}
                >
                    {
                        minIsValid
                            ?
                            <TextField
                                label="Minimum"
                                value={inputMinRange}
                                onChange={onInputMinRange}
                            />
                            :
                            <TextField
                                error
                                value={inputMinRange}
                                label="Error Minimum"
                                helperText="Must be an integer > 0"
                                onChange={onInputMinRange}
                            />
                    }
                    {
                        maxIsValid
                            ?
                            <TextField
                                label="Maximum"
                                value={inputMaxRange}
                                onChange={onInputMaxRange}
                            />
                            :
                            <TextField
                                error
                                value={inputMaxRange}
                                onChange={onInputMaxRange}
                                label="Error Maximum"
                                helperText="Must be an integer"
                            />
                    }
                </div>
                {
                    minIsValid && maxIsValid &&
                    <Button
                        className={classes.btn}
                        onClick={() => props.onRangeConfirm(inputMinRange, inputMaxRange)}
                        variant="outlined"
                    >
                        {`Confirm Range?`}
                    </Button>
                }
            </div>
        </WrapWithDialog>
    )
}
package com.redislabs.lettusearch.aggregate;
/**
 * Sort direction (ascending / descending) for aggregate results.
 */
public enum Order {
    Asc, Desc
}
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package servlet.kondisjon;
import crypto.ValidSession;
import html.Div;
import html.DivForm;
import html.Input;
import html.StandardHtml;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import util.http.Headers;
import util.sql.Database;
/**
*
* @author
*/
/**
 * Servlet for listing, creating, updating and deleting "kondisjon" trips
 * (conditioning hikes) belonging to the logged-in user.
 */
public class Turer extends HttpServlet {

    /**
     * Renders the trips page: the input form plus a placeholder table that
     * is filled client-side by the generated JavaScript calls.
     */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        Headers.GET(resp);
        ValidSession.isValid(req, resp);
        PrintWriter out = resp.getWriter();
        try {
            StandardHtml html = new StandardHtml("Kondisjon Turer");
            DivForm form = getKondisjonTurForm();
            Div div = new Div("", "kondisjonTurTabell", "div-table");
            Div containerDiv = new Div(form.toString() + div.toString(), "div-container");
            html.addBodyContent(containerDiv.toString());
            // Client-side wiring: arrays describe the AJAX endpoints used to
            // build the table, delete rows and submit new trips.
            String tableArr = "['getKondisjonTur','kondisjonTurTabell','/kondisjon/turer/']";
            String deleteArr = "['deleteKondisjonTur','kondisjonTurerId','/kondisjon/turer/']";
            html.addBodyJS("buildTable(" + tableArr + "," + deleteArr + ");");
            String paramArray = "['navn','km','mohStart','mohSlutt']";
            html.addBodyJS("insertRequest('kondisjonTurSubmit','insertKondisjonTur','/kondisjon/turer/'," + paramArray + "," + tableArr + "," + deleteArr + ");");
            out.print(html.toString());
        } catch (Exception e) {
            e.printStackTrace(out);
        }
    }

    /** Builds the input form for registering a new trip. */
    private DivForm getKondisjonTurForm() {
        DivForm form = new DivForm("kondisjonTurForm", "div-form");
        // new Input(placeholder, label, inputType, elementId, elementClass)
        form.addElement(new Input("tur navn", "tur navn", "text", "kondisjonTurInputNavn", "input"));
        form.addElement(new Input("kilometer", "kilometer", "number", "kondisjonTurInputKm", "input"));
        form.addElement(new Input("moh start", "moh start", "number", "kondisjonTurInputMohStart", "input"));
        form.addElement(new Input("moh slutt", "moh slutt", "number", "kondisjonTurInputMohSlutt", "input"));
        form.addElement(new Div("submit", "kondisjonTurSubmit", "submit"));
        return form;
    }

    /**
     * Dispatches CRUD actions based on the "type" request parameter.
     * Reading brukerId from the session first aborts the request for
     * invalid sessions (the lookup throws and is caught below).
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        Headers.POST(response);
        PrintWriter out = response.getWriter();
        String type = request.getParameter("type");
        try {
            int brukerId = (int) request.getSession().getAttribute("brukerId");
            if (type.equals("getKondisjonTur")) {
                out.print(getKondisjonTur(brukerId));
            } else if (type.equals("insertKondisjonTur")) {
                out.print(insertKondisjonTur(brukerId,
                        request.getParameter("navn"),
                        Double.parseDouble(request.getParameter("km")),
                        Integer.parseInt(request.getParameter("mohStart")),
                        Integer.parseInt(request.getParameter("mohSlutt"))));
            } else if (type.equals("deleteKondisjonTur")) {
                out.print(deleteKondisjonTur(brukerId, Integer.parseInt(request.getParameter("kondisjonTurerId"))));
            } else if (type.equals("updateKondisjonTur")) {
                out.print(updateKondisjonTur(brukerId,
                        Integer.parseInt(request.getParameter("rowId")),
                        Double.parseDouble(request.getParameter("km")),
                        Integer.parseInt(request.getParameter("mohStart")),
                        Integer.parseInt(request.getParameter("mohSlutt")),
                        request.getParameter("navn")));
            }
        } catch (Exception e) {
            e.printStackTrace(out);
        }
    }

    /**
     * Updates a trip owned by the user. brukerId is now bound as a
     * parameter, consistent with deleteKondisjonTur, instead of being
     * concatenated into the SQL string.
     */
    private int updateKondisjonTur(int brukerId, int kondisjonTurerId, double km, int mohStart, int mohSlutt, String navn) throws Exception {
        String query = "UPDATE kondisjonTurer SET navn = ?, km = ?, mohStart = ?, mohSlutt = ? WHERE kondisjonTurerId = ? AND brukerId = ?;";
        return Database.singleUpdateQuery(query, new Object[]{navn, km, mohStart, mohSlutt, kondisjonTurerId, brukerId}, false);
    }

    /** Deletes a trip owned by the user. */
    private int deleteKondisjonTur(int brukerId, int kondisjonTurerId) throws Exception {
        String query = "DELETE FROM kondisjonTurer WHERE brukerId = ? AND kondisjonTurerId = ?;";
        return Database.singleUpdateQuery(query, new Object[]{brukerId, kondisjonTurerId}, false);
    }

    /**
     * Returns the user's trips as JSON.
     * Database.normalQuery offers no parameter binding; brukerId is an int
     * taken from the session, so the concatenation is not injectable.
     */
    private String getKondisjonTur(int brukerId) throws Exception {
        String query = "SELECT kondisjonTurerId,navn,km,mohStart,mohSlutt FROM kondisjonTurer "
                + " WHERE brukerId = " + brukerId + ";";
        return Database.normalQuery(query).getJSON();
    }

    /** Inserts a new trip for the user; all values bound as parameters. */
    private int insertKondisjonTur(int brukerId, String navn, double km, int mohStart, int mohSlutt) throws Exception {
        Object[] vars = {brukerId, navn, km, mohStart, mohSlutt};
        String query = "INSERT INTO kondisjonTurer (brukerId,navn,km,mohStart,mohSlutt) "
                + "VALUES (?,?,?,?,?);";
        return Database.singleUpdateQuery(query, vars, false);
    }
}
|
#!/bin/bash
set -e

command=$1
arguments=${*:2}

# Print the list of supported subcommands.
# Fix: "boundaries" and "zonalstats" were handled by the dispatcher below
# but missing from the help text; "Copernicus" was also misspelled.
show_help() {
echo """
Available commands:

srtm : start SRTM subsystem (DEM)
healthsites : start healthsites subsystem (health facilities)
osm : start OSM subsystem (water/transport)
copernicus_glc : start Copernicus GLC subsystem (land cover)
worldpop : start worldpop subsystem (population)
boundaries : start boundaries subsystem
accessibility : start accessibility analysis
zonalstats : start zonal statistics analysis
python : run arbitrary python code
bash : launch bash session
test : launch tests using Pytest

Any arguments passed will be forwarded to the executed command
"""
}

# Dispatch to the matching python module (or helper), forwarding all
# remaining arguments; unknown commands print the help text.
case "$command" in
"srtm")
python3 -m srtm2 $arguments
;;
"worldpop")
python3 -m worldpop $arguments
;;
"healthsites")
python3 -m healthsites $arguments
;;
"osm")
python3 -m osm $arguments
;;
"copernicus_glc")
python3 -m copernicus_glc $arguments
;;
"accessibility")
python3 -m accessibility $arguments
;;
"boundaries")
python3 -m boundaries $arguments
;;
"zonalstats")
python3 -m zonalstats $arguments
;;
"test")
pytest -s $arguments
;;
"python")
python3 $arguments
;;
"bash")
bash $arguments
;;
*)
show_help
;;
esac
<filename>powerauth-nextstep-model/src/main/java/io/getlime/security/powerauth/lib/nextstep/model/entity/data/OperationReferenceAttribute.java
package io.getlime.security.powerauth.lib.nextstep.model.entity.data;
import io.getlime.security.powerauth.lib.nextstep.model.converter.OperationTextNormalizer;
/**
* Reference in operation data.
*
* @author <NAME>, <EMAIL>
*/
public class OperationReferenceAttribute extends OperationDataAttribute {

    // Reference value carried by this attribute; may be null (formattedValue
    // then yields an empty string).
    private String reference;

    /**
     * Default constructor.
     */
    public OperationReferenceAttribute() {
        this.type = Type.REFERENCE;
    }

    /**
     * Constructor with reference.
     * @param reference Reference.
     */
    public OperationReferenceAttribute(String reference) {
        this.type = Type.REFERENCE;
        this.reference = reference;
    }

    /**
     * Get reference.
     * @return Reference.
     */
    public String getReference() {
        return reference;
    }

    /**
     * Set reference.
     * @param reference Reference.
     */
    public void setReference(String reference) {
        this.reference = reference;
    }

    @Override
    public Type getType() {
        return type;
    }

    /**
     * Formatted value: the literal prefix "R" followed by the normalized
     * reference, or an empty string when no reference is set.
     */
    @Override
    public String formattedValue() {
        if (reference == null) {
            return "";
        }
        return "R"+new OperationTextNormalizer().normalizeOperationData(reference);
    }
}
|
'use strict';

/**
 * @ngdoc directive
 * @name hatshopApp.directive:hsPage
 * @description
 * # hsPage
 * A single page element that must be nested inside an `hsPages` directive;
 * on link it registers its isolate scope (carrying `route`) with the
 * parent controller.
 */
angular.module('hatshopApp').directive('hsPage', function () {
    return {
        require: '^hsPages', // resolves the parent hsPages controller
        templateUrl: 'views/hs-page.html',
        restrict: 'E',
        transclude: true,
        scope: {
            route: '@' // page route, passed as a plain string attribute
        },
        link: function (scope, element, attrs, pagesCtrl) {
            // Register this page with the parent pages controller.
            pagesCtrl.addPage(scope);
        }
    };
});
|
package org.jfteam.web.controller;
import org.jfteam.service.LookupItemService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RestController;
/**
* Created with IntelliJ IDEA.
* Description:
* User: fengwenping
* Date: 2018-07-17
* Time: 下午11:48
*/
@RestController
public class LookupItemController {

    private static final Logger LOGGER = LoggerFactory.getLogger(LookupItemController.class);

    // NOTE(review): no request mappings are defined yet -- the injected
    // service (and logger) are unused until endpoints are added.
    @Autowired
    private LookupItemService lookupItemService;
}
|
class Stack:
    """Simple LIFO stack backed by a Python list."""

    def __init__(self):
        # The top of the stack is the end of the list.
        self.items = []

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item, or None when the stack is empty."""
        if self.isEmpty():
            return None
        return self.items.pop()

    def isEmpty(self):
        """Return True when the stack holds no items."""
        return not self.items
#!/bin/bash -eu
# Merge all event-* JSON files in the current directory into a single
# JSON array on stdout (jq -s slurps every input into one array).
cat event-* | jq -s .
|
#!/usr/bin/env bash
# Generates blockstates, models, loot tables, recipes, tags and lang
# entries for a new wood TYPE's barrel, bookcase and crate blocks.
# Usage: <script> [namespace:]type   (namespace defaults to "minecraft")
if [ -z `which sed` ]; then
  echo "Missing sed"
  exit 1
fi

if [ -z "$1" ]; then
  echo "Missing type"
  exit 1
fi

THISMOD="covalent"
DATAROOT="../src/main/resources/data"
ASSETS="../src/main/resources/assets/${THISMOD}"
DATA="${DATAROOT}/${THISMOD}"

# Split the "namespace:type" argument on the colon.
A=(${1//:/ })
if [ -z "${A[1]}" ]; then
  NAMESPACE="minecraft"
  TYPE="${A[0]}"
else
  NAMESPACE="${A[0]}"
  TYPE="${A[1]}"
fi

# Make sure the per-namespace recipes directory exists.
if [ ! -e "${DATA}/recipes/${NAMESPACE}" ]; then
  mkdir "${DATA}/recipes/${NAMESPACE}"
fi
# Copy template SRC to DEST, substituting the TYPE/NAMESPACE/THISMOD
# placeholders in place; an optional third argument replaces every "?"
# (used as an iteration index in numbered model templates).
copy_replace() {
  SRC=$1
  DEST=$2
  IT=$3
  cp "${SRC}" "${DEST}"
  sed -i "s/TYPE/${TYPE}/g" "${DEST}"
  sed -i "s/NAMESPACE/${NAMESPACE}/g" "${DEST}"
  sed -i "s/THISMOD/${THISMOD}/g" "${DEST}"
  if [ -n "${IT}" ]; then
    sed -i "s/?/${IT}/g" "${DEST}"
  fi
}

# Append display-name entries for the new blocks to en_us.json.
add_lang_strings() {
  # Capitalise the first letter of TYPE for the display name.
  NAME="$(tr '[:lower:]' '[:upper:]' <<< ${TYPE:0:1})${TYPE:1}"
  LANGFILE="${ASSETS}/lang/en_us.json"
  # Reopen the JSON object so new entries can be appended before "}".
  remove_last_entry "${LANGFILE}" "}"
  # add new lang entries
  {
    echo " \"block.${THISMOD}.${TYPE}_barrel\": \"${NAME} Barrel\",";
    echo " \"block.${THISMOD}.${TYPE}_bookcase\": \"${NAME} Bookcase\","
    echo " \"block.${THISMOD}.${TYPE}_crate\": \"${NAME} Crate\",";
    echo "}"
  } >> $LANGFILE
}
# Add the new blocks to the given CONTAINER's block and item tag files.
# The original had three copy-pasted loops (barrels / bookcases / crates)
# that differed only in file name and entry suffix; they are collapsed
# into one loop. The singular entry suffix is derived by stripping the
# plural "s" (barrels -> barrel, bookcases -> bookcase, crates -> crate).
add_tags() {
  CONTAINER=$1
  for KIND in barrels bookcases crates
  do
    for f in "${DATAROOT}/${CONTAINER}/tags/blocks/${KIND}.json" "${DATAROOT}/${CONTAINER}/tags/items/${KIND}.json"
    do
      if [ -e "$f" ]; then
        # Reopen the JSON list/object, append the new entry, close again.
        remove_last_entry "${f}" "]" "}"
        {
          echo " \"${THISMOD}:${TYPE}_${KIND%s}\"";
          echo " ]"
          echo "}"
        } >> $f
        strip_empty_lines $f
      fi
    done
  done
}
# Prepare a JSON file for appending: add a trailing comma after the final
# quoted entry and strip the given closing bracket(s) so new entries and
# a fresh closer can be echoed onto the end.
remove_last_entry() {
  sed -i ':a;N;$!ba;s/"\n/",\n/g' "${1}" # add a comma after the last entry
  sed -i "s/${2}//g" "${1}" # remove bracket
  if [ -n "$3" ]; then
    sed -i "s/${3}//g" "${1}" # remove bracket
  fi
}

# Delete whitespace-only lines from the given file.
strip_empty_lines() {
  sed -i '/^ *$/d' "${1}"
}
# barrels
copy_replace "barrel_blockstate" "${ASSETS}/blockstates/${TYPE}_barrel.json"
copy_replace "barrel_item_model" "${ASSETS}/models/item/${TYPE}_barrel.json"
copy_replace "barrel_block_model" "${ASSETS}/models/block/${TYPE}_barrel.json"
copy_replace "barrel_open_block_model" "${ASSETS}/models/block/${TYPE}_barrel_open.json"
copy_replace "barrel_loot_table" "${DATA}/loot_tables/blocks/${TYPE}_barrel.json"
copy_replace "barrel_recipe" "${DATA}/recipes/${NAMESPACE}/${TYPE}_barrel.json"

# bookcases
copy_replace "bookcase_blockstate" "${ASSETS}/blockstates/${TYPE}_bookcase.json"
copy_replace "bookcase_item_model" "${ASSETS}/models/item/${TYPE}_bookcase.json"
copy_replace "bookcase_0_block_model" "${ASSETS}/models/block/${TYPE}_bookcase_0.json"
# Models 1..9 share a single template with "?" as the numeric placeholder.
for i in {1..9}
do
  copy_replace "bookcase_x_block_model" "${ASSETS}/models/block/${TYPE}_bookcase_${i}.json" $i
done
copy_replace "bookcase_loot_table" "${DATA}/loot_tables/blocks/${TYPE}_bookcase.json"
copy_replace "bookcase_recipe" "${DATA}/recipes/${NAMESPACE}/${TYPE}_bookcase.json"

# crates
copy_replace "crate_blockstate" "${ASSETS}/blockstates/${TYPE}_crate.json"
copy_replace "crate_block_model" "${ASSETS}/models/block/${TYPE}_crate.json"
copy_replace "crate_item_model" "${ASSETS}/models/item/${TYPE}_crate.json"
copy_replace "crate_loot_table" "${DATA}/loot_tables/blocks/${TYPE}_crate.json"
copy_replace "crate_recipe" "${DATA}/recipes/${NAMESPACE}/${TYPE}_crate.json"

# Register the new blocks in the charm and forge tag namespaces, then add
# the display names.
add_tags "charm"
add_tags "forge"
add_lang_strings

echo "Done!"
# Bash completion for the `icd` and `bb` commands: the second word
# (COMP_CWORD == 2) completes to a Bitbucket context path, any other
# position completes to repo names found under
# ~/go/src/bitbucket-eng-sjc1.cisco.com/an.
_complete_repo_name () {
  COMPREPLY=()
  cur="${COMP_WORDS[COMP_CWORD]}"
  comp_ccd_an_dirs=$(ls ${HOME}/go/src/bitbucket-eng-sjc1.cisco.com/an)
  repos=""
  for i in ${comp_ccd_an_dirs}
  do
    # Strip known repo-name prefixes so completion matches the short name.
    i=${i#ui-an-}
    i=${i#ucs-}
    repos="${repos} ${i}"
  done
  comp_bitbucket_context_paths="branches browse commits compare pull-requests pull-requests?create"
  if [ "${COMP_CWORD}" == "2" ];then
    comp_suggestions="${comp_bitbucket_context_paths}"
  else
    comp_suggestions="${repos}"
  fi
  COMPREPLY=( $(compgen -W "${comp_suggestions}" -- $cur))
  return 0
}
complete -F _complete_repo_name icd bb
/*
UdpClient.java
Firefly Luciferin, very fast Java Screen Capture software designed
for Glow Worm Luciferin firmware.
Copyright (C) 2020 - 2022 <NAME> (https://github.com/sblantipodi)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.dpsoftware.network.tcpUdp;
import lombok.extern.slf4j.Slf4j;
import org.dpsoftware.audio.AudioLoopback;
import org.dpsoftware.config.Constants;
import org.dpsoftware.utilities.CommonUtility;
import java.awt.*;
import java.io.IOException;
import java.net.*;
import java.util.Arrays;
/**
 * UDP Client to manage UDP wireless stream, this is an alternative to MQTT stream
 */
@Slf4j
public class UdpClient {

    public final DatagramSocket socket;
    private final InetAddress address;
    final int UDP_PORT = Constants.UDP_PORT;

    /**
     * Constructor
     * @param deviceIP device IP
     */
    public UdpClient(String deviceIP) throws SocketException, UnknownHostException {
        socket = new DatagramSocket();
        socket.setSendBufferSize(Constants.UDP_MAX_BUFFER_SIZE);
        // 0x08 sets a traffic-class bit on outgoing packets — presumably to
        // request preferential handling from the network stack; confirm intent.
        socket.setTrafficClass(0x08);
        address = InetAddress.getByName(deviceIP);
    }

    /**
     * Send message
     * @param msg to send
     */
    public void sendUdpStream(String msg) {
        byte[] payload = msg.getBytes();
        DatagramPacket datagram = new DatagramPacket(payload, payload.length, address, UDP_PORT);
        try {
            socket.send(datagram);
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    /**
     * Organize led data and send it via UDP stream.
     * The LED array is split into UDP_CHUNK_SIZE-sized chunks; each chunk is sent
     * as one comma-separated datagram: header ("DPsoftware", total LED count,
     * brightness, chunk count, chunk index) followed by the chunk's RGB values.
     * @param leds array containing color information
     */
    public void manageStream(Color[] leds) {
        final int totalChunks = (int) Math.ceil(leds.length / Constants.UDP_CHUNK_SIZE);
        for (int chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
            // Slice the LED range covered by this chunk (last chunk may be shorter).
            int firstLed = (int) Constants.UDP_CHUNK_SIZE * chunkIndex;
            int endExclusive = Math.min((int) (firstLed + Constants.UDP_CHUNK_SIZE), leds.length);
            Color[] chunk = Arrays.copyOfRange(leds, firstLed, endExclusive);
            StringBuilder payload = new StringBuilder();
            payload.append("DPsoftware").append(",")
                    .append(leds.length).append(",")
                    // Audio-reactive mode overrides brightness unless it is the sentinel 255.
                    .append(AudioLoopback.AUDIO_BRIGHTNESS == 255 ? CommonUtility.getNightBrightness() : AudioLoopback.AUDIO_BRIGHTNESS).append(",")
                    .append(totalChunks).append(",")
                    .append(chunkIndex).append(",");
            for (int i = 0; i < chunk.length; i++) {
                payload.append(chunk[i].getRGB());
                if (i < chunk.length - 1) {
                    payload.append(",");
                }
            }
            sendUdpStream(payload.toString());
            // Let the microcontroller rest for 1 milliseconds before next stream
            if (Constants.UDP_MICROCONTROLLER_REST_TIME > 0) {
                CommonUtility.sleepMilliseconds(Constants.UDP_MICROCONTROLLER_REST_TIME);
            }
        }
    }

    /**
     * Close stream
     */
    public void close() {
        socket.close();
    }
}
|
<reponame>tvaisanen/embeddedGraphWidget
/**
* Created by toni on 20.7.2017.
*/
(function () {
    // Every QUnit test module to run. The `.js` extension is deliberate:
    // RequireJS then resolves each entry as a relative path instead of
    // applying the configured `baseUrl` mapping.
    var modulesUnderTest = [
        "js/utils/tests/graphUtilsTest.js",
        "js/components/tests/uiTest.js",
        "js/components/tests/elementStyleTest.js",
        "js/components/dynamicListInput/test.js",
        "js/components/nodeForm/test.js",
        "js/components/templateFormManager/test.js"
    ];
    // Once every module has loaded, hand control to the QUnit runner.
    require(modulesUnderTest, function () {
        QUnit.load();
        QUnit.start();
    });
}());
<filename>js/cart.js
function setCart(){
    // Refresh the cart summary on cart.html: style the result box, update each
    // product's quantity via addProduct(), report per-product success/failure,
    // then refresh the item count and total price displays.
    var result = document.getElementById('resultCart');
    result.style.backgroundColor = "WhiteSmoke";
    result.style.padding = "0.3em";
    result.innerHTML = '';
    // [input element id, display label] for each product, in display order.
    var products = [
        ['quantity_golf', 'Golf'],
        ['quantity_megane', 'Mégane'],
        ['quantity_c4', 'C4']
    ];
    for (var i = 0; i < products.length; i++) {
        var inputId = products[i][0];
        var label = products[i][1];
        if (addProduct(inputId)) {
            result.innerHTML += '<p>Quantité de ' + label + ' actualisé</p>';
        } else {
            result.innerHTML += '<p class="error">Veuillez entrer une quantité de ' + label + ' entière et positive, merci.</p>';
        }
    }
    showProductNum("cartProductNum");
    showTotalPrice('cartPrice');
}
from pso.analyzers import AbstractAnalyzer
from pso.fields import FieldType
from pso_solr import tokenizers
class |
<reponame>maschnetwork/spring-todo
package at.joanneum.swd.esamvc.repository;
import at.joanneum.swd.esamvc.entity.Todo;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Data-access layer for {@link Todo} entities, backed by Spring Data JPA.
 * Both query implementations are derived automatically from the method names.
 */
@Repository
public interface TodoRepository extends JpaRepository<Todo, Integer> {
    /** Looks up a single todo by primary key (presumably null when absent — confirm callers handle that). */
    Todo findById(int id);
    /** Returns all todos ordered ascending by id (derived "OrderById" clause). */
    List<Todo> findAllByOrderById();
}
|
#!/usr/bin/env bash
# Resolve the absolute directory containing this script, regardless of the
# caller's working directory or how the script was invoked.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Make the project root importable by Python.
export PYTHONPATH=$PYTHONPATH:$DIR
# Tell Flask which module holds the application object.
export FLASK_APP=${DIR}/site/index.py
# Run the Flask dev server with a zh_CN UTF-8 locale for this process only.
LC_ALL=zh_CN.UTF-8 flask run
|
<reponame>fuji-nakahara/nlp100<gh_stars>0
# Using the template contents (from exercise 20), fetch the URL of the
# national flag image.
# (Hint: call the MediaWiki API's imageinfo to convert the file reference into a URL.)
require 'json'
require 'open-uri'
require 'uri'
require_relative '20'
# Extract the file name from the template's "国旗画像" (flag image) field.
flag_file_name = BRITISH_ARTICLE_BODY[/国旗画像\s*=\s*(.*)/, 1]
uri = URI.parse('https://ja.wikipedia.org/w/api.php')
uri.query = URI.encode_www_form(
action: 'query',
titles: "File:#{flag_file_name}",
prop: 'imageinfo',
iiprop: 'url',
format: 'json'
)
# NOTE(review): assumes the API returns the page under key '-1' (the File:
# page has no local entry on ja.wikipedia, yet imageinfo is still populated)
# — confirm against an actual API response.
res = JSON.parse(uri.open.read)['query']['pages']['-1']['imageinfo'][0]['url']
puts res
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.