text
stringlengths 1
1.05M
|
|---|
<reponame>df-service-e2e-test/x_khu2_9th_stress_test_5
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.collection.impl.queue.operations;
import com.hazelcast.collection.impl.queue.QueueContainer;
import com.hazelcast.collection.impl.queue.QueueDataSerializerHook;
import com.hazelcast.monitor.impl.LocalQueueStatsImpl;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.ReadonlyOperation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
/**
* Checks whether contain or not item in the Queue.
*/
/**
 * Read-only queue operation that reports whether the queue contains every one
 * of the given items. {@link #run()} stores the boolean result in
 * {@code response}.
 */
public class ContainsOperation extends QueueOperation implements ReadonlyOperation {

    /** Items to look up; serialized together with this operation. */
    private Collection<Data> items;

    public ContainsOperation() {
    }

    public ContainsOperation(String name, Collection<Data> dataList) {
        super(name);
        this.items = dataList;
    }

    @Override
    public void run() throws Exception {
        response = getContainer().contains(items);
    }

    @Override
    public void afterRun() throws Exception {
        // A contains() check is counted as an "other" operation in the local stats.
        getQueueService().getLocalQueueStatsImpl(name).incrementOtherOperations();
    }

    @Override
    public int getId() {
        return QueueDataSerializerHook.CONTAINS;
    }

    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        // Wire format: item count, followed by each serialized item.
        out.writeInt(items.size());
        for (Data item : items) {
            out.writeData(item);
        }
    }

    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        super.readInternal(in);
        int count = in.readInt();
        items = new ArrayList<Data>(count);
        for (int i = 0; i < count; i++) {
            items.add(in.readData());
        }
    }
}
|
# Sum of the integers 1..10.
# FIX: the original accumulated into a variable named `sum`, shadowing the
# built-in; use the built-in over a range instead.
total = sum(range(1, 11))
print(total)  # Output: 55
|
#!/bin/bash
# Build each Go project into ./bin/<project-name>.
# List of project directories
projects=("project1" "project2" "project3")

# Create bin directory if it doesn't exist
mkdir -p bin

# Build each project in a subshell so the script's working directory never
# changes (safer than cd'ing back with `cd ..`).
# FIX: the original ignored `go build` failures and kept looping; abort on the
# first failed build instead.
for project in "${projects[@]}"
do
    echo "Building \"$project\""
    (
        cd "$project" || exit 1
        go build -o "../bin/$project"
    ) || exit 1
done
|
package project.controllers.cf_controllers;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Label;
import javafx.scene.control.PasswordField;
import javafx.scene.paint.Color;
import javafx.stage.Stage;
import project.controllers.BaseController;
import project.models.ClubForum;
import project.views.ViewFactory;
import java.net.URL;
import java.util.ResourceBundle;
/**
 * Controller for the modal that asks for a department password before opening
 * the corresponding department window (HR, PR, R&amp;D, Treasury or the
 * club-forum settings).
 */
public class DepartmentAccessWindowController extends BaseController implements Initializable {

    /** Department key this prompt was opened for: "hr", "pr", "rnd", "treasury" or "settings". */
    private String ref;
    private ClubForum clubForum;
    /** Dashboard label whose window is closed after a successful login (except for settings). */
    private Label refLabel;
    private CFDashboardController controller;

    public DepartmentAccessWindowController(ViewFactory viewFactory, String fxmlName, String ref, ClubForum clubForum, Label refLabel, CFDashboardController controller) {
        super(viewFactory, "ui/cf_views/" + fxmlName);
        this.ref = ref;
        this.clubForum = clubForum;
        this.refLabel = refLabel;
        this.controller = controller;
    }

    @FXML
    private Label departmentIndicatorLabel;
    @FXML
    private Label successLabel;
    @FXML
    private PasswordField accessDeptPasswordField;

    @Override
    public void initialize(URL url, ResourceBundle resourceBundle) {
        // Tell the user which department the password prompt belongs to.
        switch (ref) {
            case "hr" -> departmentIndicatorLabel.setText("Enter password for HR");
            case "pr" -> departmentIndicatorLabel.setText("Enter password for PR");
            case "rnd" -> departmentIndicatorLabel.setText("Enter password for R&D");
            case "treasury" -> departmentIndicatorLabel.setText("Enter password for Treasury");
            case "settings" -> departmentIndicatorLabel.setText("Enter password for Settings");
        }
    }

    /**
     * Validates the entered password and, on success, opens the matching
     * department window and records the granted access on the dashboard.
     */
    @FXML
    void onAccessBtnAction(ActionEvent event) {
        // The five branches previously duplicated the whole flow; they only
        // differ in expected password, window to open, whether the dashboard
        // ref window is closed, and which access flag is set.
        switch (ref) {
            case "hr" -> grantAccess(clubForum.getHrPass(), true,
                    () -> getViewFactory().showCfHRWindow(clubForum),
                    () -> CFDashboardController.hrAccess = true);
            case "pr" -> grantAccess(clubForum.getPrPass(), true,
                    () -> getViewFactory().showPRWindow(clubForum),
                    () -> CFDashboardController.prAccess = true);
            case "rnd" -> grantAccess(clubForum.getRndPass(), true,
                    () -> getViewFactory().showRnDWindow(clubForum),
                    () -> CFDashboardController.rndAccess = true);
            case "treasury" -> grantAccess(clubForum.getTreasuryPass(), true,
                    () -> getViewFactory().showTreasuryWindow(clubForum),
                    () -> CFDashboardController.treasuryAccess = true);
            // Settings keeps the dashboard ref window open (matches the
            // original behavior, which never closed it for this case).
            case "settings" -> grantAccess(clubForum.getSettingsPass(), false,
                    () -> getViewFactory().showCFSettingsWindow(clubForum, controller),
                    () -> CFDashboardController.settings = true);
        }
    }

    /**
     * Shared password-check flow: on a match, opens the department window,
     * closes the surrounding windows and records the access grant; otherwise
     * shows the "Wrong password" message in red.
     *
     * @param expectedPassword password stored on the club forum for this department
     * @param closeRefWindow   whether the dashboard ref window should also be closed
     * @param openWindow       opens the department-specific window
     * @param markAccess       records the granted access on the dashboard controller
     */
    private void grantAccess(String expectedPassword, boolean closeRefWindow, Runnable openWindow, Runnable markAccess) {
        // NOTE(review): passwords are compared in plain text here — confirm
        // whether hashing is handled elsewhere in the project.
        if (accessDeptPasswordField.getText().equals(expectedPassword)) {
            openWindow.run();
            if (closeRefWindow) {
                getViewFactory().closeWindow((Stage) refLabel.getScene().getWindow());
            }
            getViewFactory().closeWindow((Stage) successLabel.getScene().getWindow());
            getViewFactory().closeAccessWindow();
            markAccess.run();
        } else {
            successLabel.setText("Wrong password");
            successLabel.setTextFill(Color.RED);
        }
    }
}
|
#!/bin/bash
# Build and push a multi-arch docker manifest for the image name given as $1.
image_name=$1
arch_list=("amd64" "arm64")
for arch in "${arch_list[@]}"
do
    echo "# build manifest for ${arch}"
    img_of_cur_arch="${image_name}-${arch}"
    # check that the per-arch image exists locally before doing anything
    set +e
    # FIX: the original used `2>&1 > /dev/null`, which sends stderr to the
    # terminal and only silences stdout; `> /dev/null 2>&1` silences both.
    docker inspect "${img_of_cur_arch}" > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "[Warning] No ${arch} image found on local , image name is ${img_of_cur_arch}. aborting image manifest merge"
        exit 2
    fi
    set -e
    # push image
    echo "[1/3] push image ${img_of_cur_arch} to hub.."
    docker push "${img_of_cur_arch}"
    # create manifest
    echo "[2/3] add local newly build ${arch} image into manifest .."
    docker manifest create --insecure --amend "${image_name}" "${img_of_cur_arch}"
    # annotate manifest
    echo "[3/3] add annotation image into manifest .."
    docker manifest annotate "${image_name}" "${img_of_cur_arch}" --os linux --arch "${arch}"
done
# push manifest
echo "[Info] pushing manifests .."
docker manifest push --insecure --purge "${image_name}"
echo "done"
|
// This module was autogenerated. Please don't edit.
exports._UnsafeList = require("@material-ui/core/List").default;
|
def calculate_sum(n):
    """Return the sum of the integers 1..n (0 when n < 1).

    range(1, n + 1) is empty for n < 1, so the result is 0 — identical to the
    original accumulation loop, but using the built-in sum().
    """
    return sum(range(1, n + 1))
|
import requests
from requests.auth import HTTPBasicAuth
class RestApiService:
    """Thin client for the service's REST endpoints.

    ``connection`` is a mapping that provides 'homePageUrl', 'username'
    and 'password'.
    """

    def __init__(self, connection):
        self.conn = connection

    def about(self):
        """GET /about with basic auth and return the raw response body (bytes)."""
        url = "{}{}".format(self.conn.get('homePageUrl'), "/about")
        credentials = HTTPBasicAuth(self.conn.get('username'), self.conn.get('password'))
        return requests.get(url, auth=credentials).content
|
const Event = require('../../base/Event');
const Guild = require('../../models/Guild');
// Per-message context handed to commands.
// NOTE(review): this object is module-level, so concurrent message events
// mutate the same instance — its fields may be overwritten by another message
// before `command.run` reads them; confirm whether that is acceptable.
const data = {};
module.exports = class extends Event {
    // Entry point for Discord "message" events: resolves the guild prefix,
    // parses the command name/arguments and dispatches to the matching command.
    async run(message) {
        // Ignore DMs, channels the bot cannot view, and messages from bots.
        if (message.channel.type === 'dm' || !message.channel.viewable || message.author.bot) return;
        // Ignore webhook messages (no real member behind them).
        if (message.webhookID) return;
        if(message.guild && !message.member){
            // Member may be missing from cache; fetch before permission checks.
            await message.guild.members.fetch(message.author.id);
        }
        // Bail out silently when the bot cannot reply in this channel.
        if(!message.channel.permissionsFor(message.guild.me).has(['SEND_MESSAGES', 'EMBED_LINKS'])) return;
        // Matches a bare bot mention ("<@id>") and a mention used as prefix.
        const mentionRegex = new RegExp(`^<@!?${this.client.user.id}>$`);
        const mentionRegexPrefix = new RegExp(`^<@!?${this.client.user.id}> `);
        const client = this.client;
        data.config = client.config;
        if(message.guild){
            // Gets guild data
            const guild = await client.findOrCreateGuild({ id: message.guild.id });
            message.guild.data = data.guild = guild;
        }
        // NOTE(review): if `message.guild` were ever falsy here, `data.guild`
        // would be stale or undefined — the DM early-return above makes that
        // unlikely, but confirm for other channel types.
        client.prefix = data.guild.prefix;
        // A bare mention answers with the configured prefix.
        if (message.content.match(mentionRegex)) return message.channel.send(`My prefix for ${message.guild.name} is \`${data.guild.prefix}\`.`);
        const prefix = message.content.match(mentionRegexPrefix) ? message.content.match(mentionRegexPrefix)[0] : data.guild.prefix;
        if(!message.content.startsWith(prefix)) return;
        // First word after the prefix is the command, the rest its arguments.
        const [cmd, ...args] = message.content.slice(prefix.length).trim().split(/ +/g);
        const command = this.client.commands.get(cmd.toLowerCase()) || this.client.commands.get(this.client.aliases.get(cmd.toLowerCase()));
        if (!command) return;
        const permission = command.checkPermissions(message);
        if(permission) {
            try {
                // NOTE(review): `command.run` is not awaited, so rejections
                // from async commands will NOT be caught by this try/catch —
                // confirm whether that is intentional.
                command.run(message, args, data);
                this.client.log('INFO',`[${message.guild.name}] ${message.author.tag} > ${command.name}`)
            } catch(e) {
                this.client.log('ERROR',e.stack)
                return message.channel.send({embed: {
                    title: `An error occured on ${command.name}, the error has been reported!`,
                    color: this.client.utils.hexColor
                }});
            }
        }
    }
};
|
set -e
# Install Oh My Vim: clone the repo, back up any existing ~/.vimrc, install
# the template vimrc and (on macOS) the Powerline font.

# Install location (override by exporting OH_MY_VIM beforehand).
if [ ! -n "$OH_MY_VIM" ]; then
OH_MY_VIM=~/.oh-my-vim
fi

# NOTE(review): the \033 escapes in these echoes are interpreted by sh/zsh
# echo but printed literally by bash's builtin echo — confirm the intended
# interpreter (no shebang in view).
if [ -d "$OH_MY_VIM" ]; then
echo "\033[0;33mYou already have Oh My Vim installed.\033[0m You'll need to remove $OH_MY_VIM if you want to install"
exit
fi

echo "\033[0;34mCloning Oh My Vim...\033[0m"
hash git >/dev/null 2>&1 && /usr/bin/env git clone https://github.com/liangxianzhe/oh-my-vim.git $OH_MY_VIM || {
echo "git not installed"
exit
}

echo "\033[0;34mLooking for an existing vim config...\033[0m"
if [ -f ~/.vimrc ] || [ -h ~/.vimrc ]; then
echo "\033[0;33mFound ~/.vimrc.\033[0m \033[0;32mBacking up to ~/.vimrc.pre-oh-my-vim\033[0m";
mv ~/.vimrc ~/.vimrc.pre-oh-my-vim;
fi

echo "\033[0;34mUsing the Oh My Vim template file and adding it to ~/.vimrc\033[0m"
cp $OH_MY_VIM/templates/vimrc-template ~/.vimrc
# Point the generated ~/.vimrc at the actual install location.
sed -i -e "/OH_MY_VIM=/ c\\
let \$OH_MY_VIM=\"$OH_MY_VIM\"
" ~/.vimrc

# FIX: the original tested the relative path "bundle" (in whatever the current
# working directory happens to be) but created $OH_MY_VIM/bundle, so the check
# never guarded the directory actually being created.
if [ ! -d "$OH_MY_VIM/bundle" ]; then
echo "\033[0;34mCreating a bundle directory...\033[0m"
mkdir $OH_MY_VIM/bundle
fi

# Copy the font if using mac
if [[ `uname` == 'Darwin' ]]; then
echo "\033[0;34mInstalling font...\033[0m"
cp $OH_MY_VIM"/font/DroidSansMono/Droid Sans Mono for Powerline.otf" ~/Library/fonts/
fi

echo "\033[0;32m"' ____ __ __ _____ __ _ ________ ___ '"\033[0m"
echo "\033[0;32m"' / __ \/ / / |/ /\ \/ / | | / / _/ |/ / '"\033[0m"
echo "\033[0;32m"'/ /_/ / _ \ / /|_/ / \ / | |/ // // /|_/ / '"\033[0m"
echo "\033[0;32m"'\____/_//_/ /_/ /_/ /_/ |___/___/_/ /_/ '"\033[0m"
echo "\n\n \033[0;32mPlease look over the ~/.vimrc file to select plugins, themes, and options.\033[0m"
echo "\n\n \033[0;32mThen open your vim and it will install plugins for you.\033[0m"
echo "\n\n \033[0;32mEnjoy!.\033[0m"
|
#! /bin/bash
set -e
# Run (or attach to) the Udacity CarND capstone container, forwarding any
# extra arguments to the shell / container entry point.

# Settings from environment
UDACITY_SOURCE=${UDACITY_SOURCE:-`pwd`}
UDACITY_IMAGE=${UDACITY_IMAGE:-bydavy/carnd-capstone}
CONTAINER_NAME="udacity_carnd"

# NOTE(review): `docker ps -a | grep` matches substrings anywhere in the
# listing, so another container/image whose name merely contains
# "udacity_carnd" also matches — confirm whether
# `docker ps -a --filter name=...` was intended.
if [ "$(docker ps -a | grep ${CONTAINER_NAME})" ]; then
	echo "Attaching to running container..."
	# FIX: quoted "$@" forwards arguments verbatim; bare $@ re-splits any
	# argument containing whitespace.
	docker exec -it ${CONTAINER_NAME} bash "$@"
else
	docker run --name ${CONTAINER_NAME} --rm -it -p 4567:4567 -v "${UDACITY_SOURCE}:/udacity" ${UDACITY_IMAGE} "$@"
fi
|
import pandas as pd
def process_geographical_data(geographical_data: pd.DataFrame) -> pd.DataFrame:
    """Strip the county code "53033" from GEOID_a/GEOID_b in place.

    The full codes are preserved in new GEOID_long_a / GEOID_long_b columns
    before the substring is removed.

    Note: the input frame is mutated and also returned.
    """
    geographical_data['GEOID_long_a'] = geographical_data['GEOID_a']
    geographical_data['GEOID_long_b'] = geographical_data['GEOID_b']
    # FIX: pass regex=False explicitly — pandas < 2.0 defaulted to regex=True
    # (with a FutureWarning); a literal replace is what is intended here and
    # this pins the same behavior on every pandas version.
    geographical_data['GEOID_a'] = geographical_data['GEOID_a'].str.replace("53033", "", regex=False)
    geographical_data['GEOID_b'] = geographical_data['GEOID_b'].str.replace("53033", "", regex=False)
    return geographical_data
# Test the function
geographical_data = pd.DataFrame({
    'GEOID_a': ['53033123', '53033234', '53033567'],
    'GEOID_b': ['53033111', '53033222', '53033444']
})
processed_data = process_geographical_data(geographical_data)
# Expected: GEOID_a/GEOID_b have the "53033" county code removed, while the
# GEOID_long_* columns keep the original full codes.
print(processed_data)
|
<reponame>AfterGlowShadow/daaiwujiang<gh_stars>0
// Vertical partner-logo carousel: "slide-up" moves the first five items to the
// end of the list, "slide-down" moves items from the end back to the front.
function slideController () {
    var partners = document.getElementById("partners"),
        ul = partners.getElementsByTagName("ul")[0],
        li = ul.getElementsByTagName("li"),
        height = li[0].offsetHeight;
    // Pin the list height to one row so only one "page" of items shows.
    ul.style = "height:" + height + "px;";
    document.getElementById("slide-up").addEventListener("click", function() {
        // Move five items from the front to the end. NOTE: `li` is a live
        // HTMLCollection, so indices shift as nodes are removed/appended.
        for(var i = 0; i < 5; i++) {
            var node = li[i].cloneNode(true);
            ul.removeChild(li[i]);
            ul.appendChild(node);
        }
    });
    document.getElementById("slide-down").addEventListener("click", function() {
        var count = li.length - 1;
        console.log(count)
        // NOTE(review): `count -= i` subtracts the loop index (0,1,2,…) each
        // pass, which looks suspicious for "take the last five items" given
        // the live-collection index shifts above — confirm intended behavior.
        for(var i = 0; i < 5; i++) {
            count -= i;
            var node = li[count].cloneNode(true);
            ul.removeChild(li[count]);
            ul.insertBefore(node, li[0]);
        }
    });
}
// Switches between the two pricing tabs in the ".hn-left" panel and rewrites
// the customer-tier labels and prices accordingly. The displayed strings are
// user-facing Chinese content and are kept verbatim.
function moveTy (){
    // Second tab (index 1): premium tier labels/prices.
    document.getElementsByClassName("hn-left")[0].getElementsByTagName('dl')[1].addEventListener("click", function() {
        var hnLeft = document.getElementsByClassName('hn-left')[0];
        var dl = hnLeft.getElementsByTagName('dl')[1];
        // Clear the active marker from both tabs, then mark this one.
        for (var i=0;i<2;i++){
            hnLeft.getElementsByTagName('dl')[i].className = ''
        }
        dl.className = 'hn-left-active';
        document.getElementsByClassName('h-bk-a')[0].innerHTML = '尊享A类客户';
        document.getElementsByClassName('h-bk-b')[0].innerHTML = '尊享B类客户';
        document.getElementsByClassName('h-bk-c')[0].innerHTML = '尊享C类客户';
        document.getElementsByClassName('h-bk-d')[0].innerHTML = '尊享D类客户';
        document.getElementsByClassName('h-bk-aprice')[0].innerHTML = '大爱体验价:¥<span>4800</span>';
        document.getElementsByClassName('h-bk-bprice')[0].innerHTML = '大爱体验价:¥<span>7600</span>';
        document.getElementsByClassName('h-bk-cprice')[0].innerHTML = '大爱体验价:¥<span>9600</span>';
        document.getElementsByClassName('h-bk-dprice')[0].innerHTML = '大爱体验价:¥<span>12000</span>';
    });
    // First tab (index 0): trial tier labels/prices.
    document.getElementsByClassName("hn-left")[0].getElementsByTagName('dl')[0].addEventListener("click", function() {
        var hnLeft = document.getElementsByClassName('hn-left')[0];
        var dl = hnLeft.getElementsByTagName('dl')[0];
        for (var i=0;i<2;i++){
            hnLeft.getElementsByTagName('dl')[i].className = ''
        }
        dl.className = 'hn-left-active';
        document.getElementsByClassName('h-bk-a')[0].innerHTML = '体验A类客户';
        document.getElementsByClassName('h-bk-b')[0].innerHTML = '体验B类客户';
        document.getElementsByClassName('h-bk-c')[0].innerHTML = '体验C类客户';
        document.getElementsByClassName('h-bk-d')[0].innerHTML = '体验D类客户';
        document.getElementsByClassName('h-bk-aprice')[0].innerHTML = '大爱体验价:¥<span>4890</span>';
        document.getElementsByClassName('h-bk-bprice')[0].innerHTML = '大爱体验价:¥<span>7780</span>';
        document.getElementsByClassName('h-bk-cprice')[0].innerHTML = '大爱体验价:¥<span>9870</span>';
        document.getElementsByClassName('h-bk-dprice')[0].innerHTML = '大爱体验价:¥<span>12360</span>';
    });
}
// Page bootstrap: wires up the carousel, the form controller and the pricing
// tabs, plus the "show more / hide" toggles at the bottom of the page.
window.onload = function() {
    slideController();
    // NOTE(review): primaryFormController is not defined in this file — it is
    // presumably provided by another script on the page; confirm load order.
    primaryFormController();
    moveTy();
    // "more" link: reveal the hidden paragraph and the collapse control.
    document.getElementsByClassName('moreSpan')[0].addEventListener('click',function () {
        document.getElementsByClassName('moreSpan')[0].style.display = 'none';
        document.getElementById('hiddenS').style.display = 'inline';
        document.getElementById('lastP').style.display = 'block';
        document.getElementsByClassName('home-healthy')[0].style.paddingBottom = '0'
    });
    // Collapse control: hide the extra content again.
    // NOTE(review): paddingBottom is set to '0' in both branches — confirm
    // whether the collapse branch should restore the original padding instead.
    document.getElementById('hiddenS').addEventListener('click',function () {
        document.getElementById('hiddenS').style.display = 'none';
        document.getElementById('lastP').style.display = 'none';
        document.getElementsByClassName('moreSpan')[0].style.display = 'inline';
        document.getElementsByClassName('home-healthy')[0].style.paddingBottom = '0'
    })
};
|
<gh_stars>1000+
# set async_mode to 'threading', 'eventlet', 'gevent' or 'gevent_uwsgi' to
# force a mode else, the best mode is selected automatically from what's
# installed
async_mode = None

import time
from flask import Flask, render_template
import socketio

# socketio handles the websocket traffic; Flask serves the regular routes.
sio = socketio.Server(logger=True, async_mode=async_mode)
app = Flask(__name__)
app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app)
app.config['SECRET_KEY'] = 'secret!'
# Background emitter task; started lazily on the first page load.
thread = None


def background_thread():
    """Example of how to send server generated events to clients."""
    count = 0
    while True:
        sio.sleep(10)
        count += 1
        # NOTE(review): `count` is incremented but not included in the payload
        # — presumably fine for this example; confirm.
        sio.emit('my_response', {'data': 'Server generated event'})


@app.route('/')
def index():
    # Start the background emitter exactly once, on the first request.
    global thread
    if thread is None:
        thread = sio.start_background_task(background_thread)
    return render_template('index.html')


@sio.event
def my_event(sid, message):
    # Echo the payload back to the sender only.
    sio.emit('my_response', {'data': message['data']}, room=sid)


@sio.event
def my_broadcast_event(sid, message):
    # Echo the payload to every connected client.
    sio.emit('my_response', {'data': message['data']})


@sio.event
def join(sid, message):
    sio.enter_room(sid, message['room'])
    sio.emit('my_response', {'data': 'Entered room: ' + message['room']},
             room=sid)


@sio.event
def leave(sid, message):
    sio.leave_room(sid, message['room'])
    sio.emit('my_response', {'data': 'Left room: ' + message['room']},
             room=sid)


@sio.event
def close_room(sid, message):
    # Notify the room's members before closing it.
    sio.emit('my_response',
             {'data': 'Room ' + message['room'] + ' is closing.'},
             room=message['room'])
    sio.close_room(message['room'])


@sio.event
def my_room_event(sid, message):
    sio.emit('my_response', {'data': message['data']}, room=message['room'])


@sio.event
def disconnect_request(sid):
    sio.disconnect(sid)


@sio.event
def connect(sid, environ):
    sio.emit('my_response', {'data': 'Connected', 'count': 0}, room=sid)


@sio.event
def disconnect(sid):
    print('Client disconnected')


if __name__ == '__main__':
    # Pick the deployment strategy matching the async mode socketio selected.
    if sio.async_mode == 'threading':
        # deploy with Werkzeug
        app.run(threaded=True)
    elif sio.async_mode == 'eventlet':
        # deploy with eventlet
        import eventlet
        import eventlet.wsgi
        eventlet.wsgi.server(eventlet.listen(('', 5000)), app)
    elif sio.async_mode == 'gevent':
        # deploy with gevent
        from gevent import pywsgi
        try:
            from geventwebsocket.handler import WebSocketHandler
            websocket = True
        except ImportError:
            websocket = False
        if websocket:
            pywsgi.WSGIServer(('', 5000), app,
                              handler_class=WebSocketHandler).serve_forever()
        else:
            pywsgi.WSGIServer(('', 5000), app).serve_forever()
    elif sio.async_mode == 'gevent_uwsgi':
        print('Start the application through the uwsgi server. Example:')
        print('uwsgi --http :5000 --gevent 1000 --http-websockets --master '
              '--wsgi-file app.py --callable app')
    else:
        print('Unknown async_mode: ' + sio.async_mode)
|
# Launch the Scaler pointer-analysis toolkit, forwarding all CLI arguments.
CP="bin:lib/guava-23.0.jar"
MAIN="ptatoolkit.scaler.doop.Main"
# FIX: use "$@" instead of $* — $* merges and re-splits arguments, breaking
# any argument that contains whitespace; "$@" forwards them verbatim.
java -Xmx48g -cp "$CP" "$MAIN" "$@"
|
#!/bin/bash -e
# CI build driver: collects build parameters from the environment, generates
# the bitbake conf/local.conf and conf/auto.conf fragments for an LmP factory
# build, then echoes the resulting configuration. helpers.sh provides the
# require_params/status/indent functions used below.
HERE=$(dirname $(readlink -f $0))
source $HERE/../helpers.sh
require_params MACHINE IMAGE OTA_LITE_TAG
# Tunables — each may be overridden from the environment.
OSTREE_BRANCHNAME="${OSTREE_BRANCHNAME-lmp-localdev}"
SOTA_CLIENT="${SOTA_CLIENT-aktualizr}"
AKLITE_TAG="${AKLITE_TAG-promoted}"
H_BUILD="${H_BUILD-lmp-localdev}"
LMP_VERSION=$(git --git-dir=.repo/manifests/.git describe --tags 2>/dev/null || echo unknown)
FACTORY="${FACTORY-lmp}"
LMP_DEVICE_API="${LMP_DEVICE_API-https://api.foundries.io/ota/devices/}"
UBOOT_SIGN_ENABLE="${UBOOT_SIGN_ENABLE-0}"
DISABLE_GPLV3="${DISABLE_GPLV3-0}"
ENABLE_PTEST="${ENABLE_PTEST-0}"
DOCKER_MAX_CONCURRENT_DOWNLOADS="${DOCKER_MAX_CONCURRENT_DOWNLOADS-3}"
DOCKER_MAX_DOWNLOAD_ATTEMPTS="${DOCKER_MAX_DOWNLOAD_ATTEMPTS-5}"
MFGTOOL_FLASH_IMAGE="${MFGTOOL_FLASH_IMAGE-lmp-factory-image}"
SSTATE_CACHE_MIRROR="${SSTATE_CACHE_MIRROR-/sstate-cache-mirror}"
# NOTE(review): ${archive} is not defined in this script — presumably exported
# by helpers.sh or the CI environment; confirm.
APP_IMAGES_PRELOAD_LOG_FILE="${APP_IMAGES_PRELOAD_LOG_FILE-${archive}/app-images-preload.log}"
APP_IMAGES_ROOT_DIR="${APP_IMAGES_ROOT_DIR-/var/cache/bitbake/app-images}"
USE_FIOTOOLS="${USE_FIOTOOLS-1}"
OSTREE_API_VERSION="${OSTREE_API_VERSION-v2}"
# Single quotes are deliberate: these placeholders must reach the generated
# conf file unexpanded so bitbake resolves them at build time.
GARAGE_CUSTOMIZE_TARGET_PARAMS='${MACHINE} ${IMAGE_BASENAME} ${TARGET_ARCH}'
# Ptest builds publish to a dedicated "<branch>-ptest" ostree branch.
if [ "$ENABLE_PTEST" = "1" ] ; then
OSTREE_BRANCHNAME="${OSTREE_BRANCHNAME}-ptest"
fi
# When target signing keys are mounted, generate a fresh credentials.zip.
if [ -f "/secrets/targets.sec" ] ; then
status "Generating credentials.zip"
dynamic=$(mktemp --suffix=.zip)
$HERE/../create-creds $dynamic
SOTA_PACKED_CREDENTIALS=$dynamic
fi
source setup-environment build
# Yocto switched override syntax from VAR_suffix to VAR:suffix; CONF_VERSION
# tells us which spelling the generated fragments must use.
CONF_VERSION=$(grep ^CONF_VERSION conf/local.conf | cut -d'"' -f 2)
if [ "$CONF_VERSION" == "1" ]; then
cat << EOFEOF >> conf/local.conf
ACCEPT_EULA_stm32mp1-disco = "1"
ACCEPT_EULA_stm32mp1-eval = "1"
EOFEOF
else
cat << EOFEOF >> conf/local.conf
ACCEPT_EULA:stm32mp1-disco = "1"
ACCEPT_EULA:stm32mp1-eval = "1"
EOFEOF
fi
# Main block of build settings appended to conf/local.conf.
cat << EOFEOF >> conf/local.conf
CONNECTIVITY_CHECK_URIS = "https://www.google.com/"
ACCEPT_FSL_EULA = "1"
BB_GENERATE_MIRROR_TARBALLS = "1"
UBOOT_SIGN_ENABLE = "${UBOOT_SIGN_ENABLE}"
# SOTA params
SOTA_PACKED_CREDENTIALS = "${SOTA_PACKED_CREDENTIALS}"
OSTREE_BRANCHNAME = "${MACHINE}-${OSTREE_BRANCHNAME}"
GARAGE_SIGN_REPO = "/tmp/garage_sign_repo"
GARAGE_TARGET_VERSION = "${H_BUILD}"
GARAGE_TARGET_URL = "https://ci.foundries.io/projects/${H_PROJECT}/builds/${H_BUILD}"
GARAGE_CUSTOMIZE_TARGET = "${HERE}/customize-target.sh ${FACTORY} ${OTA_LITE_TAG} ${GARAGE_CUSTOMIZE_TARGET_PARAMS}"
DOCKER_COMPOSE_APP = "${DOCKER_COMPOSE_APP}"
APP_IMAGES_PRELOADER = "PYTHONPATH=${HERE}/../ ${HERE}/preload-app-images"
APP_IMAGES_PRELOAD_LOG_FILE = "${APP_IMAGES_PRELOAD_LOG_FILE}"
APP_IMAGES_ROOT_DIR = "${APP_IMAGES_ROOT_DIR}"
DOCKER_COMPOSE_APP_PRELOAD = "${DOCKER_COMPOSE_APP_PRELOAD}"
DOCKER_COMPOSE_APP_SHORTLIST = "${APP_SHORTLIST}"
USE_FIOTOOLS = "${USE_FIOTOOLS}"
OSTREE_API_VERSION = "${OSTREE_API_VERSION}"
# Default SOTA client
SOTA_CLIENT = "${SOTA_CLIENT}"
# git-describe version of LMP
LMP_VERSION = "${LMP_VERSION}"
# Default AKLITE tag
AKLITE_TAG = "${AKLITE_TAG}"
# dockerd params
DOCKER_MAX_CONCURRENT_DOWNLOADS = "${DOCKER_MAX_CONCURRENT_DOWNLOADS}"
DOCKER_MAX_DOWNLOAD_ATTEMPTS = "${DOCKER_MAX_DOWNLOAD_ATTEMPTS}"
# mfgtool params
MFGTOOL_FLASH_IMAGE = "${MFGTOOL_FLASH_IMAGE}"
# Bitbake custom logconfig
BB_LOGCONFIG = "${HERE}/bb_logconfig.json"
EOFEOF
# Additional packages based on the CI job used
if [ "$CONF_VERSION" == "1" ]; then
cat << EOFEOF >> conf/local.conf
IMAGE_INSTALL_append = " ${EXTRA_IMAGE_INSTALL}"
EOFEOF
else
cat << EOFEOF >> conf/local.conf
IMAGE_INSTALL:append = " ${EXTRA_IMAGE_INSTALL}"
EOFEOF
fi
# Ptest-based builds require the same build settings and variables,
# but the final image needs to be tagged differently, such as
# <main tag>-ptest, so perform the change at the OTA_LITE_TAG variable
if [ "${ENABLE_PTEST}" = "1" ]; then
IFS=","
PTAGS=""
# Each comma-separated entry may be "tag" or "tag:container-tag"; append
# -ptest to the lmp tag part and keep the container part untouched.
for tag in ${OTA_LITE_TAG}; do
lmptag=$(echo $tag | cut -d: -f1)
PTAGS="${PTAGS} ${lmptag}-ptest"
contag=$(echo $tag | cut -s -d: -f1 --complement)
if [ -n "${contag}" ]; then
PTAGS="${PTAGS}:${contag}"
fi
done
unset IFS
OTA_LITE_TAG=$(echo ${PTAGS} | sed -e "s/ /,/g")
status "PTEST enabled, OTA_LITE_TAG updated to: ${OTA_LITE_TAG}"
# Install ptest related packages via extra image features
cat << EOFEOF >> conf/local.conf
EXTRA_IMAGE_FEATURES += " ptest-pkgs"
EOFEOF
fi
cat << EOFEOF >> conf/local.conf
# Take a tag from a spec like:
# https://docs.foundries.io/latest/reference/advanced-tagging.html
# and find the first tag name to produce a senible default
LMP_DEVICE_REGISTER_TAG = "$(echo ${OTA_LITE_TAG} | cut -d: -f1 | cut -d, -f1)"
LMP_DEVICE_FACTORY = "${FACTORY}"
LMP_DEVICE_API = "${LMP_DEVICE_API}"
EOFEOF
if [ -z "$SOTA_PACKED_CREDENTIALS" ] || [ ! -f $SOTA_PACKED_CREDENTIALS ] ; then
status "SOTA_PACKED_CREDENTIALS not found, disabling OSTree publishing logic"
cat << EOFEOF >> conf/local.conf
SOTA_PACKED_CREDENTIALS = ""
EOFEOF
fi
if [ "${DISABLE_GPLV3}" = "1" ]; then
cat << EOFEOF >> conf/local.conf
INHERIT += "image-license-checker lmp-disable-gplv3"
IMAGE_LICENSE_CHECKER_ROOTFS_BLACKLIST = "GPL-3.0 LGPL-3.0 AGPL-3.0"
IMAGE_LICENSE_CHECKER_NON_ROOTFS_BLACKLIST = "GPL-3.0 LGPL-3.0 AGPL-3.0"
EOFEOF
fi
if [ -d $SSTATE_CACHE_MIRROR ]; then
cat << EOFEOF >> conf/local.conf
SSTATE_MIRRORS = "file://.* file://${SSTATE_CACHE_MIRROR}/PATH"
EOFEOF
fi
# Add build id H_BUILD to output files names
if [ "$CONF_VERSION" == "1" ]; then
cat << EOFEOF >> conf/local.conf
DISTRO_VERSION_append = "-${H_BUILD}-${LMP_VERSION}"
EOFEOF
else
cat << EOFEOF >> conf/local.conf
DISTRO_VERSION:append = "-${H_BUILD}-${LMP_VERSION}"
EOFEOF
fi
cat << EOFEOF >> conf/auto.conf
# get build stats to make sure that we use sstate properly
INHERIT += "buildstats buildstats-summary"
# archive sources for target recipes (for license compliance)
INHERIT += "archiver"
COPYLEFT_RECIPE_TYPES = "target"
ARCHIVER_MODE[src] = "original"
ARCHIVER_MODE[diff] = "1"
EOFEOF
# Prefer a local sstate cache when one has already been populated.
if [ $(ls ../sstate-cache | wc -l) -ne 0 ] ; then
status "Found existing sstate cache, using local copy"
echo 'SSTATE_MIRRORS = ""' >> conf/auto.conf
fi
# Echo the generated configuration files into the CI log.
for x in $(ls conf/*.conf) ; do
status "$x"
cat $x | indent
done
|
<reponame>drapisarda/local-websites
// Styles
import './styles/main.scss';
// Data
import categories from './data/categories.json';
import countries from './data/countries.js';
// App
import dom from './core.js';
const app = document.querySelector('app');
dom.setElement('total-country-count', Object.keys(countries).length);
dom.setSelect('all-cities', countries, 'country_name');

// List: when both the "from" and "to" selects hold a real choice (not the
// "-1" placeholder), open the list panel and render the matching websites.
const elements = [document.getElementById('from'), document.getElementById('to')];
// FIX: iterate with for...of instead of for...in + hasOwnProperty — for...in
// walks enumerable keys (including any inherited ones) and yields string
// indices; for...of iterates the array values directly.
for (const element of elements) {
    element.addEventListener('change', function () {
        const from = elements[0].options[elements[0].options.selectedIndex].value;
        const to = elements[1].options[elements[1].options.selectedIndex].value;
        if (from !== "-1" && to !== "-1") {
            document.querySelector('.is-list-open').classList.add('yes');
            dom.getListOfWebsites(categories, countries[from], countries[to]);
            dom.setElement('from', from);
            dom.setElement('to', to);
        }
    })
}
|
<reponame>tbhuabi/stream<gh_stars>1-10
import { PartialObserver } from './observable';
import { Subject } from './subject';
import { Subscription } from './subscription'
/**
 * A Subject that remembers the latest emitted value and replays it to every
 * new subscriber immediately upon subscription.
 */
export class BehaviorSubject<T> extends Subject<T> {
  // Latest value pushed through the subject; starts as the constructor's defaultValue.
  private currentValue: T;

  constructor(defaultValue: T) {
    super()
    this.currentValue = defaultValue;
  }

  /** Records newValue as the current value, then emits it to all subscribers. */
  next(newValue: T) {
    this.currentValue = newValue;
    super.next(newValue);
  }

  subscribe(observer?: PartialObserver<T>): Subscription;
  subscribe(observer?: (value: T) => void): Subscription;
  subscribe(
    observer: any = function () {
      //
    }): Subscription {
    const subscriber = this.toSubscriber(observer);
    const subscription = this.trySubscribe(subscriber);
    // Replay the current value to the new subscriber right away.
    subscriber.next(this.currentValue);
    return subscription;
  }
}
|
#pragma once
#include <iostream>
#include "planargraph.hh"
class Triangulation;

// Cubic planar graphs (polyhedral graphs).
// Every vertex must have degree exactly 3; the invariant is enforced (via
// abort) by the PlanarGraph conversion constructor.
struct CubicGraph : public PlanarGraph {
  CubicGraph() {}

  // Wrap an existing planar graph, aborting the process if any vertex does
  // not have exactly three neighbours.
  CubicGraph(const PlanarGraph& g) : PlanarGraph(g) {
    for(node_t u=0;u<N;u++)
      if(neighbours[u].size() != 3){
        fprintf(stderr,"Graph not cubic: deg(%d) = %d\n",u,int(neighbours[u].size()));
        abort();
      }
  }

  CubicGraph(const Graph& g, const vector<coord2d>& layout) : PlanarGraph(g,layout) {}

  // Construct from a spiral string; `jumps` carries the jump list for
  // graphs that are not spiralable without jumps.
  CubicGraph(const int N, const vector<int>& spiral_string, const jumplist_t& jumps = jumplist_t());
  CubicGraph(const spiral_nomenclature &fsn);

  // Compute a (general) face spiral, either starting from faces f1,f2,f3 or
  // searching for one. NOTE(review): the exact semantics of the
  // canonical/general/pentagon_start flags are presumed from the parameter
  // names — confirm against the implementation.
  bool get_spiral_from_cg(const node_t f1, const node_t f2, const node_t f3, vector<int> &spiral, jumplist_t &jumps, const bool general=true) const;
  bool get_spiral_from_cg(vector<int> &spiral, jumplist_t &jumps, const bool canonical=true, const bool general=true, const bool pentagon_start=true) const;

  vector<node_t> vertex_numbers(const Triangulation &T, const vector<vector<node_t>> &perm, const vector<node_t>& loc) const;
};
|
use reqwest::Client;
use serde::Deserialize;
// Define the Clue struct based on the expected structure
// NOTE(review): this file is scaffold/template code — the Clue fields, the
// options initializer, the `f` helper and the `api!` macro below are
// placeholders; the function does not compile until they are supplied.
#[derive(Debug, Deserialize)]
struct Clue {
    // Define the fields of the Clue struct based on the actual structure
    // For example:
    // id: u64,
    // text: String,
    // category: String,
    // ...
}

// Implement the get_random_clues function
// Fetches clues from the `/clues` endpoint and deserializes the JSON body.
// NOTE(review): the `count` parameter is currently unused in the body —
// presumably it should be part of the query options; confirm.
async fn get_random_clues(client: &Client, count: u64) -> Result<Vec<Clue>, reqwest::Error> {
    // Create options for the request
    let mut create_options = /* Define and initialize the options for the request */;
    // Make the HTTP GET request to the API endpoint /clues with the specified options
    let options = f(&mut create_options);
    let response = client.get(api!("/clues"))
        .query(&options.0)
        .send()
        .await?;
    // Deserialize the JSON response into a vector of Clue objects
    let clues: Vec<Clue> = response.json().await?;
    // Return the vector of Clue objects
    Ok(clues)
}
|
import { TestBed, inject } from '@angular/core/testing';
import { XformParserService } from './xform-parser.service';
// Smoke test: the XformParserService can be constructed by Angular's injector.
describe('XformParserService', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [XformParserService]
    });
  });

  it('should be created', inject([XformParserService], (service: XformParserService) => {
    expect(service).toBeTruthy();
  }));
});
|
#include <iostream>
int main()
{
    // Convert 72 °F to Celsius: C = (F - 32) * 5/9.
    const double fahrenheit = 72.0;
    const double celsius = (fahrenheit - 32.0) * 5.0 / 9.0;
    std::cout << "Celsius = " << celsius;
    return 0;
}
|
#include <stdint.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <openssl/rsa.h>
#include <openssl/err.h>
#include "inc.h"
#include "tcpcrypt_ctl.h"
#include "tcpcrypt.h"
#include "tcpcryptd.h"
#include "crypto.h"
#include "profile.h"
/*
 * Constructors for the public-key and symmetric cipher suites supported by
 * tcpcryptd, plus their registration with the crypto registry at load time.
 */
static struct crypt_pub *RSA_HKDF_new(void)
{
    /* RSA key transport with HKDF-SHA256 key derivation. */
    struct crypt_pub *cp = xmalloc(sizeof(*cp));
    memset(cp, 0, sizeof(*cp));
    cp->cp_hkdf = crypt_HKDF_SHA256_new();
    cp->cp_pub = crypt_RSA_new();
    /* Sizes in bytes. cp_n_c / cp_n_s are presumably the client / server
     * nonce lengths — confirm against crypto.h. */
    cp->cp_n_c = 32;
    cp->cp_n_s = 48;
    cp->cp_k_len = 32;
    /* Accept RSA keys between 2048 and 4096 bits. */
    cp->cp_min_key = (2048 / 8);
    cp->cp_max_key = (4096 / 8);
    cp->cp_cipher_len = (4096 / 8);
    return cp;
}

/*
 * Shared constructor for the ECDHE suites: `ctr` creates the curve-specific
 * key-agreement object and `klen` is the encoded public-key length in bytes.
 */
static struct crypt_pub *ECDHE_HKDF_new(struct crypt*(*ctr)(void), int klen)
{
    struct crypt_pub *cp = xmalloc(sizeof(*cp));
    memset(cp, 0, sizeof(*cp));
    cp->cp_hkdf = crypt_HKDF_SHA256_new();
    cp->cp_pub = ctr();
    cp->cp_n_c = 32;
    cp->cp_n_s = 32;
    cp->cp_k_len = 32;
    cp->cp_max_key = (4096 / 8);
    /* 1 leading byte + server nonce + encoded public key. */
    cp->cp_cipher_len = 1 + cp->cp_n_s + klen;
    cp->cp_key_agreement = 1;
    return cp;
}

static struct crypt_pub *ECDHE256_HKDF_new(void)
{
    /* P-256: an uncompressed point encodes to 65 bytes (0x04 + 2 * 32). */
    return ECDHE_HKDF_new(crypt_ECDHE256_new, 65);
}

static struct crypt_pub *ECDHE521_HKDF_new(void)
{
    /* P-521: an uncompressed point encodes to 133 bytes (0x04 + 2 * 66). */
    return ECDHE_HKDF_new(crypt_ECDHE521_new, 133);
}

static struct crypt_sym *AES_HMAC_new(void)
{
    /* AES cipher with HMAC-SHA256 authentication; MAC truncated to 128 bits. */
    struct crypt_sym *cs = xmalloc(sizeof(*cs));
    memset(cs, 0, sizeof(*cs));
    cs->cs_cipher = crypt_AES_new();
    cs->cs_mac = crypt_HMAC_SHA256_new();
    /* NOTE(review): the ACK MAC uses an AES-based object rather than the
     * HMAC above — confirm this matches the tcpcrypt specification. */
    cs->cs_ack_mac = crypt_AES_new();
    cs->cs_mac_len = (128 / 8);
    return cs;
}

/* Helpers that register a cipher constructor under the given wire identifier. */
static void register_pub(unsigned int id, struct crypt_pub *(*ctr)(void))
{
    crypt_register(TYPE_PKEY, id, (crypt_ctr) ctr);
}

static void register_sym(unsigned int id, struct crypt_sym *(*ctr)(void))
{
    crypt_register(TYPE_SYM, id, (crypt_ctr) ctr);
}

/* Runs automatically before main() via the GCC constructor attribute. */
static void __register_ciphers(void) __attribute__ ((constructor));
static void __register_ciphers(void)
{
    register_pub(TC_CIPHER_OAEP_RSA_3, RSA_HKDF_new);
    register_pub(TC_CIPHER_ECDHE_P256, ECDHE256_HKDF_new);
    register_pub(TC_CIPHER_ECDHE_P521, ECDHE521_HKDF_new);
    register_sym(TC_AES128_HMAC_SHA2, AES_HMAC_new);
}
|
<filename>modules/caas/backend/src/main/java/io/cattle/platform/agent/instance/factory/impl/AgentBuilderRequest.java
package io.cattle.platform.agent.instance.factory.impl;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.util.SystemLabels;
import io.cattle.platform.object.util.DataAccessor;
import java.util.Map;
import java.util.Set;
/**
 * Value object describing the agent to build for an instance: the agent's
 * event URI, the owning account, the cluster, and the requested roles.
 */
public class AgentBuilderRequest {

    Long resourceAccountId;
    String uri;
    Long clusterId;
    Set<String> requestedRoles;

    /**
     * Derives the agent URI from the instance's labels -- falling back to the
     * {@code "event"} scheme when no prefix label is set -- and captures the
     * account id, cluster id and requested roles.
     *
     * @param instance the instance the agent is being built for
     * @param roles    the roles requested for the agent
     */
    public AgentBuilderRequest(Instance instance, Set<String> roles) {
        Map<String, Object> labels = DataAccessor.fieldMap(instance, InstanceConstants.FIELD_LABELS);
        Object prefixLabel = labels.get(SystemLabels.LABEL_AGENT_URI_PREFIX);
        String scheme = (prefixLabel == null) ? "event" : prefixLabel.toString();
        this.uri = scheme + ":///instanceId=" + instance.getId();
        this.resourceAccountId = instance.getAccountId();
        this.clusterId = instance.getClusterId();
        this.requestedRoles = roles;
    }

    /** @return the cluster the instance belongs to */
    public Long getClusterId() {
        return clusterId;
    }

    /** @return the agent URI, e.g. {@code "event:///instanceId=42"} */
    public String getUri() {
        return uri;
    }

    /** @return the roles requested for the agent */
    public Set<String> getRequestedRoles() {
        return requestedRoles;
    }

    /** @return the account that owns the resource */
    public Long getResourceAccountId() {
        return resourceAccountId;
    }
}
|
/**
* Copyright 2018-2020 Dynatrace LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dynatrace.openkit.util.json.objects;
import org.junit.Test;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code JSONObjectValue}.
 *
 * Covers the type predicates (isNull/isBoolean/...), the null-argument
 * behavior of the {@code fromMap} factory, and verifies that the map-style
 * accessors (keySet/size/containsKey/get) delegate to the underlying map.
 *
 * NOTE(review): {@code org.mockito.Matchers} (imported above) is deprecated
 * in newer Mockito versions in favor of {@code org.mockito.ArgumentMatchers}
 * -- confirm the Mockito version in use before migrating.
 */
public class JSONObjectValueTest {

    // Immutable empty backing map shared by the predicate tests.
    private static final Map<String, JSONValue> EMPTY_MAP = Collections.emptyMap();

    @Test
    public void isNullReturnsFalse() {
        // then: an object value never reports itself as JSON null
        assertThat(JSONObjectValue.fromMap(EMPTY_MAP).isNull(), is(false));
    }

    @Test
    public void isBooleanReturnsFalse() {
        // then: an object value is not a boolean
        assertThat(JSONObjectValue.fromMap(EMPTY_MAP).isBoolean(), is(false));
    }

    @Test
    public void isNumberReturnsFalse() {
        // then: an object value is not a number
        assertThat(JSONObjectValue.fromMap(EMPTY_MAP).isNumber(), is(false));
    }

    @Test
    public void isStringReturnsFalse() {
        // then: an object value is not a string
        assertThat(JSONObjectValue.fromMap(EMPTY_MAP).isString(), is(false));
    }

    @Test
    public void isArrayReturnsFalse() {
        // then: an object value is not an array
        assertThat(JSONObjectValue.fromMap(EMPTY_MAP).isArray(), is(false));
    }

    @Test
    public void isObjectReturnsTrue() {
        // then: only isObject reports true
        assertThat(JSONObjectValue.fromMap(EMPTY_MAP).isObject(), is(true));
    }

    @Test
    public void fromMapGivesNullIfArgumentIsNull() {
        // when constructed with null, then the factory returns null rather than throwing
        assertThat(JSONObjectValue.fromMap(null), is(nullValue()));
    }

    @Test
    public void keySetDelegatesTheCallToTheUnderlyingMap() {
        // given a mocked backing map
        @SuppressWarnings("unchecked") Map<String, JSONValue> jsonObjectMap = mock(Map.class);
        when(jsonObjectMap.keySet()).thenReturn(Collections.singleton("foobar"));
        JSONObjectValue target = JSONObjectValue.fromMap(jsonObjectMap);
        // when
        Set<String> obtained = target.keySet();
        // then: exactly one delegated call, nothing else touched
        assertThat(obtained, is(notNullValue()));
        verify(jsonObjectMap, times(1)).keySet();
        verifyNoMoreInteractions(jsonObjectMap);
    }

    @Test
    public void sizeDelegatesTheCallToTheUnderlyingMap() {
        // given a mocked backing map
        @SuppressWarnings("unchecked") Map<String, JSONValue> jsonObjectMap = mock(Map.class);
        when(jsonObjectMap.size()).thenReturn(42);
        JSONObjectValue target = JSONObjectValue.fromMap(jsonObjectMap);
        // when
        int obtained = target.size();
        // then: the mocked size is passed through unchanged
        assertThat(obtained, is(42));
        verify(jsonObjectMap, times(1)).size();
        verifyNoMoreInteractions(jsonObjectMap);
    }

    @Test
    public void containsKeyDelegatesTheCallToTheUnderlyingMap() {
        // given a mocked backing map answering true for any key
        @SuppressWarnings("unchecked") Map<String, JSONValue> jsonObjectMap = mock(Map.class);
        when(jsonObjectMap.containsKey(anyString())).thenReturn(true);
        JSONObjectValue target = JSONObjectValue.fromMap(jsonObjectMap);
        // when
        boolean obtained = target.containsKey("foo");
        // then: the exact key is forwarded
        assertThat(obtained, is(true));
        verify(jsonObjectMap, times(1)).containsKey("foo");
        verifyNoMoreInteractions(jsonObjectMap);
        // and when queried with a second key
        obtained = target.containsKey("bar");
        // then: that key is forwarded too
        assertThat(obtained, is(true));
        verify(jsonObjectMap, times(1)).containsKey("bar");
        verifyNoMoreInteractions(jsonObjectMap);
    }

    @Test
    public void getDelegatesTheCallToTheUnderlyingMap() {
        // given a mocked backing map answering a mock value for any key
        @SuppressWarnings("unchecked") Map<String, JSONValue> jsonObjectMap = mock(Map.class);
        when(jsonObjectMap.get(anyString())).thenReturn(mock(JSONValue.class));
        JSONObjectValue target = JSONObjectValue.fromMap(jsonObjectMap);
        // when
        JSONValue obtained = target.get("foo");
        // then: the exact key is forwarded
        assertThat(obtained, is(notNullValue()));
        verify(jsonObjectMap, times(1)).get("foo");
        verifyNoMoreInteractions(jsonObjectMap);
        // and when queried with a second key
        obtained = target.get("bar");
        // then: that key is forwarded too
        assertThat(obtained, is(notNullValue()));
        verify(jsonObjectMap, times(1)).get("bar");
        verifyNoMoreInteractions(jsonObjectMap);
    }
}
|
# Launch a CheXpert training run: DenseNet-121 backbone, Adam (lr 1e-3),
# batch 16, frontal images resized/cropped to 320, uncertain labels ignored.
# Logs go to wandb project "debug" and /data/selfsupervision/log/chexpert.
# NOTE(review): assumes main_chexpert.py exists in the CWD, the log dir is
# writable, and GPU id 3 is available on this host -- confirm before running.
python3 -W ignore main_chexpert.py --wandb_project_name debug \
    --log_dir /data/selfsupervision/log/chexpert \
    --num_epoch 3 \
    --model_name densenet121 \
    --optimizer adam \
    --lr 0.001 \
    --batch_size 16 \
    --num_workers 8 \
    --iters_per_eval 100 \
    --gpu_ids 3 \
    --resize_shape 320 \
    --crop_shape 320 \
    --rotation_range 20 \
    --img_type Frontal \
    --lr_decay 0.1 \
    --weight_decay 0.0 \
    --momentum 0.9 \
    --sgd_dampening 0.0 \
    --uncertain ignore \
    --threshold 0.5 \
    --gaussian_noise_mean 0.0 \
    --gaussian_noise_std 0.05 \
    --gaussian_blur 1.0
|
# Map network drive: mount the WD2TB SMB/CIFS share from SERVER.local.
# Create the local mount point first.
# NOTE(review): mkdir fails if the directory already exists -- consider -p for re-runs.
mkdir /home/pi/WD2TB
# The <> placeholders must be replaced with real share credentials before use.
sudo mount -t cifs -o username=<>,password=<> //SERVER.local/WD2TB /home/pi/WD2TB
|
<reponame>normancarcamo/boilerplate-react-universal
/**
 * Minimal webpack plugin skeleton that hooks into html-webpack-plugin's
 * "before-html-processing" stage and passes the HTML data through unchanged.
 * Uses the legacy (webpack <4) `compiler.plugin(...)` tap API.
 *
 * @param {*} options stored verbatim on the instance as `nombre`
 */
function MyPlugin(options) {
  this.nombre = options;
}

MyPlugin.prototype.apply = function (compiler) {
  compiler.plugin('compilation', function (compilation) {
    compilation.plugin(
      'html-webpack-plugin-before-html-processing',
      function (data, callback) {
        // Inspect data.assets.js / data.assets.css here if needed;
        // for now forward the data untouched.
        callback(null, data)
      }
    )
  })
};

module.exports = MyPlugin;
|
package io.yggdrash.node;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import io.yggdrash.TestUtils;
import io.yggdrash.contract.ContractQry;
import io.yggdrash.core.Address;
import io.yggdrash.core.BranchId;
import io.yggdrash.core.TransactionHusk;
import io.yggdrash.core.Wallet;
import io.yggdrash.node.api.JsonRpcConfig;
import io.yggdrash.node.controller.TransactionDto;
import org.apache.commons.codec.binary.Hex;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;
import static io.yggdrash.contract.ContractTx.createBranch;
import static io.yggdrash.contract.ContractTx.createStemTxByBranch;
import static io.yggdrash.contract.ContractTx.createYeedTx;
/**
 * Interactive console demo client for a Yggdrash node: sends STEM/YEED
 * transactions and queries transactions, branches and balances over
 * JSON-RPC.  Menu prompts and inputs are read from stdin (Korean text).
 */
public class NodeContractDemoClient {

    private static final JsonRpcConfig rpc = new JsonRpcConfig();
    private static final Wallet wallet = TestUtils.wallet();
    private static final Scanner scan = new Scanner(System.in);

    // Well-known node addresses for the production and staging servers.
    private static final String SERVER_PROD = "10.10.10.100";
    private static final String SERVER_STG = "10.10.20.100";
    // Default transfer target and amount for YEED transactions.
    private static final String TRANSFER_TO = "1a0cdead3d1d1dbeef848fef9053b4f0ae06db9e";
    private static final int TRANSFER_AMOUNT = 1;

    /** Entry point: loops the menu forever; option "6" exits via System.exit. */
    public static void main(String[] args) throws Exception {
        while (true) {
            run();
        }
    }

    /** Prints the main menu and dispatches one chosen action. */
    private static void run() throws Exception {
        System.out.print("===============\n");
        System.out.print("[1] 트랜잭션 전송\n[2] 트랜잭션 조회\n[3] 브랜치 수정\n[4] 브랜치 조회\n[5] 발란스 조회\n[6] 종료\n>");
        String num = scan.nextLine();
        // Any input other than 2-6 (including "1") falls through to sendTx().
        switch (num) {
            case "2" :
                txReceipt();
                break;
            case "3" :
                update();
                break;
            case "4" :
                view();
                break;
            case "5" :
                balance();
                break;
            case "6" :
                System.exit(0);
                break;
            default :
                sendTx();
                break;
        }
    }

    /** Asks which branch to send to; "2" sends YEED, anything else STEM. */
    private static void sendTx() throws Exception {
        System.out.print("[1] STEM [2] YEED\n> ");
        if (scan.nextLine().equals("2")) {
            sendYeedTx();
        } else {
            sendStemTx();
        }
    }

    /**
     * Builds a STEM "create" transaction from a JSON file: seed files are
     * expanded into a branch first, branch files are used as-is.
     */
    private static void sendStemTx() throws Exception {
        System.out.print("사용할 .json 파일명을 입력하세요 (기본값: yeed.seed.json)\n> ");
        String json = scan.nextLine();
        if ("".equals(json)) {
            json = "yeed.seed.json";
        }
        JsonObject branch;
        // File names containing "seed" are treated as seeds and expanded.
        if (!json.contains("seed")) {
            branch = getBranchFile(json);
        } else {
            JsonObject seed = getSeedFile(json);
            branch = createBranch(seed, wallet.getHexAddress());
        }
        sendStemTx(branch, "create");
    }

    /**
     * Signs and sends a STEM transaction for the given branch and method
     * ("create" or "update"), repeated the user-chosen number of times.
     */
    private static void sendStemTx(JsonObject branch, String method) {
        int times = getSendTimes();
        String serverAddress = getServerAddress();
        for (int i = 0; i < times; i++) {
            TransactionHusk tx = createStemTxByBranch(wallet, branch, method);
            rpc.transactionApi(serverAddress).sendTransaction(TransactionDto.createBy(tx));
        }
    }

    /** Reads the target address (default TRANSFER_TO) and sends YEED to it. */
    private static void sendYeedTx() throws Exception {
        System.out.println("전송할 주소를 입력해주세요 (기본값 : " + TRANSFER_TO + ")");
        System.out.println(">");
        String addressHex = scan.nextLine();
        addressHex = addressHex.length() > 0 ? addressHex : TRANSFER_TO;
        Address address = new Address(Hex.decodeHex(addressHex));
        sendYeedTx(address, TRANSFER_AMOUNT);
    }

    /** Signs and sends `times` YEED transfer transactions to `address`. */
    private static void sendYeedTx(Address address, int amount) {
        int times = getSendTimes();
        String serverAddress = getServerAddress();
        for (int i = 0; i < times; i++) {
            TransactionHusk tx = createYeedTx(wallet, address, amount);
            rpc.transactionApi(serverAddress).sendTransaction(TransactionDto.createBy(tx));
        }
    }

    /** Queries the STEM contract's "view" method for a chosen branch id. */
    private static void view() throws Exception {
        String branchId = getBranchId();
        JsonArray params = ContractQry.createParams("branchId", branchId);
        JsonObject qry = ContractQry.createQuery(BranchId.STEM, "view", params);
        String serverAddress = getServerAddress();
        rpc.contractApi(serverAddress).query(qry.toString());
    }

    /** Loads a branch file, replaces its description, and sends an "update". */
    private static void update() throws Exception {
        System.out.println("수정할 .json 파일명을 입력하세요 (기본값: yeed.json)\n>");
        String json = scan.nextLine();
        if ("".equals(json)) {
            json = "yeed.json";
        }
        JsonObject branch = getBranchFile(json);
        System.out.println("수정할 description 의 내용을 적어주세요\n>");
        branch.addProperty("description", scan.nextLine());
        sendStemTx(branch, "update");
    }

    /** Fetches the receipt for a user-entered transaction hash. */
    private static void txReceipt() {
        String branchId = getBranchId();
        System.out.println("조회할 트랜잭션 해시를 적어주세요\n>");
        String txHash = scan.nextLine();
        String serverAddress = getServerAddress();
        rpc.transactionApi(serverAddress).getTransactionReceipt(branchId, txHash);
    }

    /** Queries the YEED contract's balanceOf for a user-entered address. */
    private static void balance() {
        System.out.println("조회할 주소를 적어주세요\n>");
        JsonObject qry = ContractQry.createQuery(BranchId.YEED,
                "balanceOf",
                ContractQry.createParams("address", scan.nextLine()));
        String serverAddress = getServerAddress();
        rpc.accountApi(serverAddress).balanceOf(qry.toString());
    }

    /** Asks which node to target; defaults to localhost. */
    private static String getServerAddress() {
        System.out.println(String.format("전송할 노드 : [1] 로컬 [2] 스테이지(%s) [3] 운영(%s) \n>",
                SERVER_STG, SERVER_PROD));
        String num = scan.nextLine();
        switch (num) {
            case "2":
                return SERVER_STG;
            case "3":
                return SERVER_PROD;
            default:
                return "localhost";
        }
    }

    /**
     * Asks for a branch id: "1" and "2" map to the well-known STEM/YEED ids;
     * any other input is used verbatim as the branch id.
     */
    private static String getBranchId() {
        System.out.println("조회할 트랜잭션의 브랜치 아이디 : [1] STEM [2] YEED [3] etc\n>");
        String branchId = scan.nextLine();
        switch (branchId) {
            case "1":
                return BranchId.stem().toString();
            case "2":
                return BranchId.yeed().toString();
            default:
                return branchId;
        }
    }

    /**
     * Asks how many times to send; empty input means 1.
     * NOTE(review): non-numeric input makes Integer.valueOf throw
     * NumberFormatException, which aborts the current menu action.
     */
    private static int getSendTimes() {
        System.out.print("전송할 횟수를 입력하세요 기본값(1)\n> ");
        String times = scan.nextLine();
        if ("".equals(times)) {
            return 1;
        } else {
            return Integer.valueOf(times);
        }
    }

    /** Loads a seed JSON file from the classpath (classpath:/seed/<name>), read as UTF-8. */
    private static JsonObject getSeedFile(String seed) throws Exception {
        String seedPath = String.format("classpath:/seed/%s", seed);
        ResourceLoader resourceLoader = new DefaultResourceLoader();
        Resource resource = resourceLoader.getResource(seedPath);
        JsonParser jsonParser = new JsonParser();
        return (JsonObject) jsonParser.parse(
                new InputStreamReader(resource.getInputStream(), StandardCharsets.UTF_8));
    }

    /**
     * Loads a branch JSON file from the test-resources branch directory.
     * NOTE(review): FileReader uses the platform default charset while the
     * seed loader uses UTF-8 -- confirm this difference is intentional.
     */
    private static JsonObject getBranchFile(String fileName) throws FileNotFoundException {
        String userDir = System.getProperty("user.dir");
        userDir += "/yggdrash-node/src/test/resources/branch/%s";
        JsonParser jsonParser = new JsonParser();
        return (JsonObject) jsonParser.parse(
                new FileReader(String.format(userDir, fileName)));
    }
}
|
module BrNfe
  module Service
    module Thema
      module V1
        # Performs the "consult NFS-e" (service invoice lookup) operation
        # against the Thema v1 NFS-e web service.
        class ConsultaNfse < BrNfe::Service::Thema::V1::Base
          include BrNfe::Service::Concerns::Rules::ConsultaNfse

          # WSDL endpoint for the consult operation, resolved per city.
          def wsdl
            get_wsdl_by_city[:consult]
          end

          # SOAP operation invoked on the remote service.
          def method_wsdl
            :consultar_nfse
          end

          # Renders the request XML from the builder template.
          def xml_builder
            render_xml 'servico_consultar_nfse_envio'
          end

          # Root tag of the SOAP request body.
          #
          def soap_body_root_tag
            'consultarNfse'
          end

          private

          # Builds the parsed response object from the raw SOAP response.
          def set_response
            @response = BrNfe::Service::Response::Build::ConsultaNfse.new(
              savon_response: @original_response, # Response of the SOAP request
              keys_root_path: [], # Initial path into the response / main parent key
              body_xml_path: [:consultar_nfse_response, :return],
              xml_encode: response_encoding, # Encoding of the response XML
              #//Envelope/Body/ConsultarLoteRpsEnvioResponse/ConsultarLoteRpsResposta
              nfe_xml_path: '//*',
              invoices_path: [:consultar_nfse_resposta, :lista_nfse, :comp_nfse],
              message_errors_path: [:consultar_nfse_resposta, :lista_mensagem_retorno, :mensagem_retorno]
            ).response
          end

          # Class used to wrap the parsed response.
          def response_class
            BrNfe::Service::Response::ConsultaNfse
          end
        end
      end
    end
  end
end
|
<reponame>rockenbf/ze_oss<filename>ze_imu/include/ze/imu/imu_rig.hpp
// Copyright (c) 2015-2016, ETH Zurich, <NAME>, Zurich Eye
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the ETH Zurich, Wyss Zurich, Zurich Eye nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL ETH Zurich, Wyss Zurich, Zurich Eye BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include <string>
#include <vector>
#include <ze/imu/imu_model.hpp>
#include <ze/common/macros.hpp>
#include <ze/common/types.hpp>
#include <ze/common/transformation.hpp>
namespace ze {

// Convenience alias: the collection of IMU models mounted on a rig.
using ImuVector = std::vector<ImuModel::Ptr>;

//! A rig of one or more IMUs together with their mounting transformations
//! relative to the body frame.  Immutable after construction.
class ImuRig
{
public:
  // Fixed-size Eigen members may be stored transitively; keep alignment.
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
  ZE_POINTER_TYPEDEFS(ImuRig);

  // A rig is only meaningful with extrinsics, models and a label.
  ImuRig() = delete;

  //! @param T_B_S  mounting transformations (sensor in body frame), one per IMU.
  //! @param imus   the IMU models, index-aligned with T_B_S.
  //! @param label  human-readable rig name.
  ImuRig(
      const TransformationVector& T_B_S,
      const ImuVector& imus,
      const std::string& label);

  //! Load an imu rig from a YAML file. Returns a nullptr if the loading fails.
  static ImuRig::Ptr loadFromYaml(const std::string& yaml_file);

  //! @name Imu poses with respect to body frame.
  //! @{
  inline const Transformation& T_S_B(size_t imu_index) const
  {
    DEBUG_CHECK_LT(imu_index, T_S_B_.size());
    return T_S_B_[imu_index];
  }

  inline const TransformationVector& T_S_B_vec() const
  {
    return T_S_B_;
  }

  inline const Transformation& T_B_S(size_t imu_index) const
  {
    DEBUG_CHECK_LT(imu_index, T_B_S_.size());
    return T_B_S_[imu_index];
  }

  inline const TransformationVector& T_B_S_vec() const
  {
    return T_B_S_;
  }
  //! @}

  //! @name Imu accessors.
  //! @{
  //! Const reference to the model at imu_index; throws if out of range.
  inline const ImuModel& at(size_t imu_index) const
  {
    return *imus_.at(imu_index);
  }

  //! Shared ownership of the model at imu_index; throws if out of range.
  inline ImuModel::Ptr atShared(size_t imu_index)
  {
    return imus_.at(imu_index);
  }

  inline std::shared_ptr<const ImuModel> atShared(size_t imu_index) const
  {
    return imus_.at(imu_index);
  }

  inline const ImuVector& imus() const { return imus_; }
  //! @}

  //! Number of IMUs in the rig.
  inline size_t size() const { return imus_.size(); }

  inline const std::string& label() const { return label_; }

  //! @name Imu iteration.
  //! @{
  typedef ImuVector::value_type value_type;
  typedef ImuVector::iterator iterator;
  typedef ImuVector::const_iterator const_iterator;
  ImuVector::iterator begin() { return imus_.begin(); }
  ImuVector::iterator end() { return imus_.end(); }
  ImuVector::const_iterator begin() const { return imus_.begin(); }
  ImuVector::const_iterator end() const { return imus_.end(); }
  ImuVector::const_iterator cbegin() const { return imus_.cbegin(); }
  ImuVector::const_iterator cend() const { return imus_.cend(); }
  //! @}

private:
  //! The mounting transformations (both directions, index-aligned with imus_).
  TransformationVector T_S_B_;
  TransformationVector T_B_S_;

  //! The imu models.
  ImuVector imus_;

  //! The rig label
  std::string label_;
};

} // namespace ze
|
'use strict';

// ESLint RuleTester suite for the "terminating-properties" rule, which flags
// Chai terminating assertion properties (e.g. `.ok`, `.false`, `.exist`)
// being invoked as functions.
var rule = require('../../../lib/rules/terminating-properties');
var RuleTester = require('eslint').RuleTester;
var ruleTester = new RuleTester();

ruleTester.run('terminating-properties', rule, {
  // Valid: property-style terminators, unrelated `ok()` calls, and
  // regular chai method assertions must not be flagged.
  valid: [{
    code: [
      'it("works as expected", function() {',
      '  expect(true).to.be.ok;',
      '});'
    ].join('\n')
  }, {
    code: [
      'ok();'
    ].join('\n')
  }, {
    code: [
      'somethingElse.ok();'
    ].join('\n')
  }, {
    code: 'expect(something).to.equal(somethingElse);'
  }],
  // Invalid: each terminating property used as a call yields one error.
  invalid: [{
    code: [
      'it("fails as expected", function() {',
      '  expect(true).to.be.ok();',
      '});'
    ].join('\n'),
    errors: [{
      message: '"to.be.ok" used as function'
    }]
  }, {
    code: [
      'it("fails as expected", function() {',
      '  expect(true).to.be.false();',
      '});'
    ].join('\n'),
    errors: [{
      message: '"to.be.false" used as function'
    }]
  }, {
    code: [
      'it("fails as expected", function() {',
      '  expect(true).to.exist();',
      '});'
    ].join('\n'),
    errors: [{
      message: '"to.exist" used as function'
    }]
  }, {
    // Custom property names supplied via rule options are honored too.
    options: [{properties:['something', 'somethingElse']}],
    code: [
      'it("fails as expected", function() {',
      '  expect(result).to.be.something();',
      '  expect(result).to.be.somethingElse();',
      '});'
    ].join('\n'),
    errors: [{
      message: '"to.be.something" used as function'
    }, {
      message: '"to.be.somethingElse" used as function'
    }]
  }]
});
|
<filename>src/main/java/com/gastos/deputado/dto/Deputado.java
package com.gastos.deputado.dto;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Getter;
import lombok.Setter;
@JsonIgnoreProperties(ignoreUnknown = true)
@Getter
@Setter
@Entity
public class Deputado {

    @Id
    @GeneratedValue
    private int id;

    // Deputy's display name without the "(PARTY-STATE)" suffix.
    private String nome;
    // Party acronym parsed from the raw name, e.g. "PT".
    private String partido;
    // Federative unit (state) parsed from the raw name, e.g. "SP".
    private String uf;
    private String cotaParlamentar;
    private String auxilioMoradia;
    private String verbaGabinete;
    private boolean renunciouAuxilioMoradia;

    /**
     * Sets the name from the raw scraped form {@code "Name (PARTY-STATE)"},
     * extracting {@code partido} and {@code uf} as side effects.
     *
     * Fix: the previous implementation indexed {@code split(...)[1]}
     * unconditionally and threw {@link ArrayIndexOutOfBoundsException} for
     * any name without a "(...)" suffix or without a "-" separator inside
     * it; missing parts are now simply left unset.  The string is also
     * split once instead of three times.
     *
     * @param nome raw name, e.g. {@code "Fulano de Tal (PT-SP)"}
     */
    public void setNome(String nome) {
        // Split only on the first "(": parts[0] is the name proper.
        String[] parts = nome.split("\\(", 2);
        this.nome = parts[0].trim();
        if (parts.length < 2) {
            return; // no "(PARTY-STATE)" suffix present
        }
        // e.g. "PT-SP)" -> partido = "PT", uf = "SP"
        String[] partyState = parts[1].split("-", 2);
        this.partido = partyState[0];
        if (partyState.length > 1) {
            this.uf = partyState[1].replace(")", "");
        }
    }
}
|
# lftp session defaults enforcing FTPS (FTP over TLS).
# Abort the lftp session as soon as any command fails.
set cmd:fail-exit yes
# Use ftps when no protocol is given explicitly.
set cmd:default-protocol ftps
# Authenticate the control connection via AUTH TLS.
set ftp:ssl-auth TLS
# Refuse to fall back to plaintext FTP.
set ftp:ssl-force yes
# Encrypt directory listings and data transfers as well.
set ftp:ssl-protect-list yes
set ftp:ssl-protect-data yes
# Initial PROT level sent for ftps connections; empty here
# (NOTE(review): confirm the server expects no initial PROT command).
set ftps:initial-prot ""
|
<gh_stars>1-10
package au.org.noojee.irrigation.servlets;
import java.util.List;
import javax.persistence.EntityManagerFactory;
import javax.servlet.ServletContextEvent;
import javax.servlet.annotation.WebListener;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.pi4j.io.gpio.GpioController;
import com.pi4j.io.gpio.GpioFactory;
import com.pi4j.io.gpio.GpioPinDigitalOutput;
import com.pi4j.io.gpio.Pin;
import com.pi4j.io.gpio.PinState;
import com.pi4j.io.gpio.RaspiPin;
import au.org.noojee.irrigation.controllers.GardenBedController;
import au.org.noojee.irrigation.dao.EndPointDao;
import au.org.noojee.irrigation.dao.EntityManagerRunnable;
import au.org.noojee.irrigation.dao.MyEntityManagerUtil;
import au.org.noojee.irrigation.entities.EndPoint;
/**
 * Servlet context listener that boots and tears down the Pi-gation
 * irrigation application: initialises logging and the JPA entity manager,
 * provisions the Raspberry Pi GPIO pins, and shuts everything down again
 * when the web application stops.
 *
 * @author <NAME>
 */
// Wire in this listener
@WebListener
public class PgContextListener extends VUEntityManagerContextListener
{
	public static Logger logger;

	// Tracks whether MyEntityManagerUtil.init() completed, so contextDestroyed
	// only shuts the database down if it was actually started.
	private boolean databaseInitialised;

	public PgContextListener()
	{
	}

	/**
	 * Application start-up: logger, database, GPIO pins, garden-bed controller.
	 */
	@Override
	public void contextInitialized(ServletContextEvent sce)
	{
		System.out.println("Pi-gation is starting.");
		System.out.println("Pi-gation: starting logger");
		logger = LogManager.getLogger();
		// in development mode you need to simulate the raspberry pi.
		// set the following environment variables to:
		// PI4J_PLATFORM="Simulated"
		// SimulatedPlatform="RaspberryPi GPIO Provider"
		//
		// In production neither of these variables are used.
		logger.info("PI PLATFORM: " + System.getenv("PI4J_PLATFORM"));
		logger.info("Simulated PLATFORM - simulated: " + System.getenv("SimulatedPlatform"));
		MyEntityManagerUtil.init();
		// Now our db is up we can let the EntityManagerProvider be initialised via base class.
		super.contextInitialized(sce);
		databaseInitialised = true;
		// As we are not in a servlet request we don't have an EM injected
		// so we need to inject our own.
		new EntityManagerRunnable(() ->
		{
			provisionPins();
			GardenBedController.init();
		}).run();
	}

	/**
	 * Provisions every Raspberry Pi GPIO pin as a digital output, using the
	 * configured off-state for pins known to the database and HIGH otherwise.
	 */
	public static void provisionPins()
	{
		// create gpio controller
		final GpioController gpio = GpioFactory.getInstance();
		EndPointDao daoEndPoint = new EndPointDao();
		List<au.org.noojee.irrigation.entities.EndPoint> pins = daoEndPoint.getAll();
		// Set default states for pins.
		for (Pin pin : RaspiPin.allPins())
		{
			PinState offState = PinState.HIGH;
			au.org.noojee.irrigation.entities.EndPoint configuredPin = getConfiguredPin(pin, pins);
			if (configuredPin != null)
				offState = configuredPin.getPinActiviationType().getOffState();
			GpioPinDigitalOutput digitPin = gpio.provisionDigitalOutputPin(pin, offState);
			// NOTE(review): shutdown state is hard-coded to LOW even for pins whose
			// configured off-state is HIGH -- confirm this asymmetry is intentional.
			digitPin.setShutdownOptions(true, PinState.LOW);
		}
	}

	/**
	 * Finds the persisted EndPoint configuration matching the given hardware
	 * pin by address, or null when the pin has no stored configuration.
	 */
	static private EndPoint getConfiguredPin(Pin pin,
			List<au.org.noojee.irrigation.entities.EndPoint> pins)
	{
		au.org.noojee.irrigation.entities.EndPoint configuredPin = null;
		for (au.org.noojee.irrigation.entities.EndPoint checkPin : pins)
		{
			if (checkPin.getPinNo() == pin.getAddress())
			{
				configuredPin = checkPin;
				break;
			}
		}
		return configuredPin;
	}

	/**
	 * Application shutdown: stop GPIO threads, close the entity manager,
	 * shut the database down (if started) and stop logging.
	 */
	@Override
	public void contextDestroyed(ServletContextEvent sce)
	{
		logger.info("Irrigation Manager is shutting down.");
		// stop all GPIO activity/threads by shutting down the GPIO controller
		// (this method will forcefully shutdown all GPIO monitoring threads and
		// scheduled tasks)
		final GpioController gpio = GpioFactory.getInstance();
		gpio.shutdown();
		// Close down the entity manager.
		super.contextDestroyed(sce);
		// Only shutdown the db if we actually got to the point of initialising it.
		if (databaseInitialised)
			MyEntityManagerUtil.databaseShutdown();
		LogManager.shutdown();
	}

	/** Supplies the entity manager factory to the base listener. */
	@Override
	protected EntityManagerFactory getEntityManagerFactory()
	{
		return MyEntityManagerUtil.getEntityManagerFactory();
	}
}
|
/**
 * Checks whether the first argument is strictly greater than the second.
 *
 * @param {number} x - value to test
 * @param {number} y - value to compare against
 * @returns {boolean} true if x > y, false otherwise
 */
function greaterThan(x, y) {
  // The comparison already produces a boolean; the if/else that returned
  // true/false explicitly was redundant.
  return x > y;
}
|
<gh_stars>1-10
// Status constants shared across the app.  Exported as plain strings so
// they can be compared and serialized directly.
module.exports = {
  RUNNING: "RUNNING",
  DELETED: "DELETED",
  COMPLETED: "COMPLETED",
  NONE: "NONE"
};
|
#include <Python.h>
/* a preexisting C-level function you want to expose, e.g:
 * sums `len` doubles starting at `data`; returns 0.0 for len <= 0. */
static double total(double* data, int len)
{
    double sum = 0.0;
    int i;

    for (i = 0; i < len; ++i) {
        sum += data[i];
    }
    return sum;
}
/* here is how you expose it to Python code: accepts one iterable of
 * numbers, copies it into a C double array, sums it with total() and
 * returns the sum as a Python float. */
static PyObject *totalDoubles(PyObject *self, PyObject *args)
{
	PyObject* seq;
	double *dbar;
	double result;
	int seqlen;
	int i;
	/* get one argument as a sequence */
	if(!PyArg_ParseTuple(args, "O", &seq))
		return 0;
	/* PySequence_Fast returns a NEW reference (list/tuple view) that must
	 * be DECREF'd on every exit path below. */
	seq = PySequence_Fast(seq, "argument must be iterable");
	if(!seq)
		return 0;
	/* prepare data as an array of doubles */
	seqlen = PySequence_Fast_GET_SIZE(seq);
	dbar = malloc(seqlen*sizeof(double));
	if(!dbar) {
		Py_DECREF(seq);
		return PyErr_NoMemory();
	}
	for(i=0; i < seqlen; i++) {
		PyObject *fitem;
		/* GET_ITEM returns a BORROWED reference -- no DECREF needed. */
		PyObject *item = PySequence_Fast_GET_ITEM(seq, i);
		if(!item) {
			Py_DECREF(seq);
			free(dbar);
			return 0;
		}
		/* Coerce each element to float; rejects non-numeric items. */
		fitem = PyNumber_Float(item);
		if(!fitem) {
			Py_DECREF(seq);
			free(dbar);
			PyErr_SetString(PyExc_TypeError, "all items must be numbers");
			return 0;
		}
		dbar[i] = PyFloat_AS_DOUBLE(fitem);
		Py_DECREF(fitem);
	}
	/* clean up, compute, and return result */
	Py_DECREF(seq);
	result = total(dbar, seqlen);
	free(dbar);
	return Py_BuildValue("d", result);
}
/* Method table: maps the Python-visible name "total" to totalDoubles. */
static PyMethodDef totalMethods[] = {
	{"total", totalDoubles, METH_VARARGS, "Sum a sequence of numbers."},
	{0} /* sentinel -- marks the end of the table */
};
/* Module initialisation entry point using the Python 2 convention
 * (init<module> + Py_InitModule).
 * NOTE(review): a Python 3 build would need PyInit_total/PyModule_Create
 * instead -- confirm the target interpreter version. */
void
inittotal(void)
{
	(void) Py_InitModule("total", totalMethods);
}
|
#!/bin/bash
#
# Copyright IBM Corp All Rights Reserved
#
# SPDX-License-Identifier: Apache-2.0
#
# This script brings up a Hyperledger Fabric network for testing smart contracts
# and applications. The test network consists of two organizations with one
# peer each, and a single node Raft ordering service. Users can also use this
# script to create a channel deploy a chaincode on the channel
#
# prepending $PWD/../bin to PATH to ensure we are picking up the correct binaries
# this may be commented out to resolve installed version of tools if desired
export PATH=${PWD}/../bin:$PATH
export FABRIC_CFG_PATH=${PWD}/configtx
export VERBOSE=false
source scriptUtils.sh
# Obtain CONTAINER_IDS and remove them
# TODO Might want to make this optional - could clear other containers
# This function is called when you bring a network down
# Remove any chaincode containers (image name matching "dev-peer*") left
# over from previous runs.  Called when the network is brought down.
function clearContainers() {
  # Column 2 of `docker ps -a` is the image name; print the container id.
  CONTAINER_IDS=$(docker ps -a | awk '($2 ~ /dev-peer.*/) {print $1}')
  # Fix: POSIX marks "-o" inside [ ] as obsolescent/unreliable; use two
  # separate tests joined with || (and the portable "=" comparison).
  if [ -z "$CONTAINER_IDS" ] || [ "$CONTAINER_IDS" = " " ]; then
    infoln "No containers available for deletion"
  else
    # Intentionally unquoted so each id becomes its own argument.
    docker rm -f $CONTAINER_IDS
  fi
}
# Delete any images that were generated as a part of this setup
# specifically the following images are often left behind:
# This function is called when you bring the network down
# Remove chaincode images ("dev-peer*") generated by previous runs.
# Called when the network is brought down.
function removeUnwantedImages() {
  # Column 1 of `docker images` is the repository name; print the image id.
  DOCKER_IMAGE_IDS=$(docker images | awk '($1 ~ /dev-peer.*/) {print $3}')
  # Fix: POSIX marks "-o" inside [ ] as obsolescent/unreliable; use two
  # separate tests joined with || (and the portable "=" comparison).
  if [ -z "$DOCKER_IMAGE_IDS" ] || [ "$DOCKER_IMAGE_IDS" = " " ]; then
    infoln "No images available for deletion"
  else
    # Intentionally unquoted so each id becomes its own argument.
    docker rmi -f $DOCKER_IMAGE_IDS
  fi
}
# Versions of fabric known not to work with the test network
NONWORKING_VERSIONS="^1\.0\. ^1\.1\. ^1\.2\. ^1\.3\. ^1\.4\."

# Do some basic sanity checking to make sure that the appropriate versions of fabric
# binaries/images are available. In the future, additional checking for the presence
# of go or other items could be added.
# Exits (via exit/fatalln) on a missing binary or an unsupported version;
# merely warns when binaries and docker images are out of sync.
function checkPrereqs() {
  ## Check if your have cloned the peer binaries and configuration files.
  peer version > /dev/null 2>&1

  if [[ $? -ne 0 || ! -d "../config" ]]; then
    errorln "Peer binary and configuration files not found.."
    errorln
    errorln "Follow the instructions in the Fabric docs to install the Fabric Binaries:"
    errorln "https://hyperledger-fabric.readthedocs.io/en/latest/install.html"
    exit 1
  fi
  # use the fabric tools container to see if the samples and binaries match your
  # docker images
  LOCAL_VERSION=$(peer version | sed -ne 's/ Version: //p')
  DOCKER_IMAGE_VERSION=$(docker run --rm hyperledger/fabric-tools:$IMAGETAG peer version | sed -ne 's/ Version: //p' | head -1)

  infoln "LOCAL_VERSION=$LOCAL_VERSION"
  infoln "DOCKER_IMAGE_VERSION=$DOCKER_IMAGE_VERSION"

  if [ "$LOCAL_VERSION" != "$DOCKER_IMAGE_VERSION" ]; then
    warnln "Local fabric binaries and docker images are out of sync. This may cause problems."
  fi

  for UNSUPPORTED_VERSION in $NONWORKING_VERSIONS; do
    # NOTE(review): this pipes infoln's output into grep -- assumes infoln
    # echoes its argument to stdout; confirm in scriptUtils.sh.
    infoln "$LOCAL_VERSION" | grep -q $UNSUPPORTED_VERSION
    if [ $? -eq 0 ]; then
      fatalln "Local Fabric binary version of $LOCAL_VERSION does not match the versions supported by the test network."
    fi

    infoln "$DOCKER_IMAGE_VERSION" | grep -q $UNSUPPORTED_VERSION
    if [ $? -eq 0 ]; then
      fatalln "Fabric Docker image version of $DOCKER_IMAGE_VERSION does not match the versions supported by the test network."
    fi
  done

  ## Check for fabric-ca
  if [ "$CRYPTO" == "Certificate Authorities" ]; then
    fabric-ca-client version > /dev/null 2>&1
    if [[ $? -ne 0 ]]; then
      errorln "fabric-ca-client binary not found.."
      errorln
      errorln "Follow the instructions in the Fabric docs to install the Fabric Binaries:"
      errorln "https://hyperledger-fabric.readthedocs.io/en/latest/install.html"
      exit 1
    fi
    CA_LOCAL_VERSION=$(fabric-ca-client version | sed -ne 's/ Version: //p')
    CA_DOCKER_IMAGE_VERSION=$(docker run --rm hyperledger/fabric-ca:$CA_IMAGETAG fabric-ca-client version | sed -ne 's/ Version: //p' | head -1)
    infoln "CA_LOCAL_VERSION=$CA_LOCAL_VERSION"
    infoln "CA_DOCKER_IMAGE_VERSION=$CA_DOCKER_IMAGE_VERSION"

    if [ "$CA_LOCAL_VERSION" != "$CA_DOCKER_IMAGE_VERSION" ]; then
      warnln "Local fabric-ca binaries and docker images are out of sync. This may cause problems."
    fi
  fi
}
# Before you can bring up a network, each organization needs to generate the crypto
# material that will define that organization on the network. Because Hyperledger
# Fabric is a permissioned blockchain, each node and user on the network needs to
# use certificates and keys to sign and verify its actions. In addition, each user
# needs to belong to an organization that is recognized as a member of the network.
# You can use the Cryptogen tool or Fabric CAs to generate the organization crypto
# material.
# By default, the sample network uses cryptogen. Cryptogen is a tool that is
# meant for development and testing that can quickly create the certificates and keys
# that can be consumed by a Fabric network. The cryptogen tool consumes a series
# of configuration files for each organization in the "organizations/cryptogen"
# directory. Cryptogen uses the files to generate the crypto material for each
# org in the "organizations" directory.
# You can also Fabric CAs to generate the crypto material. CAs sign the certificates
# and keys that they generate to create a valid root of trust for each organization.
# The script uses Docker Compose to bring up three CAs, one for each peer organization
# and the ordering organization. The configuration file for creating the Fabric CA
# servers are in the "organizations/fabric-ca" directory. Within the same directory,
# the "registerEnroll.sh" script uses the Fabric CA client to create the identities,
# certificates, and MSP folders that are needed to create the test network in the
# "organizations/ordererOrganizations" directory.
# Create Organization crypto material using cryptogen or CAs
function createOrgs() {
  # Start from a clean slate: stale MSP material from a previous run would
  # clash with the freshly generated certificates.
  if [ -d "organizations/peerOrganizations" ]; then
    rm -Rf organizations/peerOrganizations && rm -Rf organizations/ordererOrganizations
  fi
  # Create crypto material using cryptogen
  if [ "$CRYPTO" == "cryptogen" ]; then
    which cryptogen
    if [ "$?" -ne 0 ]; then
      fatalln "cryptogen tool not found. exiting"
    fi
    infoln "Generate certificates using cryptogen tool"
    infoln "Create Org1 Identities"
    # set -x echoes each cryptogen invocation for the operator; the grouped
    # `{ set +x; } 2>/dev/null` turns tracing back off without the `set +x`
    # line itself appearing in the output.
    set -x
    cryptogen generate --config=./organizations/cryptogen/crypto-config-org1.yaml --output="organizations"
    res=$?
    { set +x; } 2>/dev/null
    if [ $res -ne 0 ]; then
      fatalln "Failed to generate certificates..."
    fi
    infoln "Create Org2 Identities"
    set -x
    cryptogen generate --config=./organizations/cryptogen/crypto-config-org2.yaml --output="organizations"
    res=$?
    { set +x; } 2>/dev/null
    if [ $res -ne 0 ]; then
      fatalln "Failed to generate certificates..."
    fi
    infoln "Create Orderer Org Identities"
    set -x
    cryptogen generate --config=./organizations/cryptogen/crypto-config-orderer.yaml --output="organizations"
    res=$?
    { set +x; } 2>/dev/null
    if [ $res -ne 0 ]; then
      fatalln "Failed to generate certificates..."
    fi
  fi
  # Create crypto material using Fabric CAs
  if [ "$CRYPTO" == "Certificate Authorities" ]; then
    infoln "Generate certificates using Fabric CA's"
    IMAGE_TAG=${CA_IMAGETAG} docker-compose -f $COMPOSE_FILE_CA up -d 2>&1
    # registerEnroll.sh defines createOrg1/createOrg2/createOrderer used below.
    . organizations/fabric-ca/registerEnroll.sh
    # Poll until the org1 CA has written its TLS certificate, which signals
    # that the CA containers are up and ready to enroll identities.
    while :
    do
      if [ ! -f "organizations/fabric-ca/org1/tls-cert.pem" ]; then
        sleep 1
      else
        break
      fi
    done
    infoln "Create Org1 Identities"
    createOrg1
    infoln "Create Org2 Identities"
    createOrg2
    infoln "Create Orderer Org Identities"
    createOrderer
  fi
  infoln "Generate CCP files for Org1 and Org2"
  ./organizations/ccp-generate.sh
}
# Once you create the organization crypto material, you need to create the
# genesis block of the orderer system channel. This block is required to bring
# up any orderer nodes and create any application channels.
# The configtxgen tool is used to create the genesis block. Configtxgen consumes a
# "configtx.yaml" file that contains the definitions for the sample network. The
# genesis block is defined using the "TwoOrgsOrdererGenesis" profile at the bottom
# of the file. This profile defines a sample consortium, "SampleConsortium",
# consisting of our two Peer Orgs. This consortium defines which organizations are
# recognized as members of the network. The peer and ordering organizations are defined
# in the "Profiles" section at the top of the file. As part of each organization
# profile, the file points to the location of the MSP directory for each member.
# This MSP is used to create the channel MSP that defines the root of trust for
# each organization. In essence, the channel MSP allows the nodes and users to be
# recognized as network members. The file also specifies the anchor peers for each
# peer org. In future steps, this same file is used to create the channel creation
# transaction and the anchor peer updates.
#
#
# If you receive the following warning, it can be safely ignored:
#
# [bccsp] GetDefault -> WARN 001 Before using BCCSP, please call InitFactories(). Falling back to bootBCCSP.
#
# You can ignore the logs regarding intermediate certs, we are not using them in
# this crypto implementation.
# Generate orderer system channel genesis block.
function createConsortium() {
  # Generate the orderer system channel genesis block from the
  # TwoOrgsOrdererGenesis profile in configtx.yaml.
  which configtxgen
  if [ "$?" -ne 0 ]; then
    fatalln "configtxgen tool not found."
  fi
  infoln "Generating Orderer Genesis block"
  # Note: For some unknown reason (at least for now) the block file can't be
  # named orderer.genesis.block or the orderer will fail to launch!
  set -x
  configtxgen -profile TwoOrgsOrdererGenesis -channelID system-channel -outputBlock ./system-genesis-block/genesis.block
  res=$?
  { set +x; } 2>/dev/null
  if [ $res -ne 0 ]; then
    fatalln "Failed to generate orderer genesis block..."
  fi
}
# After we create the org crypto material and the system channel genesis block,
# we can now bring up the peers and ordering service. By default, the base
# file for creating the network is "docker-compose-test-net.yaml" in the ``docker``
# folder. This file defines the environment variables and file mounts that
# point the crypto material and genesis block that were created in earlier.
# Bring up the peer and orderer nodes using docker compose.
# Bring up the peer and orderer nodes with docker-compose, generating the
# crypto material and genesis block first if they do not exist yet.
function networkUp() {
  checkPrereqs
  # generate artifacts if they don't exist
  if [ ! -d "organizations/peerOrganizations" ]; then
    createOrgs
    createConsortium
  fi
  COMPOSE_FILES="-f ${COMPOSE_FILE_BASE}"
  if [ "${DATABASE}" == "couchdb" ]; then
    COMPOSE_FILES="${COMPOSE_FILES} -f ${COMPOSE_FILE_COUCH}"
  fi
  IMAGE_TAG=$IMAGETAG docker-compose ${COMPOSE_FILES} up -d 2>&1
  # Capture docker-compose's exit status immediately: previously $? was
  # tested after `docker ps -a`, so it reflected the ps command and a
  # failed network start could never be detected.
  res=$?
  docker ps -a
  if [ $res -ne 0 ]; then
    fatalln "Unable to start network"
  fi
}
## Call the script to create the channel and join the peers of org1 and org2
function createChannel() {
  ## Bring up the network if it is not already up.
  if [ ! -d "organizations/peerOrganizations" ]; then
    infoln "Bringing up network"
    networkUp
  fi
  # now run the script that creates a channel. This script uses configtxgen once
  # more to create the channel creation transaction and the anchor peer updates.
  # configtx.yaml is mounted in the cli container, which allows us to use it to
  # create the channel artifacts
  scripts/createChannel.sh $CHANNEL_NAME $CLI_DELAY $MAX_RETRY $VERBOSE
  if [ $? -ne 0 ]; then
    fatalln "Create channel failed"
  fi
}
## Call the script to deploy a chaincode to the channel
function deployCC() {
  # Delegate chaincode packaging/approval/commit to the helper script, passing
  # all chaincode knobs collected from the command line.
  scripts/deployCC.sh $CHANNEL_NAME $CC_NAME $CC_SRC_PATH $CC_SRC_LANGUAGE $CC_VERSION $CC_SEQUENCE $CC_INIT_FCN $CC_END_POLICY $CC_COLL_CONFIG $CLI_DELAY $MAX_RETRY $VERBOSE
  if [ $? -ne 0 ]; then
    fatalln "Deploying chaincode failed"
  fi
  # Terminates the whole script after a successful deploy (deployCC is the
  # final action of the "deployCC" mode).
  exit 0
}
# Tear down running network
# Tear down the running network; unless restarting, also delete all
# generated crypto material, blocks and channel artifacts.
function networkDown() {
  # Bring down the network, deleting the volumes.
  # Fixed: this invocation previously ended with the stray word "docker"
  # instead of a subcommand, so the base/couch/CA containers were never
  # actually stopped.
  docker-compose -f $COMPOSE_FILE_BASE -f $COMPOSE_FILE_COUCH -f $COMPOSE_FILE_CA down --volumes --remove-orphans
  # stop org3 containers also in addition to org1 and org2, in case we were running sample to add org3
  docker-compose -f $COMPOSE_FILE_COUCH_ORG3 -f $COMPOSE_FILE_ORG3 down --volumes --remove-orphans
  # Don't remove the generated artifacts -- note, the ledgers are always removed
  if [ "$MODE" != "restart" ]; then
    #Cleanup the chaincode containers
    clearContainers
    #Cleanup images
    removeUnwantedImages
    # remove orderer block and other channel configuration transactions and certs
    docker run --rm -v $(pwd):/data busybox sh -c 'cd /data && rm -rf system-genesis-block/*.block organizations/peerOrganizations organizations/ordererOrganizations'
    ## remove fabric ca artifacts
    docker run --rm -v $(pwd):/data busybox sh -c 'cd /data && rm -rf organizations/fabric-ca/org1/msp organizations/fabric-ca/org1/tls-cert.pem organizations/fabric-ca/org1/ca-cert.pem organizations/fabric-ca/org1/IssuerPublicKey organizations/fabric-ca/org1/IssuerRevocationPublicKey organizations/fabric-ca/org1/fabric-ca-server.db'
    docker run --rm -v $(pwd):/data busybox sh -c 'cd /data && rm -rf organizations/fabric-ca/org2/msp organizations/fabric-ca/org2/tls-cert.pem organizations/fabric-ca/org2/ca-cert.pem organizations/fabric-ca/org2/IssuerPublicKey organizations/fabric-ca/org2/IssuerRevocationPublicKey organizations/fabric-ca/org2/fabric-ca-server.db'
    docker run --rm -v $(pwd):/data busybox sh -c 'cd /data && rm -rf organizations/fabric-ca/ordererOrg/msp organizations/fabric-ca/ordererOrg/tls-cert.pem organizations/fabric-ca/ordererOrg/ca-cert.pem organizations/fabric-ca/ordererOrg/IssuerPublicKey organizations/fabric-ca/ordererOrg/IssuerRevocationPublicKey organizations/fabric-ca/ordererOrg/fabric-ca-server.db'
    docker run --rm -v $(pwd):/data busybox sh -c 'cd /data && rm -rf addOrg3/fabric-ca/org3/msp addOrg3/fabric-ca/org3/tls-cert.pem addOrg3/fabric-ca/org3/ca-cert.pem addOrg3/fabric-ca/org3/IssuerPublicKey addOrg3/fabric-ca/org3/IssuerRevocationPublicKey addOrg3/fabric-ca/org3/fabric-ca-server.db'
    # remove channel and script artifacts
    docker run --rm -v $(pwd):/data busybox sh -c 'cd /data && rm -rf channel-artifacts log.txt *.tar.gz'
  fi
}
# Obtain the OS and Architecture string that will be used to select the correct
# native binaries for your platform, e.g., darwin-amd64 or linux-amd64
OS_ARCH=$(echo "$(uname -s | tr '[:upper:]' '[:lower:]' | sed 's/mingw64_nt.*/windows/')-$(uname -m | sed 's/x86_64/amd64/g')" | awk '{print tolower($0)}')
# Using crypto vs CA. default is cryptogen
CRYPTO="cryptogen"
# timeout duration - the duration the CLI should wait for a response from
# another container before giving up
MAX_RETRY=5
# default for delay between commands
CLI_DELAY=3
# channel name defaults to "mychannel"
CHANNEL_NAME="mychannel"
# chaincode name defaults to "basic"
CC_NAME="basic"
# chaincode path defaults to "NA"
CC_SRC_PATH="NA"
# endorsement policy defaults to "NA". This would allow chaincodes to use the majority default policy.
CC_END_POLICY="NA"
# collection configuration defaults to "NA"
CC_COLL_CONFIG="NA"
# chaincode init function defaults to "NA"
CC_INIT_FCN="NA"
# use this as the default docker-compose yaml definition
COMPOSE_FILE_BASE=docker/docker-compose-test-net.yaml
# docker-compose.yaml file if you are using couchdb
COMPOSE_FILE_COUCH=docker/docker-compose-couch.yaml
# certificate authorities compose file
COMPOSE_FILE_CA=docker/docker-compose-ca.yaml
# use this as the docker compose couch file for org3
COMPOSE_FILE_COUCH_ORG3=addOrg3/docker/docker-compose-couch-org3.yaml
# use this as the default docker-compose yaml definition for org3
COMPOSE_FILE_ORG3=addOrg3/docker/docker-compose-org3.yaml
#
# use go as the default language for chaincode
CC_SRC_LANGUAGE="go"
# Chaincode version
CC_VERSION="1.0"
# Chaincode definition sequence
CC_SEQUENCE=1
# default image tag
IMAGETAG="latest"
# default ca image tag
CA_IMAGETAG="latest"
# default database
DATABASE="leveldb"
# Parse commandline args
## Parse mode
# The first positional argument selects the mode (up/down/restart/deployCC...).
if [[ $# -lt 1 ]] ; then
  printHelp
  exit 0
else
  MODE=$1
  shift
fi
# parse a createChannel subcommand if used
# (e.g. `network.sh up createChannel` promotes the mode to createChannel)
if [[ $# -ge 1 ]] ; then
  key="$1"
  if [[ "$key" == "createChannel" ]]; then
    export MODE="createChannel"
    shift
  fi
fi
# parse flags
# Convention: flags that take a value shift once inside their case (to drop
# the value) plus once at the end of the loop (to drop the flag). Flags with
# no value must NOT shift inside their case.
while [[ $# -ge 1 ]] ; do
  key="$1"
  case $key in
  -h )
    printHelp $MODE
    exit 0
    ;;
  -c )
    CHANNEL_NAME="$2"
    shift
    ;;
  -ca )
    CRYPTO="Certificate Authorities"
    ;;
  -r )
    MAX_RETRY="$2"
    shift
    ;;
  -d )
    CLI_DELAY="$2"
    shift
    ;;
  -s )
    DATABASE="$2"
    shift
    ;;
  -ccl )
    CC_SRC_LANGUAGE="$2"
    shift
    ;;
  -ccn )
    CC_NAME="$2"
    shift
    ;;
  -ccv )
    CC_VERSION="$2"
    shift
    ;;
  -ccs )
    CC_SEQUENCE="$2"
    shift
    ;;
  -ccp )
    CC_SRC_PATH="$2"
    shift
    ;;
  -ccep )
    CC_END_POLICY="$2"
    shift
    ;;
  -cccg )
    CC_COLL_CONFIG="$2"
    shift
    ;;
  -cci )
    CC_INIT_FCN="$2"
    shift
    ;;
  -i )
    IMAGETAG="$2"
    shift
    ;;
  -cai )
    CA_IMAGETAG="$2"
    shift
    ;;
  -verbose )
    VERBOSE=true
    # Fixed: no in-case shift here. -verbose takes no value; the previous
    # extra shift combined with the loop's trailing shift consumed the next
    # argument and silently dropped whatever flag followed -verbose.
    ;;
  * )
    errorln "Unknown flag: $key"
    printHelp
    exit 1
    ;;
  esac
  shift
done
# Are we generating crypto material with this command?
if [ ! -d "organizations/peerOrganizations" ]; then
  CRYPTO_MODE="with crypto from '${CRYPTO}'"
else
  CRYPTO_MODE=""
fi
# Determine mode of operation and printing out what we asked for
if [ "$MODE" == "up" ]; then
  infoln "Starting nodes with CLI timeout of '${MAX_RETRY}' tries and CLI delay of '${CLI_DELAY}' seconds and using database '${DATABASE}' ${CRYPTO_MODE}"
elif [ "$MODE" == "createChannel" ]; then
  infoln "Creating channel '${CHANNEL_NAME}'."
  # Fixed: the closing quote after ${DATABASE} was missing, which printed an
  # unbalanced quote in the log message.
  infoln "If network is not up, starting nodes with CLI timeout of '${MAX_RETRY}' tries and CLI delay of '${CLI_DELAY}' seconds and using database '${DATABASE}' ${CRYPTO_MODE}"
elif [ "$MODE" == "down" ]; then
  infoln "Stopping network"
elif [ "$MODE" == "restart" ]; then
  infoln "Restarting network"
elif [ "$MODE" == "deployCC" ]; then
  infoln "deploying chaincode on channel '${CHANNEL_NAME}'"
else
  printHelp
  exit 1
fi
# Dispatch to the selected mode's handler.
if [ "${MODE}" == "up" ]; then
  networkUp
elif [ "${MODE}" == "createChannel" ]; then
  createChannel
elif [ "${MODE}" == "deployCC" ]; then
  deployCC
elif [ "${MODE}" == "down" ]; then
  networkDown
else
  printHelp
  exit 1
fi
|
public extension Double {
    /// Formats the value as a decimal string, optionally with a fixed number
    /// of fraction digits.
    ///
    /// - Parameter precision: Number of fraction digits to render. Pass nil
    ///   to keep the formatter's default behaviour.
    /// - Returns: The formatted string, or nil when the precision is negative.
    func decimalString(withPrecision precision: Int? = nil) -> String? {
        if let digits = precision {
            // A negative digit count is a caller error: log it and bail out.
            guard digits >= 0 else {
                Logger.standard.logError(Self.precisionError, withDetail: precision)
                return nil
            }
        }
        let formatter = NumberFormatter()
        formatter.numberStyle = .decimal
        if let digits = precision {
            formatter.minimumFractionDigits = digits
            formatter.maximumFractionDigits = digits
        }
        return formatter.string(from: NSNumber(value: self))
    }
}
|
# encoding: utf-8
# Chef cookbook metadata: identifies the cookbook and declares the cookbooks
# it depends on so the resolver can fetch them.
name 'ubuntu_base'
maintainer '<NAME>'
maintainer_email '<EMAIL>'
license 'Apache 2.0'
description 'Installs/configures all base Ubuntu installs.'
version '0.14.0'
recipe 'ubuntu_base::default', 'Installs/configures all base Ubuntu installs.'
# Upstream cookbooks this cookbook layers on top of.
depends 'apt'
depends 'chef-sugar'
depends 'sysdig'
|
<gh_stars>1-10
package operation
import (
"math"
"reflect"
"testing"
"github.com/gojek/merlin/pkg/transformer/types/series"
"github.com/stretchr/testify/assert"
)
// TestOperationNode_Execute exercises OperationNode.Execute across arithmetic,
// comparison and logical operators, for scalar operands, series operands and
// mixed scalar/series combinations — including numeric type promotion,
// operations chained through Next, and the error cases for incompatible
// operand types or mismatched series dimensions.
func TestOperationNode_Execute(t *testing.T) {
	tests := []struct {
		name    string
		node    *OperationNode
		want    interface{}
		wantErr bool
	}{
		{
			name: "add two series",
			node: &OperationNode{
				LeftVal:   series.New([]interface{}{1, 2, 3, 4, 5, nil}, series.Int, ""),
				RightVal:  series.New([]int{2, 3, 4, 5, 6, 7}, series.Int, ""),
				Operation: Add,
			},
			want: series.New([]interface{}{3, 5, 7, 9, 11, nil}, series.Int, ""),
		},
		{
			name: "add int32 with int64",
			node: &OperationNode{
				LeftVal:   int32(5),
				RightVal:  int64(6),
				Operation: Add,
			},
			want: int64(11),
		},
		{
			name: "divide int",
			node: &OperationNode{
				LeftVal:   10,
				RightVal:  2,
				Operation: Divide,
			},
			want: int64(5),
		},
		{
			name: "multiply int",
			node: &OperationNode{
				LeftVal:   10,
				RightVal:  2,
				Operation: Multiply,
			},
			want: int64(20),
		},
		{
			name: "modulo int",
			node: &OperationNode{
				LeftVal:   10,
				RightVal:  3,
				Operation: Modulo,
			},
			want: int64(1),
		},
		{
			name: "add int with string",
			node: &OperationNode{
				LeftVal:   10,
				RightVal:  "abc",
				Operation: Add,
			},
			wantErr: true,
		},
		{
			name: "substract int64",
			node: &OperationNode{
				LeftVal:   int64(4),
				RightVal:  int64(2),
				Operation: Substract,
			},
			want: int64(2),
		},
		{
			name: "add float32 with int64",
			node: &OperationNode{
				LeftVal:   float32(5.0),
				RightVal:  int64(6),
				Operation: Add,
			},
			want: float64(11.0),
		},
		{
			name: "divide float64",
			node: &OperationNode{
				LeftVal:   float64(6),
				RightVal:  float64(1.5),
				Operation: Divide,
			},
			want: float64(4.0),
		},
		{
			name: "divide 0 float64",
			node: &OperationNode{
				LeftVal:   float64(6),
				RightVal:  0,
				Operation: Divide,
			},
			want: math.NaN(),
		},
		{
			name: "multiply float64",
			node: &OperationNode{
				LeftVal:   float64(2.0),
				RightVal:  float64(3.0),
				Operation: Multiply,
			},
			want: float64(6.0),
		},
		{
			name: "modulo float",
			node: &OperationNode{
				LeftVal:   10.1,
				RightVal:  3.2,
				Operation: Modulo,
			},
			wantErr: true,
		},
		{
			name: "substract float",
			node: &OperationNode{
				LeftVal:   float64(10.25),
				RightVal:  float64(10.0),
				Operation: Substract,
			},
			want: float64(0.25),
		},
		{
			name: "add two string",
			node: &OperationNode{
				LeftVal:   "abcd",
				RightVal:  "efgh",
				Operation: Add,
			},
			want: "abcdefgh",
		},
		{
			name: "add string with int - error",
			node: &OperationNode{
				LeftVal:   "abcd",
				RightVal:  5,
				Operation: Add,
			},
			wantErr: true,
		},
		{
			name: "add bool with int - error",
			node: &OperationNode{
				LeftVal:   true,
				RightVal:  5,
				Operation: Add,
			},
			wantErr: true,
		},
		{
			name: "add string with series of string",
			node: &OperationNode{
				LeftVal:   "prefix_",
				RightVal:  series.New([]string{"a", "b", "c", "d"}, series.String, ""),
				Operation: Add,
			},
			want: series.New([]string{"prefix_a", "prefix_b", "prefix_c", "prefix_d"}, series.String, ""),
		},
		{
			name: "add multiple series",
			node: &OperationNode{
				LeftVal:   series.New([]interface{}{1, 2, 3, 4, 5, nil}, series.Int, ""),
				RightVal:  series.New([]int{2, 3, 4, 5, 6, 7}, series.Int, ""),
				Operation: Add,
				Next: &OperationNode{
					RightVal: &OperationNode{
						LeftVal:   series.New([]interface{}{1, 1, 1, 1, 1, nil}, series.Int, ""),
						RightVal:  series.New([]int{1, 1, 1, 1, 1, 1}, series.Int, ""),
						Operation: Add,
					},
					Operation: Add,
				},
			},
			want: series.New([]interface{}{5, 7, 9, 11, 13, nil}, series.Int, ""),
		},
		{
			name: "substract two series",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  series.New([]int{2, 3, 4, 5, 6}, series.Int, ""),
				Operation: Substract,
			},
			want: series.New([]int{-1, -1, -1, -1, -1}, series.Int, ""),
		},
		{
			name: "multiply int with float series",
			node: &OperationNode{
				LeftVal:   2,
				RightVal:  series.New([]float64{2, 3, 4, 5, 6}, series.Float, ""),
				Operation: Multiply,
			},
			want: series.New([]float64{4, 6, 8, 10, 12}, series.Float, ""),
		},
		{
			name: "add float64 with float series",
			node: &OperationNode{
				LeftVal:   float64(2.0),
				RightVal:  series.New([]float64{2, 3, 4, 5, 6}, series.Float, ""),
				Operation: Add,
			},
			want: series.New([]float64{4, 5, 6, 7, 8}, series.Float, ""),
		},
		{
			name: "add float64 with bool",
			node: &OperationNode{
				LeftVal:   float64(2.0),
				RightVal:  true,
				Operation: Add,
			},
			wantErr: true,
		},
		{
			name: "divide float series",
			node: &OperationNode{
				LeftVal:   series.New([]float64{2, 3, 4, 5, 6}, series.Float, ""),
				RightVal:  series.New([]int{2}, series.Int, ""),
				Operation: Divide,
			},
			want: series.New([]float64{1, 1.5, 2, 2.5, 3}, series.Float, ""),
		},
		{
			name: "modulo int series",
			node: &OperationNode{
				LeftVal:   series.New([]int{2, 3, 4, 5, 6}, series.Int, ""),
				RightVal:  series.New([]int{2}, series.Int, ""),
				Operation: Modulo,
			},
			want: series.New([]int{0, 1, 0, 1, 0}, series.Int, ""),
		},
		{
			name: "add int with float",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  5.2,
				Operation: Add,
			},
			want: 9.2,
		},
		{
			name: "multiply int with bool",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  true,
				Operation: Multiply,
			},
			wantErr: true,
		},
		{
			name: "greater int with float",
			node: &OperationNode{
				LeftVal:   3,
				RightVal:  3.5,
				Operation: Greater,
			},
			want: false,
		},
		{
			name: "and operator for bool",
			node: &OperationNode{
				LeftVal:   true,
				RightVal:  false,
				Operation: And,
			},
			want: false,
		},
		{
			name: "and operator for bool and int",
			node: &OperationNode{
				LeftVal:   true,
				RightVal:  3,
				Operation: And,
			},
			wantErr: true,
		},
		{
			name: "or operator for string and bool",
			node: &OperationNode{
				LeftVal:   3,
				RightVal:  true,
				Operation: Or,
			},
			wantErr: true,
		},
		{
			name: "or operator for bool",
			node: &OperationNode{
				LeftVal:   true,
				RightVal:  false,
				Operation: Or,
			},
			want: true,
		},
		{
			name: "and operation between bool and series of bool",
			node: &OperationNode{
				LeftVal:   true,
				RightVal:  series.New([]bool{false, true, false, true}, series.Bool, ""),
				Operation: And,
			},
			want: series.New([]bool{false, true, false, true}, series.Bool, ""),
		},
		{
			name: "or operation between bool and series of bool",
			node: &OperationNode{
				LeftVal:   true,
				RightVal:  series.New([]bool{false, true, false, true}, series.Bool, ""),
				Operation: Or,
			},
			want: series.New([]bool{true, true, true, true}, series.Bool, ""),
		},
		{
			name: "and operation between series of bool",
			node: &OperationNode{
				LeftVal:   series.New([]bool{false, true, false, true}, series.Bool, ""),
				RightVal:  series.New([]bool{false, true, false, true}, series.Bool, ""),
				Operation: And,
			},
			want: series.New([]bool{false, true, false, true}, series.Bool, ""),
		},
		{
			name: "or operation between series of bool",
			node: &OperationNode{
				LeftVal:   series.New([]bool{false, true, false, true}, series.Bool, ""),
				RightVal:  series.New([]bool{false, true, false, true}, series.Bool, ""),
				Operation: Or,
			},
			want: series.New([]bool{false, true, false, true}, series.Bool, ""),
		},
		{
			name: "or operation between series of bool different dimension",
			node: &OperationNode{
				LeftVal:   series.New([]bool{false, true, false, true, true}, series.Bool, ""),
				RightVal:  series.New([]bool{false, true, false, true}, series.Bool, ""),
				Operation: Or,
			},
			wantErr: true,
		},
		{
			name: "greater: int with float64",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  float64(3.8),
				Operation: Greater,
			},
			want: true,
		},
		{
			name: "greater equal: int64",
			node: &OperationNode{
				LeftVal:   int64(2),
				RightVal:  int64(2),
				Operation: GreaterEq,
			},
			want: true,
		},
		{
			name: "greater equal: int64 with series",
			node: &OperationNode{
				LeftVal:   int64(2),
				RightVal:  series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				Operation: GreaterEq,
			},
			want: series.New([]bool{true, true, false, false, false}, series.Bool, ""),
		},
		{
			name: "greater: series with int",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  3,
				Operation: Greater,
			},
			want: series.New([]bool{false, false, false, true, true}, series.Bool, ""),
		},
		{
			name: "less operation: int with float64",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  float64(3.8),
				Operation: Less,
			},
			want: false,
		},
		{
			name: "less operation: float64",
			node: &OperationNode{
				LeftVal:   float64(2),
				RightVal:  float64(2),
				Operation: Less,
			},
			want: false,
		},
		{
			name: "less operation: int64 with series",
			node: &OperationNode{
				LeftVal:   int64(2),
				RightVal:  series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				Operation: Less,
			},
			want: series.New([]bool{false, false, true, true, true}, series.Bool, ""),
		},
		{
			name: "less operation: series with int",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  3,
				Operation: Less,
			},
			want: series.New([]bool{true, true, false, false, false}, series.Bool, ""),
		},
		{
			name: "lesseq operation: int with float64",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  float64(3.8),
				Operation: LessEq,
			},
			want: false,
		},
		{
			name: "lesseq operation: int64",
			node: &OperationNode{
				LeftVal:   int64(2),
				RightVal:  int64(2),
				Operation: LessEq,
			},
			want: true,
		},
		{
			name: "lesseq operation: int64 with series",
			node: &OperationNode{
				LeftVal:   int64(2),
				RightVal:  series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				Operation: LessEq,
			},
			want: series.New([]bool{false, true, true, true, true}, series.Bool, ""),
		},
		{
			name: "lesseq operation: series with int",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  3,
				Operation: LessEq,
			},
			want: series.New([]bool{true, true, true, false, false}, series.Bool, ""),
		},
		{
			name: "lesseq operation: float with series",
			node: &OperationNode{
				LeftVal:   float64(3.2),
				RightVal:  series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				Operation: LessEq,
			},
			want: series.New([]bool{false, false, false, true, true}, series.Bool, ""),
		},
		{
			name: "equal series with series",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  series.New([]int{1, 2, 3, 5, 5}, series.Int, ""),
				Operation: Eq,
			},
			want: series.New([]bool{true, true, true, false, true}, series.Bool, ""),
		},
		{
			name: "equal list series with list series",
			node: &OperationNode{
				LeftVal:   series.New([][]interface{}{{1, 2, 4}, nil}, series.IntList, ""),
				RightVal:  series.New([][]interface{}{{1, 2, 4}, nil}, series.IntList, ""),
				Operation: Eq,
			},
			want: series.New([]bool{true, true}, series.Bool, ""),
		},
		{
			name: "not equal series with series",
			node: &OperationNode{
				LeftVal:   series.New([]interface{}{1, 2, 3, 4, nil}, series.Int, ""),
				RightVal:  nil,
				Operation: Neq,
			},
			want: series.New([]bool{true, true, true, true, false}, series.Bool, ""),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.node.Execute()
			if (err != nil) != tt.wantErr {
				t.Errorf("OperationNode.Execute() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// NaN never compares equal to itself, so division-by-zero results
			// must be checked with math.IsNaN rather than reflect.DeepEqual.
			if want, validType := tt.want.(float64); validType && math.IsNaN(want) {
				gotFloat64, ok := got.(float64)
				assert.True(t, ok)
				if !math.IsNaN(gotFloat64) {
					t.Errorf("OperationNode.Execute() = %v, want %v", got, tt.want)
				}
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("OperationNode.Execute() = %v, want %v", got, tt.want)
			}
		})
	}
}
// TestOperationNode_ExecuteSubset verifies that ExecuteSubset applies the
// operation only to rows selected by the boolean subset mask (unselected rows
// become nil), and that scalar-only operations ignore a nil subset.
func TestOperationNode_ExecuteSubset(t *testing.T) {
	tests := []struct {
		name    string
		node    *OperationNode
		subset  *series.Series
		want    interface{}
		wantErr bool
	}{
		{
			name: "add two series",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  series.New([]int{2, 3, 4, 5, 6}, series.Int, ""),
				Operation: Add,
			},
			subset: series.New([]bool{true, true, false, false, false}, series.Bool, ""),
			want:   series.New([]interface{}{3, 5, nil, nil, nil}, series.Int, ""),
		},
		{
			name: "substract two series",
			node: &OperationNode{
				LeftVal:   series.New([]int{1, 2, 3, 4, 5}, series.Int, ""),
				RightVal:  series.New([]int{2, 3, 4, 5, 6}, series.Int, ""),
				Operation: Substract,
			},
			subset: series.New([]bool{false, false, false, false, true}, series.Bool, ""),
			want:   series.New([]interface{}{nil, nil, nil, nil, -1}, series.Int, ""),
		},
		{
			name: "multiply int with float series",
			node: &OperationNode{
				LeftVal:   2,
				RightVal:  series.New([]float64{2, 3, 4, 5, 6}, series.Float, ""),
				Operation: Multiply,
			},
			subset: series.New([]bool{false, false, false, false, true}, series.Bool, ""),
			want:   series.New([]interface{}{nil, nil, nil, nil, 12}, series.Float, ""),
		},
		{
			name: "divide float series",
			node: &OperationNode{
				LeftVal:   series.New([]float64{2, 3, 4, 5, 6}, series.Float, ""),
				RightVal:  series.New([]int{2, 2, 2, 2, 0}, series.Int, ""),
				Operation: Divide,
			},
			subset: series.New([]bool{true, true, false, true, true}, series.Bool, ""),
			want:   series.New([]interface{}{1, 1.5, nil, 2.5, nil}, series.Float, ""),
		},
		{
			name: "modulo int series",
			node: &OperationNode{
				LeftVal:   series.New([]int{2, 3, 4, 5, 6}, series.Int, ""),
				RightVal:  series.New([]int{2}, series.Int, ""),
				Operation: Modulo,
			},
			subset: series.New([]bool{false, false, true, false, false}, series.Bool, ""),
			want:   series.New([]interface{}{nil, nil, 0, nil, nil}, series.Int, ""),
		},
		{
			name: "add int with float",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  5.2,
				Operation: Add,
			},
			want: 9.2,
		},
		{
			name: "multiply int with bool",
			node: &OperationNode{
				LeftVal:   4,
				RightVal:  true,
				Operation: Multiply,
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.node.ExecuteSubset(tt.subset)
			if (err != nil) != tt.wantErr {
				t.Errorf("OperationNode.ExecuteSubset() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("OperationNode.ExecuteSubset() = %v, want %v", got, tt.want)
			}
		})
	}
}
// TestRegisterOperation verifies how RegisterOperation builds the operation
// chain: plain operands produce a single node, while an OperationNode operand
// (value or pointer) becomes the head of the chain with the new operation
// appended via Next.
func TestRegisterOperation(t *testing.T) {
	type args struct {
		leftVal  interface{}
		rightVal interface{}
		operator Operator
	}
	tests := []struct {
		name string
		args args
		want *OperationNode
	}{
		{
			name: "simple left and right val",
			args: args{
				leftVal:  2,
				rightVal: 3,
				operator: Add,
			},
			want: &OperationNode{
				LeftVal:   2,
				RightVal:  3,
				Operation: Add,
			},
		},
		{
			name: "left value is operation node",
			args: args{
				leftVal: OperationNode{
					LeftVal:   1,
					RightVal:  3,
					Operation: Multiply,
				},
				rightVal: &OperationNode{
					LeftVal:   2,
					RightVal:  4,
					Operation: Add,
				},
				operator: Substract,
			},
			want: &OperationNode{
				LeftVal:   1,
				RightVal:  3,
				Operation: Multiply,
				Next: &OperationNode{
					LeftVal: nil,
					RightVal: &OperationNode{
						LeftVal:   2,
						RightVal:  4,
						Operation: Add,
					},
					Operation: Substract,
				},
			},
		},
		{
			name: "left value is pointer operation node",
			args: args{
				leftVal: &OperationNode{
					LeftVal:   1,
					RightVal:  3,
					Operation: Multiply,
				},
				rightVal: &OperationNode{
					LeftVal:   2,
					RightVal:  4,
					Operation: Add,
				},
				operator: Substract,
			},
			want: &OperationNode{
				LeftVal:   1,
				RightVal:  3,
				Operation: Multiply,
				Next: &OperationNode{
					LeftVal: nil,
					RightVal: &OperationNode{
						LeftVal:   2,
						RightVal:  4,
						Operation: Add,
					},
					Operation: Substract,
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := RegisterOperation(tt.args.leftVal, tt.args.rightVal, tt.args.operator); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("RegisterOperation() = %v, want %v", got, tt.want)
			}
		})
	}
}
|
<reponame>rafaelbr/chatbot-clima
import flask, json
import os
from utils import post_facebook_message, get_weather
from flask import Blueprint, request, jsonify, make_response
from flask_restful import Api, Resource
# Blueprint exposing diagnostic endpoints; all of its resources are served
# under the /test URL prefix.
testBP = Blueprint('test', __name__)
api = Api(testBP, prefix='/test')
class TestEndpoint(Resource):
    """Diagnostic resource that echoes the bound service credentials.

    Reads the ``VCAP_SERVICES`` environment variable (JSON) and returns it
    pretty-printed; useful for verifying platform service bindings.
    """

    def __init__(self):
        # VCAP_SERVICES must be present in the environment; parse it once
        # when the resource is instantiated.
        self.vcap = json.loads(os.environ['VCAP_SERVICES'])

    def get(self):
        # Pretty-print with a two-space indent so the bindings are readable.
        body = json.dumps(self.vcap, indent=2)
        return make_response(body)
# Register the endpoint at the blueprint root (i.e. served at /test/).
api.add_resource(TestEndpoint, '/')
|
<reponame>schinmayee/nimbus
//#####################################################################
// Copyright 2006-2009, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Arrays/INDIRECT_ARRAY.h>
#include <PhysBAM_Tools/Log/LOG.h>
#include <PhysBAM_Tools/Read_Write/Arrays/READ_WRITE_ARRAY.h>
#include <PhysBAM_Tools/Read_Write/Matrices_And_Vectors/READ_WRITE_DIAGONAL_MATRIX_3X3.h>
#include <PhysBAM_Tools/Read_Write/Matrices_And_Vectors/READ_WRITE_MATRIX_0X0.h>
#include <PhysBAM_Tools/Read_Write/Matrices_And_Vectors/READ_WRITE_MATRIX_1X1.h>
#include <PhysBAM_Tools/Read_Write/Utilities/FILE_UTILITIES.h>
#include <PhysBAM_Tools/Utilities/DEBUG_CAST.h>
#include <PhysBAM_Geometry/Collisions/COLLISION_GEOMETRY.h>
#include <PhysBAM_Geometry/Collisions/COLLISION_GEOMETRY_COLLECTION.h>
#include <PhysBAM_Geometry/Collisions/COLLISION_GEOMETRY_IMPULSE_ACCUMULATOR.h>
#include <PhysBAM_Geometry/Read_Write/Geometry/READ_WRITE_RIGID_GEOMETRY_COLLECTION.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/STRUCTURE_LIST.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Articulated_Rigid_Bodies/ARTICULATED_RIGID_BODY_1D.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Articulated_Rigid_Bodies/ARTICULATED_RIGID_BODY_2D.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Articulated_Rigid_Bodies/ARTICULATED_RIGID_BODY_3D.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Forces_And_Torques/RIGIDS_EXAMPLE_FORCES_AND_VELOCITIES.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Forces_And_Torques/RIGIDS_FORCES.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY_EVOLUTION_PARAMETERS.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Body_Clusters/RIGID_BODY_CLUSTER_BINDINGS.h>
#include <climits>
namespace PhysBAM{
// Allocation hook handed to the geometry collection so that entries it
// creates are RIGID_BODY instances tied back to this collection.
template<class TV>
struct ALLOCATE_BODY_HELPER:public ALLOCATE_HELPER<TV>
{
    RIGID_BODY_COLLECTION<TV>& collection; // owning collection passed to every new body
    ALLOCATE_BODY_HELPER(RIGID_BODY_COLLECTION<TV>& collection_input):collection(collection_input) {}
    RIGID_BODY<TV>* Create(int index=0) PHYSBAM_OVERRIDE {return new RIGID_BODY<TV>(collection,true,index);}
    virtual ~ALLOCATE_BODY_HELPER(){}
};
//#####################################################################
// Constructor
//#####################################################################
// Constructor. Builds the collection on top of a caller-supplied particle
// ARRAY_COLLECTION (or a fresh one when null). The articulated-rigid-body and
// cluster-binding helpers are heap-allocated here and freed in the destructor;
// the geometry collection's static/kinematic arrays are aliased as
// static_rigid_bodies / kinematic_rigid_bodies.
// NOTE(review): the ALLOCATE_BODY_HELPER passed to rigid_geometry_collection
// appears to be owned by that collection — confirm before changing lifetimes.
template<class TV> RIGID_BODY_COLLECTION<TV>::
RIGID_BODY_COLLECTION(RIGIDS_EXAMPLE_FORCES_AND_VELOCITIES<TV>* rigids_example_forces_and_velocities_input,COLLISION_GEOMETRY_COLLECTION<TV>* collision_body_list_input,
    ARRAY_COLLECTION* array_collection)
    :rigid_body_particle(array_collection?array_collection:new ARRAY_COLLECTION),
    rigid_geometry_collection(rigid_body_particle,rigids_example_forces_and_velocities_input,collision_body_list_input,new ALLOCATE_BODY_HELPER<TV>(*this)),
    articulated_rigid_body(*new ARTICULATED_RIGID_BODY<TV>(*this)),rigid_body_cluster_bindings(*new RIGID_BODY_CLUSTER_BINDINGS<TV>(*this,articulated_rigid_body)),
    rigids_example_forces_and_velocities(rigids_example_forces_and_velocities_input),dynamic_rigid_body_particles(0),static_rigid_bodies(rigid_geometry_collection.static_rigid_geometry),
    kinematic_rigid_bodies(rigid_geometry_collection.kinematic_rigid_geometry),static_and_kinematic_rigid_bodies(0),print_diagnostics(false),print_residuals(false),print_energy(false),iterations_used_diagnostic(0)
{rigid_body_particle.Store_Velocity();} // enable velocity storage on the particle array
//#####################################################################
// Destructor
//#####################################################################
// Releases the force objects and the helper subsystems allocated in the constructor.
template<class TV> RIGID_BODY_COLLECTION<TV>::
~RIGID_BODY_COLLECTION()
{
rigids_forces.Delete_Pointers_And_Clean_Memory(); // collection owns forces added via Add_Force
delete &articulated_rigid_body; // new'ed in the constructor, held by reference
delete &rigid_body_cluster_bindings; // likewise new'ed in the constructor
}
//#####################################################################
// Function Rigid_Body
//#####################################################################
// Returns the rigid body stored at the given (1-based) particle index.
// debug_cast is checked only in debug builds; the geometry entry is assumed to
// actually be a RIGID_BODY (guaranteed by ALLOCATE_BODY_HELPER::Create).
template<class TV> RIGID_BODY<TV>& RIGID_BODY_COLLECTION<TV>::
Rigid_Body(const int particle_index)
{
return *debug_cast<RIGID_BODY<TV>*>(rigid_body_particle.rigid_geometry(particle_index));
}
//#####################################################################
// Function Rigid_Body
//#####################################################################
// Const overload of Rigid_Body; same cast assumption as the non-const version.
template<class TV> const RIGID_BODY<TV>& RIGID_BODY_COLLECTION<TV>::
Rigid_Body(const int particle_index) const
{
return *debug_cast<RIGID_BODY<TV>*>(rigid_body_particle.rigid_geometry(particle_index));
}
//#####################################################################
// Function Add_Rigid_Body
//#####################################################################
// structures already added to their respective lists
// Records the (already registered) structure ids for an existing body's particle.
// Slot 1 holds the simplicial boundary, slot 2 the implicit object, slot 3 the
// simplicial interior; an id of 0 leaves the slot untouched.
template<class TV> int RIGID_BODY_COLLECTION<TV>::
Add_Rigid_Body(RIGID_BODY<TV>* rigid_body,const int simplicial_boundary_id,const int implicit_object_id,const int simplicial_interior_id)
{
int id=rigid_body->particle_index;
if(simplicial_boundary_id) rigid_body_particle.structure_ids(id)(1)=simplicial_boundary_id;
if(implicit_object_id) rigid_body_particle.structure_ids(id)(2)=implicit_object_id;
if(simplicial_interior_id) rigid_body_particle.structure_ids(id)(3)=simplicial_interior_id;
// Sanity check: every nonzero recorded id must resolve in the structure list.
for(int i=1;i<=rigid_body_particle.structure_ids(id).m;i++) if(rigid_body_particle.structure_ids(id)(i) && !rigid_geometry_collection.structure_list.Element(rigid_body_particle.structure_ids(id)(i))) PHYSBAM_FATAL_ERROR();
return id;
}
//#####################################################################
// Function Add_Rigid_Body_And_Geometry
//#####################################################################
// adds body's segmented curve and implicit curve to their respective lists
// Registers the body and its geometry with the underlying rigid geometry collection
// and returns the assigned particle index.
template<class TV> int RIGID_BODY_COLLECTION<TV>::
Add_Rigid_Body_And_Geometry(RIGID_BODY<TV>* rigid_body)
{
int id=rigid_geometry_collection.Add_Rigid_Geometry(rigid_body);
return id;
}
//#####################################################################
// Function Add_Rigid_Body
//#####################################################################
// Convenience overload: loads a body from files assuming it is not a thin shell.
template<class TV> int RIGID_BODY_COLLECTION<TV>::
Add_Rigid_Body(const STREAM_TYPE stream_type,const std::string& basename,const T scaling_factor,const bool read_simplicial_boundary,const bool read_implicit_object,
const bool read_simplicial_interior,const bool read_rgd_file)
{
const bool thin_shell=false;
return Add_Rigid_Body(stream_type,thin_shell,basename,scaling_factor,read_simplicial_boundary,read_implicit_object,read_simplicial_interior,read_rgd_file);
}
//#####################################################################
// Function Add_Rigid_Body
//#####################################################################
// Creates a new rigid body, optionally reading mass/inertia/frame from a ".rgd"
// (or ".rgd2d" in 2D) file, then registers the body and its geometry files with the
// rigid geometry collection.  A missing rgd file is non-fatal: defaults are used.
template<class TV> int RIGID_BODY_COLLECTION<TV>::
Add_Rigid_Body(const STREAM_TYPE stream_type,const bool thin_shell,const std::string& basename,const T scaling_factor,const bool read_simplicial_boundary,const bool read_implicit_object,
const bool read_simplicial_interior,const bool read_rgd_file)
{
RIGID_BODY<TV>* rigid_body=new RIGID_BODY<TV>(*this,true);
rigid_body->thin_shell=thin_shell;
// rigid body
std::string rgd=TV::dimension==2?"rgd2d":"rgd";
if(read_rgd_file){
try{FILE_UTILITIES::Read_From_File(stream_type,basename+"."+rgd,rigid_body->Mass(),rigid_body->Inertia_Tensor(),rigid_body->X(),rigid_body->Rotation());}
catch(FILESYSTEM_ERROR&){{std::stringstream ss;ss<<"Note: No "<<rgd<<" file for "<<basename<<" (using default values)"<<std::endl;LOG::filecout(ss.str());}}}
if(scaling_factor!=1) rigid_body->Rescale(scaling_factor); // rescales geometry and mass properties together
rigid_body->Update_Angular_Velocity(); // keep angular velocity consistent with the momentum just read
int id=rigid_geometry_collection.Add_Rigid_Geometry(rigid_body,stream_type,basename,scaling_factor,read_simplicial_boundary,read_implicit_object,read_simplicial_interior,read_rgd_file);
return id;
}
//#####################################################################
// Function Reset_Impulse_Accumulators
//#####################################################################
// Resets the impulse accumulator of every active body that is registered in the
// collision body list and actually has an accumulator attached.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Reset_Impulse_Accumulators()
{
for(int i=1;i<=rigid_body_particle.array_collection->Size();i++)
// The geometry id must first be mapped to a collision geometry id before the body can be looked up.
if(Is_Active(i) && rigid_geometry_collection.collision_body_list->geometry_id_to_collision_geometry_id.Contains(i) && rigid_geometry_collection.collision_body_list->bodies(rigid_geometry_collection.collision_body_list->geometry_id_to_collision_geometry_id.Get(i))->impulse_accumulator)
rigid_geometry_collection.collision_body_list->bodies(rigid_geometry_collection.collision_body_list->geometry_id_to_collision_geometry_id.Get(i))->impulse_accumulator->Reset();
}
//#####################################################################
// Function Update_Angular_Velocity
//#####################################################################
// Recomputes angular velocity for every active rigid body.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Update_Angular_Velocity()
{
for(int i=1;i<=rigid_body_particle.array_collection->Size();i++){
    if(!Is_Active(i)) continue;
    Rigid_Body(i).Update_Angular_Velocity();}
}
//#####################################################################
// Function Update_Angular_Momentum
//#####################################################################
// Recomputes angular momentum for every active rigid body.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Update_Angular_Momentum()
{
for(int i=1;i<=rigid_body_particle.array_collection->Size();i++){
    if(!Is_Active(i)) continue;
    Rigid_Body(i).Update_Angular_Momentum();}
}
//#####################################################################
// Function Update_Angular_Velocity
//#####################################################################
// Recomputes angular velocity for the given list of particle indices only.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Update_Angular_Velocity(const ARRAY<int>& rigid_body_particles)
{
for(int k=1;k<=rigid_body_particles.m;k++){
    int p=rigid_body_particles(k);
    Rigid_Body(p).Update_Angular_Velocity();}
}
//#####################################################################
// Function Update_Angular_Momentum
//#####################################################################
// Recomputes angular momentum for the given list of particle indices only.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Update_Angular_Momentum(const ARRAY<int>& rigid_body_particles)
{
for(int k=1;k<=rigid_body_particles.m;k++){
    int p=rigid_body_particles(k);
    Rigid_Body(p).Update_Angular_Momentum();}
}
//#####################################################################
// Function Read
//#####################################################################
// Reads the rigid geometry collection for one frame, dispatching on the stream's
// float/double precision.  needs_init/needs_destroy (optional) receive the particle
// indices created/removed by the read.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Read(const STREAM_TYPE stream_type,const std::string& directory,const int frame,ARRAY<int>* needs_init,ARRAY<int>* needs_destroy)
{
if(!stream_type.use_doubles)
Read_Write<RIGID_GEOMETRY_COLLECTION<TV>,float>::Read(stream_type,directory,frame,rigid_geometry_collection,needs_init,needs_destroy);
else
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
Read_Write<RIGID_GEOMETRY_COLLECTION<TV>,double>::Read(stream_type,directory,frame,rigid_geometry_collection,needs_init,needs_destroy);
#else
PHYSBAM_FATAL_ERROR("Cannot read doubles");
#endif
}
//#####################################################################
// Function Write
//#####################################################################
// Writes the rigid geometry collection and the articulated rigid body state for one
// frame, dispatching on the stream's float/double precision.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Write(const STREAM_TYPE stream_type,const std::string& directory,const int frame) const
{
if(!stream_type.use_doubles)
Read_Write<RIGID_GEOMETRY_COLLECTION<TV>,float>::Write(stream_type,directory,frame,rigid_geometry_collection);
else
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
Read_Write<RIGID_GEOMETRY_COLLECTION<TV>,double>::Write(stream_type,directory,frame,rigid_geometry_collection);
#else
PHYSBAM_FATAL_ERROR("Cannot write doubles"); // fixed: previously said "Cannot read doubles" (copy-paste from Read)
#endif
articulated_rigid_body.Write(stream_type,directory,frame);
}
//#####################################################################
// Function Update_Simulated_Particles
//#####################################################################
// Rebuilds the cached index lists (simulated, dynamic, static, kinematic, and
// static+kinematic) from the current active/simulated flags, then tells every force
// which particles participate.  The statement order matters: the example hook and the
// cluster bindings both filter the flag array between list-building passes.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Update_Simulated_Particles()
{
int rigid_particles_number=rigid_body_particle.array_collection->Size();
ARRAY<bool> particle_is_simulated(rigid_particles_number);
// Particles queued for deletion are never simulated.
INDIRECT_ARRAY<ARRAY<bool>,ARRAY<int>&> simulated_subset=particle_is_simulated.Subset(rigid_body_particle.array_collection->deletion_list);
ARRAYS_COMPUTATIONS::Fill(simulated_subset,false);
for(int i=1;i<=rigid_particles_number;i++)
if(Is_Active(i) && Rigid_Body(i).Is_Simulated()) // TODO: Can't everything be defaulted to true?
particle_is_simulated(i)=true;
// Give the example a chance to override the flags, then re-apply the basic criteria.
rigids_example_forces_and_velocities->Set_Rigid_Particle_Is_Simulated(particle_is_simulated);
for(int i=1;i<=rigid_particles_number;i++)
if(!Is_Active(i) || !Rigid_Body(i).Is_Simulated())
particle_is_simulated(i)=false;
simulated_rigid_body_particles.Remove_All();
dynamic_rigid_body_particles.Remove_All();
for(int p=1;p<=rigid_particles_number;p++) if(particle_is_simulated(p)) simulated_rigid_body_particles.Append(p);
// Hard-bound cluster children are cleared from the flags, so the dynamic list below
// is the simulated list minus hard-bound particles.
rigid_body_cluster_bindings.Clear_Hard_Bound_Particles(particle_is_simulated);
for(int p=1;p<=rigid_particles_number;p++) if(particle_is_simulated(p)) dynamic_rigid_body_particles.Append(p);
static_rigid_bodies.Remove_All();kinematic_rigid_bodies.Remove_All();static_and_kinematic_rigid_bodies.Remove_All();
for(int p=1;p<=rigid_particles_number;p++) if(Is_Active(p)){RIGID_BODY<TV>& rigid_body=Rigid_Body(p);
if(rigid_body.is_static){static_rigid_bodies.Append(p);static_and_kinematic_rigid_bodies.Append(p);}
if(rigid_body_particle.kinematic(p)){kinematic_rigid_bodies.Append(p);static_and_kinematic_rigid_bodies.Append(p);}}
// Forces (e.g. for MPI partitioning) are told which particles are simulated.
ARRAY<bool> rigid_particle_is_simulated(rigid_particles_number);
INDIRECT_ARRAY<ARRAY<bool>,ARRAY<int>&> simulated_particle_subset=rigid_particle_is_simulated.Subset(simulated_rigid_body_particles);
ARRAYS_COMPUTATIONS::Fill(simulated_particle_subset,true);
for(int i=1;i<=rigids_forces.m;i++) rigids_forces(i)->Update_Mpi(rigid_particle_is_simulated);
}
//#####################################################################
// Function Add_Velocity_Independent_Forces
//#####################################################################
// Accumulates velocity-independent force contributions into rigid_F_full.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Add_Velocity_Independent_Forces(ARRAY_VIEW<TWIST<TV> > rigid_F_full,const T time) const
{
for(int i=1;i<=rigids_forces.m;i++){
    if(!rigids_forces(i)->use_velocity_independent_forces) continue;
    rigids_forces(i)->Add_Velocity_Independent_Forces(rigid_F_full,time);}
}
//#####################################################################
// Function Add_Velocity_Dependent_Forces
//#####################################################################
// can depend on position too
// can depend on position too
// Accumulates velocity-dependent force contributions into rigid_F_full.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Add_Velocity_Dependent_Forces(ARRAY_VIEW<const TWIST<TV> > rigid_V_full,ARRAY_VIEW<TWIST<TV> > rigid_F_full,const T time) const
{
for(int i=1;i<=rigids_forces.m;i++){
    if(!rigids_forces(i)->use_velocity_dependent_forces) continue;
    rigids_forces(i)->Add_Velocity_Dependent_Forces(rigid_V_full,rigid_F_full,time);}
}
//#####################################################################
// Function Implicit_Velocity_Independent_Forces
//#####################################################################
// Accumulates scaled implicit velocity-independent force contributions into
// rigid_F_full.  The dynamic subset is zeroed first so the final scaling applies to
// exactly what was accumulated here; scaling is skipped entirely if no force
// contributed.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Implicit_Velocity_Independent_Forces(ARRAY_VIEW<const TWIST<TV> > rigid_V_full,ARRAY_VIEW<TWIST<TV> > rigid_F_full,const T scale,const T time) const
{
assert(rigid_F_full.Size()==rigid_body_particle.array_collection->Size());
INDIRECT_ARRAY<ARRAY_VIEW<TWIST<TV> > > rigid_F_subset(rigid_F_full.Subset(dynamic_rigid_body_particles));
ARRAYS_COMPUTATIONS::Fill(rigid_F_subset,TWIST<TV>()); // note we zero here because we will scale the forces below
bool added=false;
for(int k=1;k<=rigids_forces.m;k++) if(rigids_forces(k)->use_implicit_velocity_independent_forces){
rigids_forces(k)->Add_Implicit_Velocity_Independent_Forces(rigid_V_full,rigid_F_full,time);added=true;}
// NOTE(review): zeroing uses the dynamic subset but scaling uses the simulated subset — confirm intended.
if(added) rigid_F_full.Subset(simulated_rigid_body_particles)*=scale;
}
//#####################################################################
// Function Update_Position_Based_State
//#####################################################################
// Gives each position-based force a chance to refresh its cached state.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Update_Position_Based_State(const T time)
{
for(int i=1;i<=rigids_forces.m;i++){
    if(!rigids_forces(i)->use_position_based_state) continue;
    rigids_forces(i)->Update_Position_Based_State(time);}
}
//#####################################################################
// Function Compute_Energy
//#####################################################################
// Sums potential energy over all forces and kinetic energy over dynamic bodies;
// both outputs are overwritten.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Compute_Energy(const T time,T& kinetic_energy,T& potential_energy) const
{
kinetic_energy=0;
potential_energy=0;
for(int k=1;k<=rigids_forces.m;k++) potential_energy+=rigids_forces(k)->Potential_Energy(time);
for(int k=1;k<=dynamic_rigid_body_particles.m;k++){
    int p=dynamic_rigid_body_particles(k);
    kinetic_energy+=Rigid_Body(p).Kinetic_Energy();}
}
//#####################################################################
// Function Print_Energy
//#####################################################################
// Logs total, kinetic, and potential energy for the given step when print_energy is on.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Print_Energy(const T time,const int step) const
{
if(!print_energy) return;
T potential_energy=0,kinetic_energy=0;
Compute_Energy(time,kinetic_energy,potential_energy);
std::stringstream ss;
ss<<"total energy = "<<(potential_energy+kinetic_energy)<<" (KE = "<<kinetic_energy<<" PE = "<<potential_energy<<") Step "<<step<<std::endl;
LOG::filecout(ss.str());
}
//#####################################################################
// Function CFL_Rigid
//#####################################################################
// Computes the rigid-body CFL time step: limited by each body's motion bound, scaled
// by rigid_cfl, then clamped to [rigid_minimum_dt,rigid_maximum_dt].  Returns FLT_MAX
// if there are no active bodies.
template<class TV> typename TV::SCALAR RIGID_BODY_COLLECTION<TV>::
CFL_Rigid(const RIGID_BODY_EVOLUTION_PARAMETERS<TV>& rigid_body_evolution_parameters,const bool verbose_dt)
{
// Function-local static caches the last logged width so the stat is only printed
// when it changes.  NOTE(review): shared across all instances and not thread-safe —
// confirm single-threaded use.
static T static_min_bounding_box_width=FLT_MAX;
T min_bounding_box_width=FLT_MAX;
for(int i(1);i<=rigid_body_particle.array_collection->Size();i++) if(Is_Active(i)){
const RANGE<TV>& box=Rigid_Body(i).Object_Space_Bounding_Box();
TV edge_lengths=box.Edge_Lengths();min_bounding_box_width=min(min_bounding_box_width,edge_lengths.Min());}
if(min_bounding_box_width!=static_min_bounding_box_width){
static_min_bounding_box_width=min_bounding_box_width;
LOG::Stat("minimum rigid body bounding box width",min_bounding_box_width);}
// Cap per-step translation to a fraction of the smallest bounding-box width.
T max_distance_per_time_step=rigid_body_evolution_parameters.max_rigid_body_linear_movement_fraction_per_time_step*min_bounding_box_width;
T dt=FLT_MAX;
bool no_active_bodies=true;
for(int p=1;p<=rigid_body_particle.array_collection->Size();p++) if(Is_Active(p)){
dt=min(dt,Rigid_Body(p).CFL(max_distance_per_time_step,rigid_body_evolution_parameters.max_rigid_body_rotation_per_time_step,verbose_dt));
no_active_bodies=false;}
if(no_active_bodies) return FLT_MAX; // don't apply rigid dt bounds if there aren't any active rigid bodies
dt=Robust_Multiply(rigid_body_evolution_parameters.rigid_cfl,dt);
T dt_clamped=clamp(dt,rigid_body_evolution_parameters.rigid_minimum_dt,rigid_body_evolution_parameters.rigid_maximum_dt);
if(dt_clamped>dt && verbose_dt) {std::stringstream ss;ss<<"Warning: taking larger time step ("<<dt_clamped<<") than CFL dt ("<<dt<<")"<<std::endl;LOG::filecout(ss.str());}
return dt_clamped;
}
//#####################################################################
// Function Add_Force
//#####################################################################
// Takes ownership of the force (deleted in the destructor), gives it the default
// CFL number of 0.5, and returns its 1-based slot in the force list.
template<class TV> int RIGID_BODY_COLLECTION<TV>::
Add_Force(RIGIDS_FORCES<TV>* force)
{
force->Set_CFL_Number((T).5);
rigids_forces.Append(force);
return rigids_forces.m;
}
//#####################################################################
// Function Save_Potential_Energy
//#####################################################################
// Forwards Save_Potential_Energy to every registered force.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Save_Potential_Energy(const T time)
{
for(int k=1;k<=rigids_forces.m;k++)
    rigids_forces(k)->Save_Potential_Energy(time);
}
//#####################################################################
// Function Compute_Energy_Error
//#####################################################################
// Forwards Compute_Energy_Error to every registered force.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Compute_Energy_Error(ARRAY_VIEW<const TWIST<TV> > rigid_velocity_save,const T time,const T dt)
{
for(int k=1;k<=rigids_forces.m;k++)
    rigids_forces(k)->Compute_Energy_Error(rigid_velocity_save,time,dt);
}
//#####################################################################
// Function Add_Energy_Correction_Force
//#####################################################################
// Forwards Add_Energy_Correction_Force to every registered force.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Add_Energy_Correction_Force(ARRAY_VIEW<const TWIST<TV> > rigid_velocity_save,const T time,const T dt)
{
for(int k=1;k<=rigids_forces.m;k++)
    rigids_forces(k)->Add_Energy_Correction_Force(rigid_velocity_save,time,dt);
}
//#####################################################################
// Function Compute_Previously_Applied_Forces
//#####################################################################
// Forwards Compute_Previously_Applied_Forces to every registered force.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Compute_Previously_Applied_Forces()
{
for(int k=1;k<=rigids_forces.m;k++)
    rigids_forces(k)->Compute_Previously_Applied_Forces();
}
//#####################################################################
// Function Store_Velocities
//#####################################################################
// Forwards Store_Velocities to every registered force.
template<class TV> void RIGID_BODY_COLLECTION<TV>::
Store_Velocities()
{
for(int k=1;k<=rigids_forces.m;k++)
    rigids_forces(k)->Store_Velocities();
}
//#####################################################################
// Explicit instantiations for all supported vector types.
template class RIGID_BODY_COLLECTION<VECTOR<float,1> >;
template class RIGID_BODY_COLLECTION<VECTOR<float,2> >;
template class RIGID_BODY_COLLECTION<VECTOR<float,3> >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class RIGID_BODY_COLLECTION<VECTOR<double,1> >;
template class RIGID_BODY_COLLECTION<VECTOR<double,2> >;
template class RIGID_BODY_COLLECTION<VECTOR<double,3> >;
#endif
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.policy.autoscaling;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.Entities;
import brooklyn.entity.proxying.EntitySpec;
import brooklyn.entity.trait.Resizable;
import brooklyn.event.basic.BasicNotificationSensor;
import brooklyn.test.Asserts;
import brooklyn.test.entity.TestApplication;
import brooklyn.test.entity.TestCluster;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.time.Duration;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
public class AutoScalerPolicyTest {
// Timing tolerances (milliseconds) for the asynchronous resize assertions.
private static final long TIMEOUT_MS = 10*1000;        // max wait for an expected resize
private static final long SHORT_WAIT_MS = 250;         // settle time when no resize is expected
private static final long OVERHEAD_DURATION_MS = 500;  // slack allowed beyond a stabilization delay
private static final long EARLY_RETURN_MS = 10;        // slack allowed before a stabilization delay elapses

// Per-test fixtures; created in setUp, destroyed and cleared in tearDown.
AutoScalerPolicy policy;
TestCluster cluster;
LocallyResizableEntity resizable;
TestApplication app;
@BeforeMethod(alwaysRun=true)
public void setUp() throws Exception {
// Fresh app, cluster (initial size 1), resizable wrapper, and policy per test.
app = TestApplication.Factory.newManagedInstanceForTests();
cluster = app.createAndManageChild(EntitySpec.create(TestCluster.class).configure(TestCluster.INITIAL_SIZE, 1));
resizable = new LocallyResizableEntity(cluster, cluster);
Entities.manage(resizable);
policy = new AutoScalerPolicy();
resizable.addPolicy(policy);
}
@AfterMethod(alwaysRun=true)
public void tearDown() throws Exception {
    // Destroy the policy and the whole app, then drop references so no state
    // leaks between test methods.
    if (policy != null) policy.destroy();
    if (app != null) Entities.destroyAll(app.getManagementContext());
    cluster = null;
    resizable = null;
    policy = null;
    app = null; // previously left dangling; cleared for consistency with the other fields
}
@Test
public void testShrinkColdPool() throws Exception {
    resizable.resize(4);
    // Cold event: 4 containers, total rate 30, per-container bounds 10/20.
    Map<String, Object> coldEvent = message(4, 30L, 4*10L, 4*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, coldEvent);
    // Expect the pool to shrink to 3 (i.e. the maximum size keeping >= 40 per container).
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 3));
}
@Test
public void testShrinkColdPoolRoundsUpDesiredNumberOfContainers() throws Exception {
    resizable.resize(4);
    // With a total rate of only 1, the desired size rounds up to 1 rather than down to 0.
    Map<String, Object> coldEvent = message(4, 1L, 4*10L, 4*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, coldEvent);
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 1));
}
@Test
public void testGrowHotPool() throws Exception {
    resizable.resize(2);
    // Hot event: 2 containers, total rate 41, per-container bounds 10/20.
    Map<String, Object> hotEvent = message(2, 41L, 2*10L, 2*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, hotEvent);
    // Expect the pool to grow to 3 (i.e. the minimum size keeping <= 80 per container).
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 3));
}
@Test
public void testHasId() throws Exception {
    // A freshly built policy must be assigned a non-null id.
    resizable.removePolicy(policy);
    policy = AutoScalerPolicy.builder()
            .minPoolSize(2)
            .build();
    resizable.addPolicy(policy);
    Assert.assertNotNull(policy.getId()); // idiomatic replacement for assertTrue(getId()!=null)
}
@Test
public void testNeverShrinkBelowMinimum() throws Exception {
    resizable.removePolicy(policy);
    policy = AutoScalerPolicy.builder()
            .minPoolSize(2)
            .build();
    resizable.addPolicy(policy);
    resizable.resize(4);
    // A workrate of zero would shrink to nothing; minPoolSize is the floor.
    Map<String, Object> coldEvent = message(4, 0L, 4*10L, 4*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, coldEvent);
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 2));
}
// NOTE(review): method name typo ("Maximmum") kept to preserve the test's external identity.
@Test
public void testNeverGrowAboveMaximmum() throws Exception {
    resizable.removePolicy(policy);
    policy = AutoScalerPolicy.builder()
            .maxPoolSize(5)
            .build();
    resizable.addPolicy(policy);
    resizable.resize(4);
    // An enormous workrate would grow far beyond 5; maxPoolSize is the ceiling.
    Map<String, Object> hotEvent = message(4, 1000000L, 4*10L, 4*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, hotEvent);
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 5));
}
@Test
public void testNeverGrowColdPool() throws Exception {
    resizable.resize(2);
    // A cold event can only shrink: even a huge reported rate must not grow the pool.
    Map<String, Object> coldEvent = message(2, 1000L, 2*10L, 2*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, coldEvent);
    Thread.sleep(SHORT_WAIT_MS);
    assertEquals(resizable.getCurrentSize(), (Integer)2);
}
@Test
public void testNeverShrinkHotPool() throws Exception {
    resizable.resizeSleepTime = 0;
    resizable.resize(2);
    // A hot event can only grow; the same payload as a POOL_COLD would have shrunk the pool.
    Map<String, Object> hotEvent = message(2, 0L, 2*10L, 2*20L);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, hotEvent);
    Thread.sleep(SHORT_WAIT_MS);
    assertEquals(resizable.getCurrentSize(), (Integer)2);
}
@Test(groups="Integration")
public void testConcurrentShrinkShrink() throws Exception {
resizable.resizeSleepTime = 250;
resizable.resize(4);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(4, 30L, 4*10L, 4*20L));
// would cause pool to shrink to 3
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(4, 1L, 4*10L, 4*20L));
// now expect pool to shrink to 1
Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 1));
}
@Test(groups="Integration")
public void testConcurrentGrowGrow() throws Exception {
resizable.resizeSleepTime = 250;
resizable.resize(2);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(2, 41L, 2*10L, 2*20L));
// would cause pool to grow to 3
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(2, 81L, 2*10L, 2*20L));
// now expect pool to grow to 5
Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 5));
}
@Test(groups="Integration")
public void testConcurrentGrowShrink() throws Exception {
resizable.resizeSleepTime = 250;
resizable.resize(2);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(2, 81L, 2*10L, 2*20L));
// would cause pool to grow to 5
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(2, 1L, 2*10L, 2*20L));
// now expect pool to shrink to 1
Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 1));
}
@Test(groups="Integration")
public void testConcurrentShrinkGrow() throws Exception {
resizable.resizeSleepTime = 250;
resizable.resize(4);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(4, 1L, 4*10L, 4*20L));
// would cause pool to shrink to 1
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(4, 81L, 4*10L, 4*20L));
// now expect pool to grow to 5
Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 5));
}
// FIXME failed in jenkins (e.g. #1035); with "lists don't have the same size expected:<3> but was:<2>"
// Is it just too time sensitive? But I'd have expected > 3 rather than less
@Test(groups="WIP")
public void testRepeatedQueuedResizeTakesLatestValueRatherThanIntermediateValues() throws Exception {
// TODO is this too time sensitive? the resize takes only 250ms so if it finishes before the next emit we'd also see size=2
resizable.resizeSleepTime = 500;
resizable.resize(4);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(4, 30L, 4*10L, 4*20L)); // shrink to 3
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(4, 20L, 4*10L, 4*20L)); // shrink to 2
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(4, 10L, 4*10L, 4*20L)); // shrink to 1
Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 1));
assertEquals(resizable.sizes, ImmutableList.of(4, 3, 1));
}
@Test
public void testUsesResizeOperatorOverride() throws Exception {
    // The policy should invoke the supplied ResizeOperator instead of the default resize.
    resizable.removePolicy(policy);
    final AtomicInteger counter = new AtomicInteger();
    policy = AutoScalerPolicy.builder()
            .resizeOperator(new ResizeOperator() {
                @Override public Integer resize(Entity entity, Integer desiredSize) {
                    counter.incrementAndGet();
                    return desiredSize;
                }})
            .build();
    resizable.addPolicy(policy);
    resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(1, 21L, 1*10L, 1*20L)); // grow to 2
    Asserts.succeedsEventually(MutableMap.of("timeout",TIMEOUT_MS), new Runnable() {
        public void run() {
            // Fixed assertion-message typo: was "cccounter=".
            assertTrue(counter.get() >= 1, "counter="+counter);
        }});
}
@Test
public void testUsesCustomSensorOverride() throws Exception {
    resizable.removePolicy(policy);
    // Wire the policy to custom hot/cold/ok sensors instead of the defaults.
    BasicNotificationSensor<Map> hotSensor = new BasicNotificationSensor<Map>(Map.class, "custom.hot", "");
    BasicNotificationSensor<Map> coldSensor = new BasicNotificationSensor<Map>(Map.class, "custom.cold", "");
    BasicNotificationSensor<Map> okSensor = new BasicNotificationSensor<Map>(Map.class, "custom.ok", "");
    policy = AutoScalerPolicy.builder()
            .poolHotSensor(hotSensor)
            .poolColdSensor(coldSensor)
            .poolOkSensor(okSensor)
            .build();
    resizable.addPolicy(policy);
    resizable.emit(hotSensor, message(1, 21L, 1*10L, 1*20L)); // grow to 2
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 2));
    resizable.emit(coldSensor, message(2, 1L, 1*10L, 1*20L)); // shrink to 1
    Asserts.succeedsEventually(ImmutableMap.of("timeout", TIMEOUT_MS), currentSizeAsserter(resizable, 1));
}
@Test(groups="Integration")
public void testResizeUpStabilizationDelayIgnoresBlip() throws Exception {
long resizeUpStabilizationDelay = 1000L;
Duration minPeriodBetweenExecs = Duration.ZERO;
resizable.removePolicy(policy);
policy = AutoScalerPolicy.builder()
.resizeUpStabilizationDelay(Duration.of(resizeUpStabilizationDelay, TimeUnit.MILLISECONDS))
.minPeriodBetweenExecs(minPeriodBetweenExecs)
.build();
resizable.addPolicy(policy);
resizable.resize(1);
// Ignores temporary blip
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(1, 61L, 1*10L, 1*20L)); // would grow to 4
Thread.sleep(resizeUpStabilizationDelay-OVERHEAD_DURATION_MS);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_OK_SENSOR, message(1, 11L, 4*10L, 4*20L)); // but 1 is still adequate
assertEquals(resizable.getCurrentSize(), (Integer)1);
Asserts.succeedsContinually(MutableMap.of("duration", 2000L), new Runnable() {
@Override public void run() {
assertEquals(resizable.sizes, ImmutableList.of(1));
}});
}
// FIXME decreased invocationCount from 100, because was failing in jenkins occasionally.
// Error was things like it taking a couple of seconds too long to scale-up. This is *not*
// just caused by a slow GC (running with -verbose:gc shows during a failure several
// incremental GCs that usually don't amount to more than 0.2 of a second at most, often less).
// Doing a thread-dump etc immediately after the too-long delay shows no strange thread usage,
// and shows releng3 system load averages of numbers like 1.73, 2.87 and 1.22.
//
// Have put it in the "Acceptance" group for now.
@Test(groups={"Integration", "Acceptance"}, invocationCount=100)
public void testRepeatedResizeUpStabilizationDelayTakesMaxSustainedDesired() throws Throwable {
try {
testResizeUpStabilizationDelayTakesMaxSustainedDesired();
} catch (Throwable t) {
dumpThreadsEtc();
throw t;
}
}
// Verifies that while scale-up requests are stabilizing, the policy grows only
// to the *maximum size sustained over the whole delay window* (2), not to the
// transient peak request (4).
@Test(groups="Integration")
public void testResizeUpStabilizationDelayTakesMaxSustainedDesired() throws Exception {
long resizeUpStabilizationDelay = 1100L;
Duration minPeriodBetweenExecs = Duration.ZERO;
// Swap in a policy configured with the stabilization delay under test.
resizable.removePolicy(policy);
policy = AutoScalerPolicy.builder()
.resizeUpStabilizationDelay(Duration.of(resizeUpStabilizationDelay, TimeUnit.MILLISECONDS))
.minPeriodBetweenExecs(minPeriodBetweenExecs)
.build();
resizable.addPolicy(policy);
resizable.resize(1);
// Will grow to only the max sustained in this time window
// (i.e. to 2 within the first $resizeUpStabilizationDelay milliseconds)
Stopwatch stopwatch = Stopwatch.createStarted();
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(1, 61L, 1*10L, 1*20L)); // would grow to 4
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(1, 21L, 1*10L, 1*20L)); // would grow to 2
// Sleep almost the whole delay window, then re-emit the "grow to 4" workload;
// the clock for reaching size 4 restarts from this last emission.
Thread.sleep(resizeUpStabilizationDelay-OVERHEAD_DURATION_MS);
long postSleepTime = stopwatch.elapsed(TimeUnit.MILLISECONDS);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, message(1, 61L, 1*10L, 1*20L)); // would grow to 4
// Wait for it to reach size 2, and confirm take expected time
// TODO This is time sensitive, and sometimes fails in CI with size=4 if we wait for currentSize==2 (presumably GC kicking in?)
// Therefore do strong assertion of currentSize==2 later, so can write out times if it goes wrong.
Asserts.succeedsEventually(MutableMap.of("period", 1, "timeout", TIMEOUT_MS), new Runnable() {
public void run() {
assertTrue(resizable.getCurrentSize() >= 2, "currentSize="+resizable.getCurrentSize());
}});
assertEquals(resizable.getCurrentSize(), (Integer)2,
stopwatch.elapsed(TimeUnit.MILLISECONDS)+"ms after first emission; "+(stopwatch.elapsed(TimeUnit.MILLISECONDS)-postSleepTime)+"ms after last");
// The resize to 2 must land one stabilization delay after the first emission,
// within the EARLY_RETURN/OVERHEAD tolerances.
long timeToResizeTo2 = stopwatch.elapsed(TimeUnit.MILLISECONDS);
assertTrue(timeToResizeTo2 >= resizeUpStabilizationDelay-EARLY_RETURN_MS &&
timeToResizeTo2 <= resizeUpStabilizationDelay+OVERHEAD_DURATION_MS,
"Resizing to 2: time="+timeToResizeTo2+"; resizeUpStabilizationDelay="+resizeUpStabilizationDelay);
// Will then grow to 4 $resizeUpStabilizationDelay milliseconds after that emission
Asserts.succeedsEventually(MutableMap.of("period", 1, "timeout", TIMEOUT_MS),
currentSizeAsserter(resizable, 4));
long timeToResizeTo4 = stopwatch.elapsed(TimeUnit.MILLISECONDS) - postSleepTime;
assertTrue(timeToResizeTo4 >= resizeUpStabilizationDelay-EARLY_RETURN_MS &&
timeToResizeTo4 <= resizeUpStabilizationDelay+OVERHEAD_DURATION_MS,
"Resizing to 4: timeToResizeTo4="+timeToResizeTo4+"; timeToResizeTo2="+timeToResizeTo2+"; resizeUpStabilizationDelay="+resizeUpStabilizationDelay);
}
// Verifies that a *sustained* scale-up request is eventually honoured, but
// only after the configured resizeUpStabilizationDelay has elapsed.
@Test(groups="Integration")
public void testResizeUpStabilizationDelayResizesAfterDelay() {
final long resizeUpStabilizationDelay = 1000L;
Duration minPeriodBetweenExecs = Duration.ZERO;
resizable.removePolicy(policy);
policy = resizable.addPolicy(AutoScalerPolicy.builder()
.resizeUpStabilizationDelay(Duration.of(resizeUpStabilizationDelay, TimeUnit.MILLISECONDS))
.minPeriodBetweenExecs(minPeriodBetweenExecs)
.buildSpec());
resizable.resize(1);
// After suitable delay, grows to desired
final long emitTime = System.currentTimeMillis();
final Map<String, Object> need4 = message(1, 61L, 1*10L, 1*20L);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, need4); // would grow to 4
final AtomicInteger emitCount = new AtomicInteger(0);
Asserts.succeedsEventually(MutableMap.of("timeout", TIMEOUT_MS), new Runnable() {
public void run() {
// Re-emit periodically in case the first event raced policy registration.
if (System.currentTimeMillis() - emitTime > (2+emitCount.get())*resizeUpStabilizationDelay) {
//first one may not have been received, in a registration race
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_HOT_SENSOR, need4);
emitCount.incrementAndGet();
}
assertEquals(resizable.getCurrentSize(), (Integer)4);
}});
// Confirm the resize did not happen before the stabilization delay elapsed.
long resizeDelay = System.currentTimeMillis() - emitTime;
assertTrue(resizeDelay >= (resizeUpStabilizationDelay-EARLY_RETURN_MS), "resizeDelay="+resizeDelay);
}
// Verifies that a short-lived ("blip") scale-down request is discarded when a
// follow-up reading within the stabilization window shows the current size is
// still adequate.
@Test(groups="Integration")
public void testResizeDownStabilizationDelayIgnoresBlip() throws Exception {
long resizeStabilizationDelay = 1000L;
Duration minPeriodBetweenExecs = Duration.ZERO;
resizable.removePolicy(policy);
policy = AutoScalerPolicy.builder()
.resizeDownStabilizationDelay(Duration.of(resizeStabilizationDelay, TimeUnit.MILLISECONDS))
.minPeriodBetweenExecs(minPeriodBetweenExecs)
.build();
resizable.addPolicy(policy);
resizable.resize(2);
// Ignores temporary blip
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(2, 1L, 2*10L, 2*20L)); // would shrink to 1
Thread.sleep(resizeStabilizationDelay-OVERHEAD_DURATION_MS);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_OK_SENSOR, message(2, 20L, 1*10L, 1*20L)); // but 2 is still adequate
assertEquals(resizable.getCurrentSize(), (Integer)2);
// Confirm no resize ever fires: the recorded size history stays [2].
Asserts.succeedsContinually(MutableMap.of("duration", 2000L), new Runnable() {
public void run() {
assertEquals(resizable.sizes, ImmutableList.of(2));
}});
}
// FIXME decreased invocationCount from 100; see comment against testRepeatedResizeUpStabilizationDelayTakesMaxSustainedDesired
// Have put it in the "Acceptance" group for now.
@Test(groups={"Integration", "Acceptance"}, invocationCount=100)
public void testRepeatedResizeDownStabilizationDelayTakesMinSustainedDesired() throws Throwable {
    // Re-run the single-shot scale-down stabilization test many times; on any
    // failure, capture thread/memory/OS diagnostics before propagating it.
    try {
        testResizeDownStabilizationDelayTakesMinSustainedDesired();
    } catch (Throwable failure) {
        dumpThreadsEtc();
        throw failure;
    }
}
// Mirror of the scale-up test: while scale-down requests are stabilizing, the
// policy shrinks only to the *minimum size sustained over the whole delay
// window* (2), not to the transient low request (1).
@Test(groups="Integration")
public void testResizeDownStabilizationDelayTakesMinSustainedDesired() throws Exception {
long resizeDownStabilizationDelay = 1100L;
Duration minPeriodBetweenExecs = Duration.ZERO;
// Suspend before removing so no pending scale-down fires during the swap.
policy.suspend();
resizable.removePolicy(policy);
policy = AutoScalerPolicy.builder()
.resizeDownStabilizationDelay(Duration.of(resizeDownStabilizationDelay, TimeUnit.MILLISECONDS))
.minPeriodBetweenExecs(minPeriodBetweenExecs)
.build();
resizable.addPolicy(policy);
resizable.resize(3);
// Will shrink to only the min sustained in this time window
// (i.e. to 2 within the first $resizeDownStabilizationDelay milliseconds)
Stopwatch stopwatch = Stopwatch.createStarted();
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(3, 1L, 3*10L, 3*20L)); // would shrink to 1
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(3, 20L, 3*10L, 3*20L)); // would shrink to 2
// Sleep almost the whole delay window, then re-emit the "shrink to 1"
// workload; the clock for reaching size 1 restarts from this last emission.
Thread.sleep(resizeDownStabilizationDelay-OVERHEAD_DURATION_MS);
long postSleepTime = stopwatch.elapsed(TimeUnit.MILLISECONDS);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, message(3, 1L, 3*10L, 3*20L)); // would shrink to 1
// Wait for it to reach size 2, and confirm take expected time
// TODO This is time sensitive, and sometimes fails in CI with size=1 if we wait for currentSize==2 (presumably GC kicking in?)
// Therefore do strong assertion of currentSize==2 later, so can write out times if it goes wrong.
Asserts.succeedsEventually(MutableMap.of("period", 1, "timeout", TIMEOUT_MS), new Runnable() {
public void run() {
assertTrue(resizable.getCurrentSize() <= 2, "currentSize="+resizable.getCurrentSize());
}});
assertEquals(resizable.getCurrentSize(), (Integer)2,
stopwatch.elapsed(TimeUnit.MILLISECONDS)+"ms after first emission; "+(stopwatch.elapsed(TimeUnit.MILLISECONDS)-postSleepTime)+"ms after last");
// The resize to 2 must land one stabilization delay after the first emission,
// within the EARLY_RETURN/OVERHEAD tolerances.
long timeToResizeTo2 = stopwatch.elapsed(TimeUnit.MILLISECONDS);
assertTrue(timeToResizeTo2 >= resizeDownStabilizationDelay-EARLY_RETURN_MS &&
timeToResizeTo2 <= resizeDownStabilizationDelay+OVERHEAD_DURATION_MS,
"Resizing to 2: time="+timeToResizeTo2+"; resizeDownStabilizationDelay="+resizeDownStabilizationDelay);
// Will then shrink to 1 $resizeDownStabilizationDelay milliseconds after that emission
Asserts.succeedsEventually(MutableMap.of("period", 1, "timeout", TIMEOUT_MS),
currentSizeAsserter(resizable, 1));
long timeToResizeTo1 = stopwatch.elapsed(TimeUnit.MILLISECONDS) - postSleepTime;
assertTrue(timeToResizeTo1 >= resizeDownStabilizationDelay-EARLY_RETURN_MS &&
timeToResizeTo1 <= resizeDownStabilizationDelay+OVERHEAD_DURATION_MS,
"Resizing to 1: timeToResizeTo1="+timeToResizeTo1+"; timeToResizeTo2="+timeToResizeTo2+"; resizeDownStabilizationDelay="+resizeDownStabilizationDelay);
}
// Verifies that a *sustained* scale-down request is honoured, but only after
// the configured resizeDownStabilizationDelay has elapsed.
@Test(groups="Integration")
public void testResizeDownStabilizationDelayResizesAfterDelay() throws Exception {
final long resizeDownStabilizationDelay = 1000L;
Duration minPeriodBetweenExecs = Duration.ZERO;
resizable.removePolicy(policy);
policy = AutoScalerPolicy.builder()
.resizeDownStabilizationDelay(Duration.of(resizeDownStabilizationDelay, TimeUnit.MILLISECONDS))
.minPeriodBetweenExecs(minPeriodBetweenExecs)
.build();
resizable.addPolicy(policy);
resizable.resize(2);
// After suitable delay, shrinks to desired
final long emitTime = System.currentTimeMillis();
final Map<String, Object> needJust1 = message(2, 1L, 2*10L, 2*20L);
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, needJust1); // would shrink to 1
final AtomicInteger emitCount = new AtomicInteger(0);
Asserts.succeedsEventually(MutableMap.of("timeout", TIMEOUT_MS), new Runnable() {
public void run() {
// Re-emit periodically in case the first event raced policy registration.
if (System.currentTimeMillis() - emitTime > (2+emitCount.get())*resizeDownStabilizationDelay) {
//first one may not have been received, in a registration race
resizable.emit(AutoScalerPolicy.DEFAULT_POOL_COLD_SENSOR, needJust1); // would shrink to 1
emitCount.incrementAndGet();
}
assertEquals(resizable.getCurrentSize(), (Integer)1);
}});
// Confirm the resize did not happen before the stabilization delay elapsed.
long resizeDelay = System.currentTimeMillis() - emitTime;
assertTrue(resizeDelay >= (resizeDownStabilizationDelay-EARLY_RETURN_MS), "resizeDelay="+resizeDelay);
}
/**
 * Builds the sensor payload consumed by AutoScalerPolicy's pool
 * hot/cold/ok handlers: current pool size, current workrate, and the
 * low/high workrate thresholds.
 */
static Map<String, Object> message(int currentSize, double currentWorkrate, double lowThreshold, double highThreshold) {
    return ImmutableMap.<String, Object>builder()
            .put(AutoScalerPolicy.POOL_CURRENT_SIZE_KEY, currentSize)
            .put(AutoScalerPolicy.POOL_CURRENT_WORKRATE_KEY, currentWorkrate)
            .put(AutoScalerPolicy.POOL_LOW_THRESHOLD_KEY, lowThreshold)
            .put(AutoScalerPolicy.POOL_HIGH_THRESHOLD_KEY, highThreshold)
            .build();
}
/** Returns a Runnable that asserts {@code resizable} is at the {@code desired} size. */
public static Runnable currentSizeAsserter(final Resizable resizable, final Integer desired) {
    return new Runnable() {
        @Override
        public void run() {
            assertEquals(resizable.getCurrentSize(), desired);
        }
    };
}
/**
 * Diagnostic dump used when a timing-sensitive test fails: writes every
 * thread's stack, heap/non-heap memory usage, and OS load to stdout.
 */
public static void dumpThreadsEtc() {
    ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
    for (ThreadInfo info : threadBean.dumpAllThreads(true, true)) {
        System.out.println(info.getThreadName()+" ("+info.getThreadState()+")");
        for (StackTraceElement frame : info.getStackTrace()) {
            System.out.println("\t"+frame);
        }
    }
    MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
    MemoryUsage heap = memoryBean.getHeapMemoryUsage();
    MemoryUsage nonHeap = memoryBean.getNonHeapMemoryUsage();
    System.out.println("Memory:");
    System.out.println("\tHeap: used="+heap.getUsed()+"; max="+heap.getMax()+"; init="+heap.getInit()+"; committed="+heap.getCommitted());
    System.out.println("\tNon-heap: used="+nonHeap.getUsed()+"; max="+nonHeap.getMax()+"; init="+nonHeap.getInit()+"; committed="+nonHeap.getCommitted());
    OperatingSystemMXBean osBean = ManagementFactory.getOperatingSystemMXBean();
    System.out.println("OS:");
    System.out.println("\tsysLoadAvg="+osBean.getSystemLoadAverage()+"; availableProcessors="+osBean.getAvailableProcessors()+"; arch="+osBean.getArch());
}
}
|
// NOTE(review): the file is read but `text` is never used — presumably a
// leftover from a dropped transformation step; TODO confirm intent.
// `fs` and `fileName` must already be in scope.
var text = fs.readFileSync(fileName, "utf8");
// Reopen the same file for writing (truncates it) and replace its contents.
var file = fs.createWriteStream(fileName, { encoding: "utf8" });
file.write("???");
file.end();
|
# Load environment variables from .env (if present), then serve the app with
# PHP's built-in dev server on port 3000.
if [ -e .env ]
then
# Split only on newlines so values containing spaces survive word-splitting.
IFS='
'
# Export every non-comment, non-empty line of .env.
# NOTE(review): `grep -v "#"` drops any line *containing* '#', not just
# comment lines, and `xargs -0` expects NUL-separated input — TODO confirm.
export $(cat .env | grep -v "#" | grep -v '^$' | xargs -0)
IFS=
php -S localhost:3000 -t .
else
echo ".env File was not found. Create a .env file in the root folder of the project following the example in .env.default"
fi
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs the HadoopDfsioe-write benchmark: DFSIO enhanced write test via
# TestDFSIOEnh, with monitoring and a report generated at the end.
# Resolve this script's directory as an absolute path.
current_dir=`dirname "$0"`
current_dir=`cd "$current_dir"; pwd`
root_dir=${current_dir}/../../../../../
workload_config=${root_dir}/conf/workloads/micro/dfsioe.conf
# Load benchmark helper functions (enter_bench, run-hadoop-job, gen_report, ...)
# and the workload configuration variables used below.
. "${root_dir}/bin/functions/load-bench-config.sh"
enter_bench HadoopDfsioe-write ${workload_config} ${current_dir}
show_bannar start
#path check
# Remove any previous output from HDFS; tolerate its absence.
rmr-hdfs ${OUTPUT_HDFS} || true
# pre-running
SIZE=`dir_size $INPUT_HDFS`
OPTION="-write -nrFiles ${WT_NUM_OF_FILES} -fileSize ${WT_FILE_SIZE} -bufferSize 4096 -plotInteval 1000 -sampleUnit m -sampleInteval 200 -sumThreshold 0.5 -tputReportTotal -Dtest.build.data=${INPUT_HDFS}"
# Preserve HADOOP_OPTS so it can be restored after the run.
OLD_HADOOP_OPTS=${HADOOP_OPTS:-}
export HADOOP_OPTS="${HADOOP_OPTS:-} -Dtest.build.data=${INPUT_HDFS} "
MONITOR_PID=`start-monitor`
START_TIME=`timestamp`
#run benchmark
run-hadoop-job ${DATATOOLS} org.apache.hadoop.fs.dfsioe.TestDFSIOEnh \
-Dmapreduce.map.java.opts=\"-Dtest.build.data=${INPUT_HDFS} $MAP_JAVA_OPTS\" \
-Dmapreduce.reduce.java.opts=\"-Dtest.build.data=${INPUT_HDFS} $RED_JAVA_OPTS\" \
${OPTION} -resFile ${WORKLOAD_RESULT_FOLDER}/result_write.txt \
-tputFile ${WORKLOAD_RESULT_FOLDER}/throughput_write.csv
# post-running
END_TIME=`timestamp`
export HADOOP_OPTS="$OLD_HADOOP_OPTS"
stop-monitor $MONITOR_PID
gen_report ${START_TIME} ${END_TIME} ${SIZE}
show_bannar finish
leave_bench
|
[
[2, 3, 9, 7],
[7, 8, 1, 4],
[4, 6, 5, 2],
[1, 5, 3, 8]
];
|
#!/bin/bash
# This is an example script of training and running model ensembles.
# train 5 models with different seeds
CUDA_VISIBLE_DEVICES=3 python train.py --seed 1234 --data_dir /home/scratch/gis/datasets/tacred/data/json/ --vocab_dir /home/scratch/gis/datasets/tacred-relation_data/ --id 01 --info "Position-aware attention model" --optim adagrad --nas_rnn True --nas_mlp False --no-attn --save_dir /home/scratch/gis/saved_models --test_save_dir /home/scratch/gis/tacred_test_performances
CUDA_VISIBLE_DEVICES=3 python train.py --seed 17 --data_dir /home/scratch/gis/datasets/tacred/data/json/ --vocab_dir /home/scratch/gis/datasets/tacred-relation_data/ --id 02 --info "Position-aware attention model" --optim adagrad --nas_rnn True --nas_mlp False --no-attn --save_dir /home/scratch/gis/saved_models --test_save_dir /home/scratch/gis/tacred_test_performances
CUDA_VISIBLE_DEVICES=3 python train.py --seed 1776 --data_dir /home/scratch/gis/datasets/tacred/data/json/ --vocab_dir /home/scratch/gis/datasets/tacred-relation_data/ --id 03 --info "Position-aware attention model" --optim adagrad --nas_rnn True --nas_mlp False --no-attn --save_dir /home/scratch/gis/saved_models --test_save_dir /home/scratch/gis/tacred_test_performances
CUDA_VISIBLE_DEVICES=3 python train.py --seed 2019 --data_dir /home/scratch/gis/datasets/tacred/data/json/ --vocab_dir /home/scratch/gis/datasets/tacred-relation_data/ --id 04 --info "Position-aware attention model" --optim adagrad --nas_rnn True --nas_mlp False --no-attn --save_dir /home/scratch/gis/saved_models --test_save_dir /home/scratch/gis/tacred_test_performances
CUDA_VISIBLE_DEVICES=3 python train.py --seed 420 --data_dir /home/scratch/gis/datasets/tacred/data/json/ --vocab_dir /home/scratch/gis/datasets/tacred-relation_data/ --id 05 --info "Position-aware attention model" --optim adagrad --nas_rnn True --nas_mlp False --no-attn --save_dir /home/scratch/gis/saved_models --test_save_dir /home/scratch/gis/tacred_test_performances
# evaluate on test sets and save prediction files
#CUDA_VISIBLE_DEVICES=3 python eval.py /home/scratch/gis/datasets/tacred-relation_data/saved_models/01 --out /home/scratch/gis/datasets/tacred-relation_data/saved_models/out/test_1.pkl
#CUDA_VISIBLE_DEVICES=3 python eval.py /home/scratch/gis/datasets/tacred-relation_data/saved_models/02 --out /home/scratch/gis/datasets/tacred-relation_data/saved_models/out/test_2.pkl
#CUDA_VISIBLE_DEVICES=3 python eval.py /home/scratch/gis/datasets/tacred-relation_data/saved_models/03 --out /home/scratch/gis/datasets/tacred-relation_data/saved_models/out/test_3.pkl
#CUDA_VISIBLE_DEVICES=3 python eval.py /home/scratch/gis/datasets/tacred-relation_data/saved_models/04 --out /home/scratch/gis/datasets/tacred-relation_data/saved_models/out/test_4.pkl
#CUDA_VISIBLE_DEVICES=3 python eval.py /home/scratch/gis/datasets/tacred-relation_data/saved_models/05 --out /home/scratch/gis/datasets/tacred-relation_data/saved_models/out/test_5.pkl
# run ensemble
# Collect the five per-model prediction files as arguments for the ensembler.
ARGS="--data_dir /home/scratch/gis/datasets/tacred/data/json/"
for id in 1 2 3 4 5; do
OUT="/home/scratch/gis/datasets/tacred-relation_data/saved_models/out/test_${id}.pkl"
ARGS="$ARGS $OUT"
done
# NOTE(review): the eval steps above are commented out, so the .pkl files must
# already exist for the ensemble step to succeed — TODO confirm.
python ensemble.py --dataset test $ARGS
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SuperSet = void 0;
/**
 * Set subclass with array-style helpers.
 *
 * Fixes applied in review:
 *  - isSubset/isParent: membership used Array#find, whose return value is the
 *    element itself, so falsy members (0, '', false) reported "not present".
 *    Now uses Set membership (SameValueZero).
 *  - intersection: deleted elements the main set did NOT have — a no-op, so
 *    it returned the first set unchanged. Now removes members of the first
 *    set that are absent from any later set.
 *  - equal: compared index 0 on every loop iteration; now performs real
 *    set-equality (same size and same members, order-independent).
 */
class SuperSet extends Set {
    constructor(prototype) {
        super(prototype);
    }
    // Add every argument to the set.
    squash(...values) {
        values.forEach((x) => {
            this.add(x);
        });
    }
    // 1-based positional access in insertion order: get(1) is the first element.
    get(value) {
        const values = this.values();
        value -= 1;
        while (value-- > 0) {
            values.next();
        }
        return values.next().value;
    }
    // Snapshot of the set as an array (insertion order).
    array() {
        return Array.from(this);
    }
    map(func) {
        return this.array().map(func);
    }
    // True when every element of `set` is contained in this set.
    isSubset(set) {
        return Array.from(set).every((x) => this.has(x));
    }
    // True when every element of this set is contained in `set`.
    isParent(set) {
        const parent = new Set(set);
        return this.array().every((x) => parent.has(x));
    }
    // New SuperSet holding the elements matching the predicate.
    filter(data) {
        const filter = this.array().filter(data);
        return new SuperSet(filter);
    }
    find(data) {
        return this.array().find(data);
    }
    // Merge all later sets into the first one (mutates and returns it).
    union(...sets) {
        const mainSet = sets.shift();
        if (!mainSet)
            return;
        for (const oset of sets) {
            Array.from(oset).forEach((x) => {
                mainSet.add(x);
            });
        }
        return mainSet;
    }
    // Keep only elements of the first set that are present in every later set
    // (mutates and returns the first set).
    intersection(...sets) {
        const mainSet = sets.shift();
        if (!mainSet)
            return;
        for (const set of sets) {
            const members = set instanceof Set ? set : new Set(set);
            for (const x of Array.from(mainSet)) {
                if (!members.has(x))
                    mainSet.delete(x);
            }
        }
        return mainSet;
    }
    // Remove setB's elements from setA (mutates and returns setA).
    difference(setA, setB) {
        for (const value of Array.from(setB)) {
            setA.delete(value);
        }
        return setA;
    }
    // Set equality: same number of elements and every element shared.
    equal(set) {
        const others = Array.from(set);
        if (this.size !== others.length)
            return false;
        return others.every((x) => this.has(x));
    }
    // New SuperSet with elements ordered by the comparator.
    sort(func) {
        return new SuperSet(this.array().sort(func));
    }
}
exports.SuperSet = SuperSet;
//# sourceMappingURL=superset.js.map
|
#!/bin/sh
################################################################################
# This file is part of the package effrb. It is subject to the license
# terms in the LICENSE.md file found in the top-level directory of
# this distribution and at https://github.com/pjones/effrb. No part of
# the effrb package, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained
# in the LICENSE.md file.
################################################################################
. `dirname $0`/../common.sh
################################################################################
run_irb_replace_nil <<EOF | grep -Ev '^[[:space:]]*from'
h = {};nil
h[:weekdays] = h.fetch(:weekdays, []) << "Monday"
h.fetch(:missing_key)
EOF
|
<gh_stars>100-1000
#ifndef NNET_IMAGE_H_
#define NNET_IMAGE_H_
#include "nnet_common.h"
#include "hls_stream.h"
#include <math.h>
namespace nnet {
// Compile-time parameters for image resizing: source height/width/channels
// and target (new_height x new_width). These are placeholder defaults; users
// specialize this struct per layer.
struct resize_config {
static const unsigned height = 10;
static const unsigned width = 10;
static const unsigned n_chan = 10;
static const unsigned new_height = 10;
static const unsigned new_width = 10;
};
// Nearest-neighbour resize of an interleaved height x width x n_chan image
// into new_height x new_width x n_chan. Source indices are derived with
// 16.16 fixed-point ratios; the "+ 1" biases the ratio so the computed
// source index never reaches the source dimension bound.
template<class data_T, typename CONFIG_T>
void resize_nearest(
data_T image[CONFIG_T::height * CONFIG_T::width * CONFIG_T::n_chan],
data_T resized[CONFIG_T::new_height * CONFIG_T::new_width * CONFIG_T::n_chan]
) {
int y_ratio = (int)((CONFIG_T::height << 16) / CONFIG_T::new_height) + 1;
int x_ratio = (int)((CONFIG_T::width << 16) / CONFIG_T::new_width) + 1;
int x2, y2;
// HLS directive: pipeline the loop nest for hardware throughput.
#pragma HLS PIPELINE
for (int i = 0; i < CONFIG_T::new_height; i++) {
for (int j = 0; j < CONFIG_T::new_width; j++) {
// Map destination pixel (j, i) back to its nearest source pixel (x2, y2).
x2 = ((j * x_ratio) >> 16);
y2 = ((i * y_ratio) >> 16);
for (int k = 0; k < CONFIG_T::n_chan; k++) {
resized[(i * CONFIG_T::new_width * CONFIG_T::n_chan) + j * CONFIG_T::n_chan + k] = image[(y2 * CONFIG_T::width * CONFIG_T::n_chan) + x2 * CONFIG_T::n_chan + k];
}
}
}
}
}
#endif
|
<gh_stars>0
import React from 'react';
import Slider from "react-slick";
import alamin from './img/team/alamin.jpg'
import rasel from './img/team/rasel.jpg'
import akash from './img/team/akash.jpg'
// import redwan from './img/team/redwan.jpg'
// "Our Team" landing-page section: a heading plus the <Member /> carousel.
export default class Team_slider extends React.Component {
render() {
return (
<section className="our-team-slider-wrapper bg-dot">
<div className="container">
<div className="row justify-content-center text-center">
<div className="col-xl-6">
<div className="section-title">
<h2><span>Our </span>Team</h2>
<p>Meet with passionate and dedicated members of Familiar-IT</p>
</div>
</div>
</div>
<Member />
</div>
</section>
);
}
}
class Member extends React.Component {
render() {
var settings = {
dots: true,
infinite: true,
speed: 500,
arrows: false,
slidesToShow: 3,
slidesToScroll: 2,
responsive: [{
breakpoint: 992,
settings: {
slidesToShow: 2,
slidesToScroll: 1,
}
},
{
breakpoint: 768,
settings: {
slidesToShow: 2,
slidesToScroll: 1
}
},
{
breakpoint: 576,
settings: {
slidesToShow: 1,
slidesToScroll: 1
}
}
]
};
return (
<Slider {...settings} className="row team-slider-active">
<TeamCard name="<NAME>" title="Project Manager" facebook="https://www.facebook.com/alaminislam05" github="https://github.com/alaminAB" linkedin="https://www.linkedin.com/in/familiar-it-5b1a67128/" img={alamin}/>
<TeamCard name="<NAME>" title="Full Stact Developer" facebook="https://www.facebook.com/rslahmed383/" github="https://github.com/rslahmed" linkedin="https://www.linkedin.com/in/rslahmed/" img={rasel}/>
<TeamCard name="<NAME>" title="Developer" facebook="https://www.facebook.com/shanboy.akash" github="https://github.com/r-yan-akash" linkedin="https://www.linkedin.com/mwlite/in/shanboy-jisan-7a1111175" img={akash}/>
</Slider>
);
}
}
// Presentational card for one team member: photo with a social-link overlay,
// name, and title. Expects props: name, title, facebook, github, linkedin, img.
// NOTE(review): the img alt text is hardcoded to one member's name for every
// card — should probably be {props.name}; TODO confirm before changing markup.
function TeamCard(props){
return (
<div className="col-12">
<div className="single-team-slider">
<div className="team-slider-img">
<a href="/"><img src={props.img} className="img-fluid" alt="Alamiln Islam" /></a>
<div className="team-social-icons">
<a target="_blank" rel="noopener noreferrer" href={props.facebook}><i className="fab fa-facebook-f"></i></a>
<a target="_blank" rel="noopener noreferrer" href={props.github}><i className="fab fa-github"></i></a>
<a target="_blank" rel="noopener noreferrer" href={props.linkedin}><i className="fab fa-linkedin-in"></i></a>
</div>
</div>
<a href="/">
<h4>{props.name}</h4>
</a>
<p>{props.title}</p>
</div>
</div>
)
}
|
import React, {Component} from "react";
import Router from "next/router";
import Link from "next/link";
import axios, {AxiosResponse} from 'axios';
import Layout from "../components/layout/Layout";
import Form from "../components/Form/Form";
import {providerInterface} from "../components/Provider/Provider";
// Payment page: reads the selected provider id from the router query string,
// fetches that provider's details from the app's own API, and renders the
// payment form alongside the provider name.
class Payment extends Component {
// Provider being paid; id stays null until the fetch below resolves.
state = {
provider: {
id: null,
name: ''
}
};
componentDidMount(): void {
const router = Router;
// Rebuild the app's own origin so the API call works on any host/port.
const protocol = window.location.protocol;
const url = protocol + '//' + window.location.hostname + ':' + window.location.port;
// NOTE(review): response.data is mapped and setState is called per item —
// presumably the API returns a single-element array; TODO confirm.
axios.get(`${url}/api/provider?id=${router.query.provider}`)
.then((response: AxiosResponse) => {
response.data.map((prov: providerInterface) => {
this.setState({provider: prov})
});
}
);
}
render(): React.ReactElement<any, string | React.JSXElementConstructor<any>> | string | number | {} | React.ReactNodeArray | React.ReactPortal | boolean | null | undefined {
return (
<Layout>
<section className="row">
<div className="col-md-8 offset-md-2">
<div className="card">
<div className="card-header text-center">
<h3>Please take your Pay</h3>
</div>
<div className="card-body">
<p className="lead">
<strong>Selected Provider: </strong>
<span>{this.state.provider.name}</span>
{this.state.provider.name && <span className="float-right">
<Link href={'/'}>
<a>Change provider?</a>
</Link>
</span>}
</p>
<Form/>
</div>
</div>
</div>
</section>
</Layout>
);
}
}
export default Payment;
|
package pubbot.fun;
import com.jagrosh.jdautilities.command.Command;
import com.jagrosh.jdautilities.command.CommandEvent;
import com.jagrosh.jdautilities.commons.waiter.EventWaiter;
import net.dv8tion.jda.core.EmbedBuilder;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent;
import org.json.JSONArray;
import org.json.JSONObject;
import pubbot.utils.Messages;
import java.awt.*;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.concurrent.TimeUnit;
/**
 * "drink"/"recipe" command: searches TheCocktailDB for a cocktail by name and
 * replies with an embed describing it. When several drinks match, the user is
 * prompted (via the shared EventWaiter) to pick one by number within 2 minutes.
 */
public class DrinkCommand extends Command {
    // Shared waiter used to listen for the user's follow-up choice message.
    private EventWaiter waiter;

    public DrinkCommand(EventWaiter waiter) {
        this.name = "drink";
        this.aliases = new String[]{"recipe"};
        this.category = new Category("Fun");
        this.waiter = waiter;
        this.cooldown = 5;
        this.cooldownScope = CooldownScope.USER;
    }

    /**
     * Looks the requested drink up and either sends its recipe directly
     * (single match) or asks the user to choose (multiple matches).
     */
    protected void execute(CommandEvent event) {
        if(event.getArgs().trim().length() < 1){
            Messages.error(event, "If you want a drink, then tell me what drink you want smfh.");
            return;
        }
        String drink = event.getArgs().trim();
        HttpURLConnection urlConn = null;
        try {
            URL url = new URL("https://www.thecocktaildb.com/api/json/v2/8673533/search.php?s="+ URLEncoder.encode(drink, "UTF-8"));
            urlConn = (HttpURLConnection) url.openConnection();
            urlConn.setRequestMethod("GET");
            // Fix: the original also called setDoOutput(true), which marks the
            // connection as having a request body (POST semantics) — wrong for
            // this plain GET, so it has been removed.
            urlConn.connect();
            // Fix: try-with-resources closes the readers even on the early
            // "no drinks found" return (the original leaked them there).
            // NOTE(review): reads with the platform default charset; the API
            // serves UTF-8 JSON — consider an explicit charset. TODO confirm.
            try (InputStreamReader read = new InputStreamReader(urlConn.getInputStream());
                 BufferedReader each = new BufferedReader(read)) {
                String line;
                while ((line = each.readLine()) != null) {
                    JSONObject jsonObject = new JSONObject(line);
                    if(jsonObject.isNull("drinks")){
                        Messages.error(event, "No drinks found with the name **"+drink+"**.");
                        return;
                    }
                    else{
                        JSONArray drinks = jsonObject.getJSONArray("drinks");
                        if(drinks.length() > 1){
                            // Several matches: list them and wait for the user's pick.
                            StringBuilder choices = new StringBuilder();
                            for(int i = 0; i < drinks.length(); i++){
                                JSONObject indvDrinks = drinks.getJSONObject(i);
                                choices.append(i+1).append(". ").append(indvDrinks.get("strDrink")).append("\n");
                            }
                            EmbedBuilder em = new EmbedBuilder();
                            em.setTitle("Pick which type of **"+drink.toLowerCase()+"** you want.");
                            em.setColor(Color.getHSBColor(294, 71,89));
                            em.setDescription(choices.toString());
                            em.setFooter("Reply with the number by the drink or `cancel`.",null);
                            Message m = event.getTextChannel().sendMessage(em.build()).complete();
                            waitForChoice(event, drinks, m);
                        }
                        else if(drinks.length() == 1){
                            makeDrink(event, drinks, 0);
                        }
                        else{
                            Messages.error(event, "Not sure what happened but things didn't workout. Try again.");
                            return;
                        }
                    }
                }
            }
        }catch (Exception ex){
            Messages.error(event, "Not sure what happened but things didn't workout. Try again and notify the bitch who made me.");
            ex.printStackTrace();
        } finally {
            // Always release the underlying connection.
            if (urlConn != null) urlConn.disconnect();
        }
    }

    /**
     * Waits up to two minutes for the invoking user to reply with the number
     * of the drink they want (or "cancel"), then sends that drink's recipe.
     */
    private void waitForChoice(CommandEvent event, JSONArray drinks, Message m) {
        waiter.waitForEvent(GuildMessageReceivedEvent.class,
                // Only react to a message from the same user in the same channel.
                e -> e.getAuthor().equals(event.getAuthor()) && e.getChannel().equals(event.getChannel()) && !e.getAuthor().isBot(),
                e -> {
                    // Remove the choice prompt once the user has answered.
                    if(m != null){
                        m.delete().queue();
                    }
                    if(e.getMessage().getContentRaw().trim().equalsIgnoreCase("cancel"))
                    {
                        Messages.reply(event, "Smh, pathetic, stop being a wuss.");
                        return;
                    }
                    else {
                        String choice = e.getMessage().getContentRaw().trim();
                        int choiceNum = 0;
                        try{
                            choiceNum = Integer.parseInt(choice);
                        }catch (NumberFormatException ex){
                            Messages.error(event, "Wtf, pick a number not anything else, drink cancelled.");
                            return;
                        }
                        if(choiceNum < 1 || choiceNum > drinks.length()) {Messages.reply(event, "God damn it, can't you see that's not in the range."); return;}
                        // User input is 1-based; the JSON array is 0-based.
                        makeDrink(event, drinks, choiceNum-1);
                    }
                },
                2, TimeUnit.MINUTES, () -> Messages.errorTimed(event, "Smh slowpoke, "+event.getAuthor().getAsMention()+". I'm not serving you a drink, you took too long.", 10, TimeUnit.SECONDS));
    }

    /**
     * Builds and sends the recipe embed for {@code drinks[choiceNum]}:
     * names, glass, measures, ingredients, instructions, and thumbnail.
     * Optional fields are skipped when null in the API response.
     */
    private void makeDrink(CommandEvent event, JSONArray drinks, int choiceNum) {
        EmbedBuilder em = new EmbedBuilder();
        em.setColor(Color.getHSBColor(294, 71,89));
        JSONObject drink = drinks.getJSONObject(choiceNum);
        em.setTitle(drink.getString("strDrink"));
        em.setDescription(drink.getString("strCategory"));
        // Drink ID
        em.addField("Drink ID", drink.getString("idDrink"), true);
        // Alcoholic or not
        em.addField("Alcoholic?", drink.getString("strAlcoholic"), true);
        // Alternate Names
        if(!drink.isNull("strDrinkAlternate")) em.addField("Alternate Names", drink.getString("strDrinkAlternate"), true);
        // Different languages
        StringBuilder diffLang = new StringBuilder();
        if(!drink.isNull("strDrinkES")) diffLang.append("Spanish: ").append(drink.get("strDrinkES")).append("\n");
        if(!drink.isNull("strDrinkDE")) diffLang.append("German: ").append(drink.get("strDrinkDE")).append("\n");
        if(!drink.isNull("strDrinkFR")) diffLang.append("French: ").append(drink.get("strDrinkFR")).append("\n");
        if(!drink.isNull("strDrinkZH-HANS")) diffLang.append("Chinese Simplified: ").append(drink.get("strDrinkZH-HANS")).append("\n");
        if(!drink.isNull("strDrinkZH-HANT")) diffLang.append("Chinese Traditional: ").append(drink.get("strDrinkZH-HANT")).append("\n");
        if(!diffLang.toString().trim().isEmpty()) em.addField("Other Languages", diffLang.toString(), true);
        // Glass Type
        if(!drink.isNull("strGlass")) em.addField("Glass", drink.getString("strGlass"), true);
        // Measures (the API exposes up to 15 numbered fields)
        StringBuilder measures = new StringBuilder();
        for(int i = 1; i <= 15; i++){
            String givenMeasure = drink.isNull("strMeasure"+i) ? "" : drink.getString("strMeasure"+i);
            if(!givenMeasure.trim().isEmpty()) measures.append(i).append(". ").append(givenMeasure).append("\n");
        }
        if(!measures.toString().isEmpty()) em.addField("Measures", measures.toString(), true);
        // Ingredients (also up to 15 numbered fields)
        StringBuilder ingredients = new StringBuilder();
        for(int i = 1; i <= 15; i++){
            String givenIngredient = drink.isNull("strIngredient"+i) ? "" : drink.getString("strIngredient"+i);
            if(!givenIngredient.trim().isEmpty()) ingredients.append(i).append(". ").append(givenIngredient).append("\n");
        }
        if(!ingredients.toString().isEmpty()) em.addField("Ingredients", ingredients.toString(), true);
        // Instructions
        if(!drink.isNull("strInstructions")) em.addField("Instruction", drink.getString("strInstructions"), false);
        // Thumbnail
        if(!drink.isNull("strDrinkThumb")) em.setThumbnail(drink.getString("strDrinkThumb"));
        // Modified
        if(!drink.isNull("dateModified")) em.setFooter("Date Modified: " + drink.getString("dateModified"), null);
        else em.setFooter("Date Modified: -", null);
        event.getTextChannel().sendMessage(em.build()).queue();
    }
}
|
<reponame>jcgay/homebrew
require 'formula'
# Download strategy for .src.rpm sources: fetches with curl, then during stage
# extracts the embedded source tarball from the rpm's cpio payload.
class RpmDownloadStrategy < CurlDownloadStrategy
def initialize name, resource
super
# Name of the tarball expected inside the rpm payload.
@tarball_name = "#{name}-#{resource.version}.tar.gz"
end
# Pull the tarball out of the rpm, untar it, remove it, and cd into the tree.
def stage
safe_system "rpm2cpio.pl <#{tarball_path} | cpio -vi #{@tarball_name}"
safe_system "/usr/bin/tar -xzf #{@tarball_name} && rm #{@tarball_name}"
chdir
end
# Keep the .src.rpm extension for the cached download.
def ext
".src.rpm"
end
end
# Homebrew formula for RPM 5.4.x, fetched as an upstream .src.rpm via a
# custom download strategy.
class Rpm < Formula
homepage 'http://www.rpm5.org/'
url 'http://rpm5.org/files/rpm/rpm-5.4/rpm-5.4.11-0.20130708.src.rpm',
:using => RpmDownloadStrategy
version '5.4.11'
sha1 'a40328cf49f43d33746c503a390e3955f5bd3680'
depends_on 'berkeley-db'
depends_on 'libmagic'
depends_on 'popt'
depends_on 'beecrypt'
depends_on 'libtasn1'
depends_on 'neon'
depends_on 'gettext'
depends_on 'xz'
depends_on 'ossp-uuid'
depends_on 'pcre'
depends_on 'rpm2cpio' => :build
# On OS X 10.8+ apply the inline patch (DATA, below) that renames the
# uuid_t symbols while including <grp.h> to avoid a clash with ossp-uuid.
def patches
{ :p0 => DATA } if MacOS.version >= :mountain_lion
end
def install
# Point rpm at the external (Homebrew-provided) copies of its libraries
# rather than its bundled ones; lua and syck stay internal.
args = %W[
--prefix=#{prefix}
--localstatedir=#{var}
--with-path-cfg=#{etc}/rpm
--with-extra-path-macros=#{lib}/rpm/macros.*
--disable-openmp
--disable-nls
--disable-dependency-tracking
--with-db=external
--with-file=external
--with-popt=external
--with-beecrypt=external
--with-libtasn1=external
--with-neon=external
--with-uuid=external
--with-pcre=external
--with-lua=internal
--with-syck=internal
--without-apidocs
varprefix=#{var}
]
system "./configure", *args
system "make"
system "make install"
end
end
__END__
diff -u -rrpm-5_4_11-release -rrpm-5_4
--- system.h 26 Jul 2012 12:56:08 -0000 2.129.2.5
+++ system.h 9 Aug 2013 10:30:22 -0000 2.129.2.8
@@ -323,7 +323,13 @@
#endif
#if defined(HAVE_GRP_H)
+#define uuid_t unistd_uuid_t /* XXX Mac OS X dares to be different. */
+#define uuid_create unistd_uuid_create
+#define uuid_compare unistd_uuid_compare
#include <grp.h>
+#undef unistd_uuid_t /* XXX Mac OS X dares to be different. */
+#undef unistd_uuid_create
+#undef unistd_uuid_compare
#endif
#if defined(HAVE_LIMITS_H)
|
#!/bin/bash
#
# Runs clang-tidy over the Halide source tree. Pass -fix to apply fixes.
# Requires CLANG_TIDY_LLVM_INSTALL_DIR to point at an LLVM 10 install.
set -e

ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Exit nonzero on misuse so callers (and CI) notice; the original exited 0.
[[ "$1" != "" && "$1" != "-fix" ]] && echo "The only supported argument is -fix" && exit 1

FIX=$1

# We are currently standardized on using LLVM/Clang10 for this script.
# Note that this is totally independent of the version of LLVM that you
# are using to build Halide itself. If you don't have LLVM10 installed,
# you can usually install what you need easily via:
#
# sudo apt-get install llvm-10 clang-10 libclang-10-dev clang-tidy-10
# export CLANG_TIDY_LLVM_INSTALL_DIR=/usr/lib/llvm-10
[ -z "$CLANG_TIDY_LLVM_INSTALL_DIR" ] && echo "CLANG_TIDY_LLVM_INSTALL_DIR must point to an LLVM installation dir for this script." && exit 1

echo CLANG_TIDY_LLVM_INSTALL_DIR = "${CLANG_TIDY_LLVM_INSTALL_DIR}"

# Use a temp folder for the CMake stuff here, so it's fresh & correct every time
CLANG_TIDY_BUILD_DIR=$(mktemp -d)
echo CLANG_TIDY_BUILD_DIR = "${CLANG_TIDY_BUILD_DIR}"

echo Building compile_commands.json...
cmake -DCMAKE_BUILD_TYPE=Debug \
      -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
      -DLLVM_DIR="${CLANG_TIDY_LLVM_INSTALL_DIR}/lib/cmake/llvm" \
      -S "${ROOT_DIR}" \
      -B "${CLANG_TIDY_BUILD_DIR}" \
      > /dev/null

# -f (file exists) instead of the obsolete -a; set -e aborts if missing.
[ -f "${CLANG_TIDY_BUILD_DIR}/compile_commands.json" ]

# We must populate the includes directory to check things outside of src/
cd "${CLANG_TIDY_BUILD_DIR}" && make HalideIncludes

RUN_CLANG_TIDY=${CLANG_TIDY_LLVM_INSTALL_DIR}/share/clang/run-clang-tidy.py

# We deliberately skip apps/ and test/ for now, as the compile commands won't include
# generated headers files from Generators.
#
# Skip DefaultCostModel.cpp as it relies on cost_model.h.
# Skip GenGen.cpp and RunGenMain.cpp as they bring clang-tidy to its knees,
# for reasons that aren't entirely clear yet.
#
# The -name patterns are quoted so the shell cannot glob-expand them
# against files in the current directory before find ever sees them.
CLANG_TIDY_TARGETS=$(find \
     "${ROOT_DIR}/src" \
     "${ROOT_DIR}/python_bindings" \
     "${ROOT_DIR}/tools" \
     "${ROOT_DIR}/util" \
     \( -name "*.cpp" -o -name "*.h" -o -name "*.c" \) -and -not -wholename "*/.*" \
     ! -name DefaultCostModel.cpp \
     ! -name GenGen.cpp \
     ! -name RunGenMain.cpp)

# clang-tidy doesn't have a sane way to exclude third-party headers (e.g. pybind11),
# so we will instead build an include filter
CLANG_TIDY_HEADER_FILTER=".*/src/.*|.*/python_bindings/.*|.*/tools/.*|.*/util/.*"

# ${FIX} and ${CLANG_TIDY_TARGETS} are intentionally unquoted: FIX may be
# empty and TARGETS is a whitespace-separated list of files.
${RUN_CLANG_TIDY} \
    ${FIX} \
    -header-filter="${CLANG_TIDY_HEADER_FILTER}" \
    -quiet \
    -p "${CLANG_TIDY_BUILD_DIR}" \
    -clang-tidy-binary "${CLANG_TIDY_LLVM_INSTALL_DIR}/bin/clang-tidy" \
    -clang-apply-replacements-binary "${CLANG_TIDY_LLVM_INSTALL_DIR}/bin/clang-apply-replacements" \
    ${CLANG_TIDY_TARGETS} \
    2>&1 | grep -v "warnings generated" | sed "s|.*/||"

# Exit status of run-clang-tidy itself, not of grep/sed in the pipe.
RESULT=${PIPESTATUS[0]}
echo run-clang-tidy finished with status "${RESULT}"
rm -rf "${CLANG_TIDY_BUILD_DIR}"
exit "$RESULT"
|
#!/bin/bash
#
# Installs the SungemSDK native components plus the Python requirements.
# Pass "tuna" as the first argument to fetch packages from the TUNA
# PyPI mirror.
set -e

echo "****** INSTALLATION START ******"

# Absolute directory of this script; $(...) instead of legacy backticks,
# and quoted so paths containing spaces survive.
script_path=$(cd "$(dirname "$0")" && pwd)

#----------------------------------------
bash "$script_path/../../SungemSDK/installer/macOS/install.sh" >/dev/null
#----------------------------------------

if [ "$1" = "tuna" ] ; then
    echo "Using TUNA mirror"
    INDEX_URL="-i https://pypi.tuna.tsinghua.edu.cn/simple"
fi

# $INDEX_URL is intentionally unquoted: it must word-split into the
# "-i <url>" option pair, or into nothing when unset.
pip3 install $INDEX_URL -r "$script_path/../requirements_pip.txt"

#----------------------------------------
echo "****** INSTALLATION COMPLETE ******"
|
<gh_stars>0
package helper
import (
"context"
"time"
)
// suppressedContext suppresses cancellation or expiration of the context.
type suppressedContext struct{ context.Context }
func (suppressedContext) Deadline() (deadline time.Time, ok bool) { return time.Time{}, false }
func (suppressedContext) Done() <-chan struct{} { return nil }
func (suppressedContext) Err() error { return nil }
// SuppressCancellation returns a context that suppresses cancellation or expiration of the parent context.
func SuppressCancellation(ctx context.Context) context.Context { return suppressedContext{ctx} }
|
#!/bin/bash
##############################################################################
# Tim H 2020
# Description: creates a compressed file containing the logs neccessary
# to troubleshoot dynamic connections to cloud providers like AWS and Azure
# It also sets the ownership and permissions so it is easier to SCP off to
# another system.
#
# References:
# InsightVM log file explanations: https://docs.rapid7.com/insightvm/troubleshooting/#locating-each-log-file-and-understanding-its-purpose
##############################################################################

# Date-stamped destination archive in the invoking user's home directory.
ZIP_PATH="$HOME/rapid7-insightvm-console-cloud_debug_logs-$(date +%F).tar.gz"

# Console logs relevant to dynamic cloud-connection troubleshooting.
LOG_DIR="/opt/rapid7/nexpose/nsc/logs"
sudo tar -czf "$ZIP_PATH" \
    "$LOG_DIR/eso.log" \
    "$LOG_DIR/nsc.log" \
    "$LOG_DIR/nse.log"

# Make the archive readable by, and owned by, the current user so it can
# be copied off without root.
sudo chmod +r "$ZIP_PATH"
sudo chown "$USER" "$ZIP_PATH"

ls -lah "$ZIP_PATH"
echo "ZIP file is located at: $ZIP_PATH"
|
/*
* Copyright 2015-2021 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nexttypes.datatypes;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.nexttypes.system.KeyWords;
/**
 * Result page holding an array of {@link NXObject} items together with the
 * paging metadata (count, offset, limit bounds) carried by {@code Result}.
 * Serializes to XML as an {@code objects} element wrapping {@code object}
 * items, and to JSON under the {@code items} property.
 */
@JacksonXmlRootElement(localName = KeyWords.OBJECTS)
public class Objects extends Result {

    // Objects in this page; empty array for an empty result.
    protected NXObject[] items;

    /** Creates an empty result: no items, count of zero, no limits set. */
    public Objects() {
        this(new NXObject[]{}, 0L, null, null, null, null, null);
    }

    /**
     * Creates a result page.
     *
     * @param items          objects in this page
     * @param count          total count, passed through to {@code Result}
     * @param offset         paging offset, passed through to {@code Result}
     * @param limit          page limit, passed through to {@code Result}
     * @param minLimit       minimum allowed limit, passed through to {@code Result}
     * @param maxLimit       maximum allowed limit, passed through to {@code Result}
     * @param limitIncrement limit step, passed through to {@code Result}
     */
    public Objects(NXObject[] items, Long count, Long offset, Long limit, Long minLimit, Long maxLimit,
            Long limitIncrement) {
        super(count, offset, limit, minLimit, maxLimit, limitIncrement);
        this.items = items;
    }

    /** Returns the objects in this page (live array, not a copy). */
    @JacksonXmlElementWrapper(localName = KeyWords.ITEMS)
    @JacksonXmlProperty(localName = KeyWords.OBJECT)
    @JsonProperty(KeyWords.ITEMS)
    public NXObject[] getItems() {
        return items;
    }
}
|
<filename>src/rules-configurations/eslint/brace-style.d.ts<gh_stars>0
import { RuleConfiguration } from '../../../support/Rule'

// Options tuple for the ESLint "brace-style" rule: a brace style name
// ("1tbs" | "stroustrup" | "allman") and/or an object allowing
// single-line blocks.
type Options = (("1tbs" | "stroustrup" | "allman") | {
    allowSingleLine?: boolean
})[]

// Fully-typed configuration entry for eslint's brace-style rule.
type Configuration = RuleConfiguration<'brace-style', 'eslint', Options>

export default Configuration
|
<filename>Test/src/test-three.js/ConfigPanel.js
import DockingPanel from 'DockingPanel'
// Docking panel hosting the configuration UI of the particle demo.
export default class ConfigPanel extends DockingPanel {

  /////////////////////////////////////////////////////////////
  // Creates a closable panel titled "Configuration" inside
  // domContainer, and tags the panel container with the CSS
  // classes used by the demo's styling.
  /////////////////////////////////////////////////////////////
  constructor (domContainer) {

    super(domContainer, 'Configuration', {
      closable: true
    })

    $(this.container).addClass('particle')
    $(this.container).addClass('config')
  }

  /////////////////////////////////////////////////////////////
  // Returns the panel's inner HTML markup; the id parameter is
  // unused here.
  /////////////////////////////////////////////////////////////
  htmlContent (id) {

    return `
      <div class="container">
      </div>`
  }
}
|
/*****************************************************************************
*
* Copyright (c) 2000 - 2010, Lawrence Livermore National Security, LLC
* Produced at the Lawrence Livermore National Laboratory
* LLNL-CODE-400124
* All rights reserved.
*
* This file is part of VisIt. For details, see https://visit.llnl.gov/. The
* full copyright notice is contained in the file COPYRIGHT located at the root
* of the VisIt distribution or at http://www.llnl.gov/visit/copyright.html.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the disclaimer below.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the disclaimer (as noted below) in the
* documentation and/or other materials provided with the distribution.
* - Neither the name of the LLNS/LLNL nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL LAWRENCE LIVERMORE NATIONAL SECURITY,
* LLC, THE U.S. DEPARTMENT OF ENERGY OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
*****************************************************************************/
#ifndef PICKATTRIBUTES_H
#define PICKATTRIBUTES_H
#include <state_exports.h>
#include <string>
#include <AttributeSubject.h>
class PickVarInfo;
#include <visitstream.h>
// ****************************************************************************
// Class: PickAttributes
//
// Purpose:
// This class contains attributes used for pick.
//
// Notes: Autogenerated by xml2atts.
//
// Programmer: xml2atts
// Creation: omitted
//
// Modifications:
//
// ****************************************************************************
class STATE_API PickAttributes : public AttributeSubject
{
public:
    // What kind of mesh element the pick targets and how it is addressed
    // (by curve, by domain-local id, etc.).
    enum PickType
    {
        Zone,
        Node,
        CurveZone,
        CurveNode,
        DomainZone,
        DomainNode
    };
    // Interpretation/ordering of 2D mesh coordinates (XY, RZ or ZR).
    enum CoordinateType
    {
        XY,
        RZ,
        ZR
    };

    PickAttributes();
    PickAttributes(const PickAttributes &obj);
    virtual ~PickAttributes();

    virtual PickAttributes& operator = (const PickAttributes &obj);
    virtual bool operator == (const PickAttributes &obj) const;
    virtual bool operator != (const PickAttributes &obj) const;

    virtual const std::string TypeName() const;
    virtual bool CopyAttributes(const AttributeGroup *);
    virtual AttributeSubject *CreateCompatible(const std::string &) const;
    virtual AttributeSubject *NewInstance(bool) const;

    // Property selection methods
    virtual void SelectAll();
    void SelectVariables();
    void SelectPickLetter();
    void SelectIncidentElements();
    void SelectDatabaseName();
    void SelectActiveVariable();
    void SelectPickPoint();
    void SelectCellPoint();
    void SelectNodePoint();
    void SelectPlotBounds();
    void SelectRayPoint1();
    void SelectRayPoint2();
    void SelectMeshInfo();
    void SelectRealIncidentElements();
    void SelectPnodeCoords();
    void SelectDnodeCoords();
    void SelectBnodeCoords();
    void SelectDzoneCoords();
    void SelectBzoneCoords();
    void SelectVarInfo();
    void SelectInvalidVars();
    void SelectErrorMessage();
    void SelectBlockPieceName();
    void SelectGroupPieceName();
    void SelectGhosts();
    void SelectGlobalIncidentElements();
    void SelectSubsetName();
    void SelectFloatFormat();

    // Property setting methods
    void SetVariables(const stringVector &variables_);
    void SetDisplayIncidentElements(bool displayIncidentElements_);
    void SetShowNodeId(bool showNodeId_);
    void SetShowNodeDomainLogicalCoords(bool showNodeDomainLogicalCoords_);
    void SetShowNodeBlockLogicalCoords(bool showNodeBlockLogicalCoords_);
    void SetShowNodePhysicalCoords(bool showNodePhysicalCoords_);
    void SetShowZoneId(bool showZoneId_);
    void SetShowZoneDomainLogicalCoords(bool showZoneDomainLogicalCoords_);
    void SetShowZoneBlockLogicalCoords(bool showZoneBlockLogicalCoords_);
    void SetClearWindow(bool clearWindow_);
    void SetPickLetter(const std::string &pickLetter_);
    void SetFulfilled(bool fulfilled_);
    void SetPickType(PickType pickType_);
    void SetDomain(int domain_);
    void SetElementNumber(int elementNumber_);
    void SetIncidentElements(const intVector &incidentElements_);
    void SetTimeStep(int timeStep_);
    void SetDimension(int dimension_);
    void SetDatabaseName(const std::string &databaseName_);
    void SetActiveVariable(const std::string &activeVariable_);
    void SetPickPoint(const double *pickPoint_);
    void SetCellPoint(const double *cellPoint_);
    void SetNodePoint(const double *nodePoint_);
    void SetPlotBounds(const double *plotBounds_);
    void SetRayPoint1(const double *rayPoint1_);
    void SetRayPoint2(const double *rayPoint2_);
    void SetMeshInfo(const std::string &meshInfo_);
    void SetRealElementNumber(int realElementNumber_);
    void SetRealIncidentElements(const intVector &realIncidentElements_);
    void SetPnodeCoords(const stringVector &pnodeCoords_);
    void SetDnodeCoords(const stringVector &dnodeCoords_);
    void SetBnodeCoords(const stringVector &bnodeCoords_);
    void SetDzoneCoords(const stringVector &dzoneCoords_);
    void SetBzoneCoords(const stringVector &bzoneCoords_);
    void SetNeedTransformMessage(bool needTransformMessage_);
    void SetInvalidVars(const stringVector &invalidVars_);
    void SetDoTimeCurve(bool doTimeCurve_);
    void SetErrorMessage(const std::string &errorMessage_);
    void SetError(bool error_);
    void SetMatSelected(bool matSelected_);
    void SetNeedActualCoords(bool needActualCoords_);
    void SetConciseOutput(bool conciseOutput_);
    void SetShowTimeStep(bool showTimeStep_);
    void SetShowMeshName(bool showMeshName_);
    void SetBlockPieceName(const std::string &blockPieceName_);
    void SetGroupPieceName(const std::string &groupPieceName_);
    void SetGhosts(const intVector &ghosts_);
    void SetIncludeGhosts(bool includeGhosts_);
    void SetElementIsGhost(bool elementIsGhost_);
    void SetRequiresGlyphPick(bool requiresGlyphPick_);
    void SetLocationSuccessful(bool locationSuccessful_);
    void SetDisplayGlobalIds(bool displayGlobalIds_);
    void SetGlobalElement(int globalElement_);
    void SetGlobalIncidentElements(const intVector &globalIncidentElements_);
    void SetElementIsGlobal(bool elementIsGlobal_);
    void SetDisplayPickLetter(bool displayPickLetter_);
    void SetReusePickLetter(bool reusePickLetter_);
    void SetGhostType(int ghostType_);
    void SetHasMixedGhostTypes(int hasMixedGhostTypes_);
    void SetLinesData(bool linesData_);
    void SetInputTopoDim(int inputTopoDim_);
    void SetMeshCoordType(CoordinateType meshCoordType_);
    void SetCreateSpreadsheet(bool createSpreadsheet_);
    void SetSubsetName(const std::string &subsetName_);
    void SetFloatFormat(const std::string &floatFormat_);
    void SetTimePreserveCoord(bool timePreserveCoord_);

    // Property getting methods (const and non-const accessors per field)
    const stringVector &GetVariables() const;
    stringVector &GetVariables();
    bool GetDisplayIncidentElements() const;
    bool GetShowNodeId() const;
    bool GetShowNodeDomainLogicalCoords() const;
    bool GetShowNodeBlockLogicalCoords() const;
    bool GetShowNodePhysicalCoords() const;
    bool GetShowZoneId() const;
    bool GetShowZoneDomainLogicalCoords() const;
    bool GetShowZoneBlockLogicalCoords() const;
    bool GetClearWindow() const;
    const std::string &GetPickLetter() const;
    std::string &GetPickLetter();
    bool GetFulfilled() const;
    PickType GetPickType() const;
    int GetDomain() const;
    int GetElementNumber() const;
    const intVector &GetIncidentElements() const;
    intVector &GetIncidentElements();
    int GetTimeStep() const;
    int GetDimension() const;
    const std::string &GetDatabaseName() const;
    std::string &GetDatabaseName();
    const std::string &GetActiveVariable() const;
    std::string &GetActiveVariable();
    const double *GetPickPoint() const;
    double *GetPickPoint();
    const double *GetCellPoint() const;
    double *GetCellPoint();
    const double *GetNodePoint() const;
    double *GetNodePoint();
    const double *GetPlotBounds() const;
    double *GetPlotBounds();
    const double *GetRayPoint1() const;
    double *GetRayPoint1();
    const double *GetRayPoint2() const;
    double *GetRayPoint2();
    const std::string &GetMeshInfo() const;
    std::string &GetMeshInfo();
    int GetRealElementNumber() const;
    const intVector &GetRealIncidentElements() const;
    intVector &GetRealIncidentElements();
    const stringVector &GetPnodeCoords() const;
    stringVector &GetPnodeCoords();
    const stringVector &GetDnodeCoords() const;
    stringVector &GetDnodeCoords();
    const stringVector &GetBnodeCoords() const;
    stringVector &GetBnodeCoords();
    const stringVector &GetDzoneCoords() const;
    stringVector &GetDzoneCoords();
    const stringVector &GetBzoneCoords() const;
    stringVector &GetBzoneCoords();
    bool GetNeedTransformMessage() const;
    const AttributeGroupVector &GetVarInfo() const;
    AttributeGroupVector &GetVarInfo();
    const stringVector &GetInvalidVars() const;
    stringVector &GetInvalidVars();
    bool GetDoTimeCurve() const;
    const std::string &GetErrorMessage() const;
    std::string &GetErrorMessage();
    bool GetError() const;
    bool GetMatSelected() const;
    bool GetNeedActualCoords() const;
    bool GetConciseOutput() const;
    bool GetShowTimeStep() const;
    bool GetShowMeshName() const;
    const std::string &GetBlockPieceName() const;
    std::string &GetBlockPieceName();
    const std::string &GetGroupPieceName() const;
    std::string &GetGroupPieceName();
    const intVector &GetGhosts() const;
    intVector &GetGhosts();
    bool GetIncludeGhosts() const;
    bool GetElementIsGhost() const;
    bool GetRequiresGlyphPick() const;
    bool GetLocationSuccessful() const;
    bool GetDisplayGlobalIds() const;
    int GetGlobalElement() const;
    const intVector &GetGlobalIncidentElements() const;
    intVector &GetGlobalIncidentElements();
    bool GetElementIsGlobal() const;
    bool GetDisplayPickLetter() const;
    bool GetReusePickLetter() const;
    int GetGhostType() const;
    int GetHasMixedGhostTypes() const;
    bool GetLinesData() const;
    int GetInputTopoDim() const;
    CoordinateType GetMeshCoordType() const;
    bool GetCreateSpreadsheet() const;
    const std::string &GetSubsetName() const;
    std::string &GetSubsetName();
    const std::string &GetFloatFormat() const;
    std::string &GetFloatFormat();
    bool GetTimePreserveCoord() const;

    // Persistence methods (DataNode serialization)
    virtual bool CreateNode(DataNode *node, bool completeSave, bool forceAdd);
    virtual void SetFromNode(DataNode *node);

    // Attributegroup convenience methods for the varInfo sub-attribute list
    void AddVarInfo(const PickVarInfo &);
    void ClearVarInfos();
    void RemoveVarInfo(int i);
    int GetNumVarInfos() const;
    PickVarInfo &GetVarInfo(int i);
    const PickVarInfo &GetVarInfo(int i) const;
    PickVarInfo &operator [] (int i);
    const PickVarInfo &operator [] (int i) const;

    // Enum conversion functions
    static std::string PickType_ToString(PickType);
    static bool PickType_FromString(const std::string &, PickType &);
protected:
    static std::string PickType_ToString(int);
public:
    static std::string CoordinateType_ToString(CoordinateType);
    static bool CoordinateType_FromString(const std::string &, CoordinateType &);
protected:
    static std::string CoordinateType_ToString(int);
public:

    // Keyframing methods
    virtual std::string GetFieldName(int index) const;
    virtual AttributeGroup::FieldType GetFieldType(int index) const;
    virtual std::string GetFieldTypeName(int index) const;
    virtual bool FieldsEqual(int index, const AttributeGroup *rhs) const;

    // User-defined methods (hand-written additions to the generated class)
    void PrintSelf(ostream &os);
    void CreateOutputString(std::string &os, bool withLetter = true);
    void PrepareForNewPick();
    void CreateConciseOutputString(std::string &os, bool withLetter = true);
    void SetRayPoint1(const doubleVector &);
    void SetRayPoint2(const doubleVector &);

    // IDs that can be used to identify fields in case statements.
    // NOTE: the order here mirrors the declaration order of the private
    // fields below; keep the two lists in sync.
    enum {
        ID_variables = 0,
        ID_displayIncidentElements,
        ID_showNodeId,
        ID_showNodeDomainLogicalCoords,
        ID_showNodeBlockLogicalCoords,
        ID_showNodePhysicalCoords,
        ID_showZoneId,
        ID_showZoneDomainLogicalCoords,
        ID_showZoneBlockLogicalCoords,
        ID_clearWindow,
        ID_pickLetter,
        ID_fulfilled,
        ID_pickType,
        ID_domain,
        ID_elementNumber,
        ID_incidentElements,
        ID_timeStep,
        ID_dimension,
        ID_databaseName,
        ID_activeVariable,
        ID_pickPoint,
        ID_cellPoint,
        ID_nodePoint,
        ID_plotBounds,
        ID_rayPoint1,
        ID_rayPoint2,
        ID_meshInfo,
        ID_realElementNumber,
        ID_realIncidentElements,
        ID_pnodeCoords,
        ID_dnodeCoords,
        ID_bnodeCoords,
        ID_dzoneCoords,
        ID_bzoneCoords,
        ID_needTransformMessage,
        ID_varInfo,
        ID_invalidVars,
        ID_doTimeCurve,
        ID_errorMessage,
        ID_error,
        ID_matSelected,
        ID_needActualCoords,
        ID_conciseOutput,
        ID_showTimeStep,
        ID_showMeshName,
        ID_blockPieceName,
        ID_groupPieceName,
        ID_ghosts,
        ID_includeGhosts,
        ID_elementIsGhost,
        ID_requiresGlyphPick,
        ID_locationSuccessful,
        ID_displayGlobalIds,
        ID_globalElement,
        ID_globalIncidentElements,
        ID_elementIsGlobal,
        ID_displayPickLetter,
        ID_reusePickLetter,
        ID_ghostType,
        ID_hasMixedGhostTypes,
        ID_linesData,
        ID_inputTopoDim,
        ID_meshCoordType,
        ID_createSpreadsheet,
        ID_subsetName,
        ID_floatFormat,
        ID_timePreserveCoord
    };

protected:
    AttributeGroup *CreateSubAttributeGroup(int index);
private:
    // Backing fields; declaration order matches the ID_ enum above.
    // pickType and meshCoordType are stored as int (set/get through the
    // PickType / CoordinateType enums).
    stringVector variables;
    bool displayIncidentElements;
    bool showNodeId;
    bool showNodeDomainLogicalCoords;
    bool showNodeBlockLogicalCoords;
    bool showNodePhysicalCoords;
    bool showZoneId;
    bool showZoneDomainLogicalCoords;
    bool showZoneBlockLogicalCoords;
    bool clearWindow;
    std::string pickLetter;
    bool fulfilled;
    int pickType;
    int domain;
    int elementNumber;
    intVector incidentElements;
    int timeStep;
    int dimension;
    std::string databaseName;
    std::string activeVariable;
    double pickPoint[3];
    double cellPoint[3];
    double nodePoint[3];
    double plotBounds[6];
    double rayPoint1[3];
    double rayPoint2[3];
    std::string meshInfo;
    int realElementNumber;
    intVector realIncidentElements;
    stringVector pnodeCoords;
    stringVector dnodeCoords;
    stringVector bnodeCoords;
    stringVector dzoneCoords;
    stringVector bzoneCoords;
    bool needTransformMessage;
    AttributeGroupVector varInfo;
    stringVector invalidVars;
    bool doTimeCurve;
    std::string errorMessage;
    bool error;
    bool matSelected;
    bool needActualCoords;
    bool conciseOutput;
    bool showTimeStep;
    bool showMeshName;
    std::string blockPieceName;
    std::string groupPieceName;
    intVector ghosts;
    bool includeGhosts;
    bool elementIsGhost;
    bool requiresGlyphPick;
    bool locationSuccessful;
    bool displayGlobalIds;
    int globalElement;
    intVector globalIncidentElements;
    bool elementIsGlobal;
    bool displayPickLetter;
    bool reusePickLetter;
    int ghostType;
    int hasMixedGhostTypes;
    bool linesData;
    int inputTopoDim;
    int meshCoordType;
    bool createSpreadsheet;
    std::string subsetName;
    std::string floatFormat;
    bool timePreserveCoord;

    // Static class format string for type map.
    static const char *TypeMapFormatString;
};
#endif
|
# When invoked for Leiningen, point lein at the jenv-selected JVM and
# forward any jenv-specific JVM options to it.
if [ "$1" = "lein" ]; then
  exportVariable JAVA_CMD "$JAVA_HOME/bin/java"
  exportVariable LEIN_JAVA_CMD "$JAVA_HOME/bin/java"
  if [ -n "$JENV_OPTIONS" ]; then
    # Only apply jenv's options if the user hasn't set their own.
    if [ -z "$LEIN_JVM_OPTS" ]; then
      # NOTE(review): $JENV_OPTIONS is unquoted, so it word-splits into
      # multiple arguments to exportVariable — confirm that is what
      # exportVariable expects.
      exportVariable LEIN_JVM_OPTS $JENV_OPTIONS
      exportVariable JVM_OPTS $JENV_OPTIONS
    else
      echo "LEIN_JVM_OPTS is set and not overridden by jenv"
    fi
    # JENV_OPTIONS has been consumed above; clear it.
    unset JENV_OPTIONS
  fi
fi
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2016 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cmdline;
import static org.sosy_lab.cpachecker.cmdline.CmdLineArguments.putIfNotExistent;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import org.sosy_lab.cpachecker.cmdline.CmdLineArguments.InvalidCmdlineArgumentException;
/**
 * One command-line argument of the CPAchecker front-end: a set of
 * alternative names plus the logic that maps the argument (and possibly a
 * following value) onto configuration properties.
 */
abstract class CmdLineArgument implements Comparable<CmdLineArgument> {

  // All accepted spellings of this argument (e.g. "-h" / "-help").
  private final ImmutableSet<String> names;

  private String description = ""; // changed later, if needed

  CmdLineArgument(String... pNames) {
    names = ImmutableSet.copyOf(pNames);
  }

  /** Sets the help text for this argument and returns {@code this} for chaining. */
  CmdLineArgument withDescription(String pDescription) {
    description = pDescription;
    return this;
  }

  /**
   * Orders arguments by the printed form of their name sets (used to sort
   * help output).
   * NOTE(review): the ordering depends on the insertion order of the names
   * inside the ImmutableSet, so it is not guaranteed consistent with
   * {@link #equals} for sets containing the same names in different order —
   * confirm this is acceptable for the intended use.
   */
  @Override
  public int compareTo(CmdLineArgument other) {
    return names.toString().compareTo(other.names.toString());
  }

  /** Two arguments are equal iff they accept exactly the same set of names. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    return o instanceof CmdLineArgument && names.equals(((CmdLineArgument) o).names);
  }

  @Override
  public int hashCode() {
    return names.hashCode();
  }

  /** Renders "name1/name2" plus, if present, the description padded to column 20. */
  @Override
  public String toString() {
    String s = Joiner.on("/").join(names);
    if (description.isEmpty()) {
      return s;
    } else {
      // we rjust the description at column 20.
      return String.format("%1$-20s %2$s", s, description);
    }
  }

  /**
   * Applies this argument if {@code currentArg} is one of its names.
   *
   * @param properties the configuration properties to fill
   * @param currentArg the argument currently being parsed
   * @param argsIt iterator over the remaining arguments (may be consumed)
   * @return true if this argument matched and was handled
   */
  boolean apply(
      Map<String, String> properties,
      String currentArg,
      Iterator<String> argsIt)
      throws InvalidCmdlineArgumentException {
    if (names.contains(currentArg)) {
      apply0(properties, currentArg, argsIt);
      return true;
    }
    return false;
  }

  /** Subclass hook: performs the actual effect of a matched argument. */
  abstract void apply0(
      Map<String, String> properties,
      String currentArg,
      Iterator<String> argsIt)
      throws InvalidCmdlineArgumentException;

  /** The arg is a short replacement for an option with 'one' value given as next argument. */
  static class CmdLineArgument1 extends CmdLineArgument {

    // Name of the configuration option this argument sets; may be empty
    // when a subclass overrides handleArg.
    private final String option;

    CmdLineArgument1(String pName) {
      super(pName);
      option = "";
    }

    CmdLineArgument1(String pName, String pOption) {
      super(pName);
      option = pOption;
    }

    CmdLineArgument1(String pName1, String pName2, String pOption) {
      super(pName1, pName2);
      option = pOption;
    }

    String getOption() {
      return option;
    }

    /** Consumes the next argument as the option value; fails if none is left. */
    @Override
    final void apply0(
        Map<String, String> properties,
        String currentArg,
        Iterator<String> args)
        throws InvalidCmdlineArgumentException {
      if (args.hasNext()) {
        handleArg(properties, args.next());
      } else {
        throw new InvalidCmdlineArgumentException(currentArg + " argument missing.");
      }
    }

    /**
     * Handles a command-line argument.
     *
     * @param pProperties the map of configuration properties.
     * @param pArg the value of the configuration option represented by this argument.
     */
    void handleArg(
        Map<String, String> pProperties,
        String pArg)
        throws InvalidCmdlineArgumentException {
      putIfNotExistent(pProperties, option, pArg);
    }
  }

  /** This is a command-line argument that sets some properties to fixed values. */
  static class PropertyAddingCmdLineArgument extends CmdLineArgument {

    // Properties set only if not already present (user settings win).
    private final Map<String, String> additionalIfNotExistentArgs = new HashMap<>();
    // Properties set unconditionally (override existing values).
    private final Map<String, String> additionalArgs = new HashMap<>();

    PropertyAddingCmdLineArgument(String pName) {
      super(pName);
    }

    /** Registers a property that is set only when not already present. */
    PropertyAddingCmdLineArgument settingProperty(String pName, String pValue) {
      additionalIfNotExistentArgs.put(pName, pValue);
      return this;
    }

    /** Registers a property that always overrides an existing value. */
    PropertyAddingCmdLineArgument overridingProperty(String pName, String pValue) {
      additionalArgs.put(pName, pValue);
      return this;
    }

    @Override
    void apply0(
        Map<String, String> properties,
        String currentArg,
        Iterator<String> args)
        throws InvalidCmdlineArgumentException {
      for (Entry<String, String> e : additionalIfNotExistentArgs.entrySet()) {
        putIfNotExistent(properties, e.getKey(), e.getValue());
      }
      for (Entry<String, String> e : additionalArgs.entrySet()) {
        properties.put(e.getKey(), e.getValue());
      }
    }
  }
}
|
package org.sunbird.notification.sms;
import static org.powermock.api.mockito.PowerMockito.doReturn;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
import org.apache.http.StatusLine;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.junit.Assert;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.sunbird.common.models.util.ProjectUtil;
import org.sunbird.notification.sms.providerimpl.NICGatewaySmsProvider;
import org.sunbird.notification.utils.PropertiesCache;
import org.sunbird.notification.utils.SMSFactory;
import org.sunbird.notification.utils.SmsTemplateUtil;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"javax.management.*", "javax.net.ssl.*", "javax.security.*"})
@PrepareForTest({
HttpClients.class,
CloseableHttpClient.class,
PropertiesCache.class,
SMSFactory.class,
SmsTemplateUtil.class,
ProjectUtil.class,
URLEncoder.class,
System.class
})
/**
 * Unit tests for {@code NICGatewaySmsProvider}. All static collaborators
 * (HTTP client, configuration lookup, URL encoding, environment access)
 * are mocked with PowerMock, so no real SMS gateway is contacted.
 */
public class NICGatewaySmsProviderTest {

  /** Common stubs: select the NIC provider and neutralize encoding/env lookups. */
  @Before
  public void initMockRules() throws Exception {
    PowerMockito.mockStatic(ProjectUtil.class);
    when(ProjectUtil.getConfigValue("sms_gateway_provider")).thenReturn("NIC");
    PowerMockito.mockStatic(URLEncoder.class);
    when(URLEncoder.encode(Mockito.anyString(), Mockito.anyString())).thenReturn("dfgdgfg");
    PowerMockito.mockStatic(System.class);
    when(System.getenv(Mockito.anyString())).thenReturn("someString");
  }

  /** Mocks the HTTP stack so any GET request appears to return HTTP 200. */
  private void initMockRulesFor200() {
    CloseableHttpClient httpClient = mock(CloseableHttpClient.class);
    CloseableHttpResponse httpResp = mock(CloseableHttpResponse.class);
    PropertiesCache propertiesCache = mock(PropertiesCache.class);
    StatusLine statusLine = mock(StatusLine.class);
    PowerMockito.mockStatic(HttpClients.class);
    try {
      doReturn(httpClient).when(HttpClients.class, "createDefault");
      when(httpClient.execute(Mockito.any(HttpGet.class))).thenReturn(httpResp);
      doReturn(statusLine).when(httpResp).getStatusLine();
      doReturn(200).when(statusLine).getStatusCode();
    } catch (Exception e) {
      Assert.fail("Exception while mocking static " + e.getLocalizedMessage());
    }
    try {
      PowerMockito.when(propertiesCache.getProperty(Mockito.anyString())).thenReturn("anyString");
    } catch (Exception e) {
      Assert.fail("Exception while mocking static " + e.getLocalizedMessage());
    }
  }

  /**
   * Sends a message matching the "reset password" SMS template.
   * NOTE(review): the expectation is {@code false} even though the mocked
   * HTTP layer returns 200 — presumably the provider fails earlier (e.g.
   * during gateway configuration lookup); confirm against the provider
   * implementation.
   */
  @Test
  public void testSendSms() {
    initMockRulesFor200();
    PowerMockito.mockStatic(SmsTemplateUtil.class);
    // Map of approved SMS template text -> template id, as the provider expects.
    Map<String, String> template1 = new HashMap<>();
    template1.put(
        "OTP to verify your phone number on $installationName is $otp. This is valid for $otpExpiryInMinutes minutes only.",
        "1");
    template1.put(
        "OTP to reset your password on $installationName is $otp. This is valid for $otpExpiryInMinutes minutes only.",
        "2");
    template1.put(
        "Your ward has requested for registration on $installationName using this phone number. Use OTP $otp to agree and create the account. This is valid for $otpExpiryInMinutes minutes only.",
        "3");
    when(SmsTemplateUtil.getSmsTemplateConfigMap()).thenReturn(template1);
    NICGatewaySmsProvider megObj = new NICGatewaySmsProvider();
    String sms =
        "OTP to reset your password on instance is 456123. This is valid for 30 minutes only.";
    boolean response = megObj.send("4321111111", sms);
    Assert.assertFalse(response);
  }
}
|
<reponame>SumanthAkula/valorant-rpc
def presence(rpc, client=None, data=None, content_data=None, config=None):
    """Publish the Discord rich-presence shown while the player idles in the menus.

    An empty ``queueId`` means the player is setting up a custom game;
    otherwise the queue id is mapped to its display alias.
    """
    queue_id = data["queueId"]
    if queue_id != "":
        menu_label = content_data["queue_aliases"][queue_id]
    else:
        menu_label = "Custom Setup"
    rpc.update(
        state="Away",
        details=f"Menu - {menu_label}",
        large_image="game_icon_yellow",
        large_text="VALORANT",
    )
|
#!/bin/bash
# Smoke-test MySQL connectivity for a service's test database.
# Usage: <script> <service-name>
# Assumes a "mysql" host with root and "<service>-test" accounts whose
# password is "<service>-test" (CI fixture credentials).
SERVICE=$1
# validate mysql
echo "sleeping for 60 seconds during mysql boot..."
sleep 60
# As root: confirm the server answers at all
mysql -uroot -p${SERVICE}-test --host=mysql --port=3306 -e "SELECT VERSION();SELECT NOW()"
# As the service account: confirm the user can log in
mysql -u${SERVICE}-test -p${SERVICE}-test --host=mysql --port=3306 -e "SELECT VERSION();SELECT NOW()"
# As the service account against leotestdb: confirm schema-level access
mysql -u${SERVICE}-test -p${SERVICE}-test --host=mysql --port=3306 -e "SELECT VERSION();SELECT NOW()" leotestdb
|
# Run the t1..t3 ruby test scripts; print OK/FAIL per script and exit with the
# index of the last failing script (0 when everything passed) — identical to
# the original unrolled version.
ERROR=0
for i in 1 2 3; do
    if ruby -rubygems "t${i}.rb" > /dev/null 2>&1; then
        echo "t${i} OK"
    else
        echo "t${i} FAIL"
        ERROR=$i
    fi
done
exit $ERROR
|
#!/bin/bash
# Bring up the docker-compose stack after logging in to the private ECR
# registry, mirroring this script's own stdout/stderr into the container's
# stdout via a named pipe so everything lands in `docker logs`.
# if [ -z $1 ] ; then
# echo "usage: sudo $(basename $0)"' <path to compose file>'
# exit 1
# fi
path=$(dirname $0)
# Copy stdout and stderr via named pipe to stdout of container for logging.
_fifo="/container_stdout"
exec > >(tee -ia "$_fifo")
exec 2> >(tee -ia "$_fifo" >&2)
_timing_sec_start=${SECONDS}
# Log call and parameters.
echo "[DEBUG] \"$0 $@\" called" > "$_fifo"
# set -a auto-exports every assignment made while sourcing config.sh
# (which presumably provides region, map_data_dir, docker_compose, ... —
# NOTE(review): confirm against config.sh).
set -a
. $path/config.sh
set +a
# Login to private repository. Only when necessary?
# NOTE(review): `aws ecr get-login` was removed in AWS CLI v2 — confirm the
# environment pins CLI v1, or migrate to `aws ecr get-login-password`.
docker_login=$(aws ecr get-login --region ${region} --no-include-email)
$docker_login
if [ "$?" != "0" ]; then
    echo "[ERROR] Login to private respository failed."
    exit 1
fi
# Pass environment variables to sudo environment.
# sudo map_data_dir="$map_data_dir" map_logs_dir="$map_logs_dir" /usr/local/bin/docker-compose -f "${map_data_dir}/docker-compose.yml" up -d
"$docker_compose" -f "${map_data_dir}/docker-compose.yml" up -d &> "$_fifo"
_ret=$?
# Report wall-clock runtime and propagate docker-compose's exit code
echo "[DEBUG] $0 ending, exec time:" $(( SECONDS - _timing_sec_start )) "seconds"
exit $_ret
|
import { Component } from '@angular/core';
@Component({
selector: 'app-table-data',
template:
<table class="table table-bordered">
<thead>
<tr>
<th>ID</th>
<th>Name</th>
<th>Age</th>
</tr>
</thead>
<tbody>
<tr *ngFor="let data of tableData">
<td>{{data.id}}</td>
<td>{{data.name}}</td>
<td>{{data.age}}</td>
</tr>
</tbody>
</table>
})
export class TableDataComponent {
tableData = [
{ id: 1, name: 'John Doe', age: 20 },
{ id: 2, name: 'Jane Doe', age: 21 },
{ id: 3, name: 'Tom Smith', age: 25 }
];
}
|
#!/bin/bash
# Bootstrap the build environment for this mfext addon: export the MF*
# variables and generate adm/root.mk, which the tree's Makefiles include.
set -eu

function get_abs_filename() {
    # Resolve "$1" to an absolute path (its parent directory must exist)
    echo "$(cd "$(dirname "$1")" && pwd)/$(basename "$1")"
}

function usage() {
    echo "usage: ./bootstrap.sh MFEXT_HOME"
}

if test "${1:-}" = "--help"; then
    usage
    exit 0
fi
if test "${1:-}" = ""; then
    usage
    exit 1
fi

MFEXT_HOME=$(get_abs_filename "$1")
export MFEXT_HOME
MFEXT_VERSION=$(cat "${MFEXT_HOME}/config/version")
export MFEXT_VERSION
# Version of THIS addon, derived from its own source tree
MFMODULE_VERSION=$("${MFEXT_HOME}/bin/guess_version.sh")
export MFMODULE_VERSION
export MFMODULE_HOME=${MFEXT_HOME}
export MFMODULE=MFEXT
export MFMODULE_LOWERCASE=mfext
export MFEXTADDON=1
SRC_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export SRC_DIR

if ! test -d adm; then mkdir adm; fi
echo "Making adm/root.mk..."
# Regenerate from a clean slate (the earlier duplicate rm/touch pair was
# redundant and has been dropped)
rm -f adm/root.mk adm/envtpl
touch adm/root.mk
ln -s "${MFEXT_HOME}/bin/envtpl" adm/envtpl
ROOT_PATH=${MFEXT_HOME}/bin:${MFEXT_HOME}/opt/core/bin:${PATH:-}
echo "export MFMODULE := ${MFMODULE}" >>adm/root.mk
echo "export MFMODULE_LOWERCASE := $(echo ${MFMODULE} | tr '[:upper:]' '[:lower:]')" >>adm/root.mk
echo "export LAYERAPI2_LAYERS_PATH := ${MFEXT_HOME}/opt:${MFEXT_HOME}" >>adm/root.mk
echo "export MFEXT_HOME := ${MFEXT_HOME}" >>adm/root.mk
echo "export MFEXT_ADDON := 1" >>adm/root.mk
echo "export MFEXT_ADDON_NAME := rabbitmq" >>adm/root.mk
echo "export MFEXT_VERSION := ${MFEXT_VERSION}" >>adm/root.mk
echo "export MFMODULE_HOME := ${MFMODULE_HOME}" >>adm/root.mk
# Fix: this line previously exported ${MFEXT_VERSION} under the
# MFMODULE_VERSION name, discarding the value guessed above.
echo "export MFMODULE_VERSION := ${MFMODULE_VERSION}" >>adm/root.mk
echo "export SRC_DIR := ${SRC_DIR}" >>adm/root.mk
echo "ifeq (\$(FORCED_PATHS),)" >>adm/root.mk
echo " export PATH := ${ROOT_PATH}" >>adm/root.mk
echo " export LD_LIBRARY_PATH := ${MFEXT_HOME}/lib" >>adm/root.mk
echo " export PKG_CONFIG_PATH := ${MFEXT_HOME}/lib/pkgconfig" >>adm/root.mk
echo " LAYER_ENVS:=\$(shell env |grep '^LAYERAPI2_LAYER_.*_LOADED=1\$\$' |awk -F '=' '{print \$\$1;}')" >>adm/root.mk
echo " \$(foreach LAYER_ENV, \$(LAYER_ENVS), \$(eval unexport \$(LAYER_ENV)))" >>adm/root.mk
echo "endif" >>adm/root.mk
# Forward any proxy settings from the current environment into root.mk
# (indirect expansion ${!v} replaces six copy-pasted if-blocks)
for v in ftp_proxy FTP_PROXY http_proxy https_proxy HTTPS_PROXY HTTP_PROXY; do
    if test "${!v:-}" != ""; then
        echo "export ${v}:=${!v}" >>adm/root.mk
    fi
done
# FIXME: do not hardcode this
# FIXME: move to layer root extra_env ?
echo "export PYTHON2_SHORT_VERSION := 2.7" >>adm/root.mk
echo "export PYTHON3_SHORT_VERSION := 3.9" >>adm/root.mk
echo "BOOTSTRAP DONE !"
echo "MFEXT_HOME=${MFEXT_HOME}"
|
<reponame>hieutran3010/EnglishOnline<filename>src/app/models/user.ts
import ModelBase from './modelBase';
/** Application user record exchanged with the backend API. */
export default class User extends ModelBase {
  // Primary e-mail address; definitely assigned when loaded
  email!: string;
  // Whether the e-mail address has been confirmed
  emailVerified!: boolean;
  phoneNumber?: string;
  // NOTE(review): presumably only sent when creating/updating credentials,
  // never returned by reads — confirm against the API layer.
  password?: string;
  displayName!: string;
  avatarUrl?: string;
  // NOTE(review): both `role` and `roles` exist — confirm which one the
  // backend treats as authoritative.
  role?: string;
  disabled?: boolean;
  roles: string[];
  /**
   * Creates a user with an empty role list.
   */
  constructor() {
    super();
    this.roles = [];
  }
}
|
package com.jeanboy.component.permission.core;
/**
 * Callback for the outcome of a runtime-permission request.
 *
 * @author caojianbo
 * @since 2019/12/3 15:02
 */
public interface Watcher {
    /**
     * Called when the permission has been granted.
     */
    void onGranted();

    /**
     * Called when the permission has been denied.
     *
     * @param isNeverAsk true when the user also chose "don't ask again"
     */
    void onDenied(boolean isNeverAsk);
}
|
<reponame>Zovube/Tasks-solutions
#pragma GCC optimize("O3")
#pragma GCC target("sse,sse2,sse3,ssse3,sse4,popcnt,abm,mmx")
#define __USE_MINGW_ANSI_STDIO 0
#include<bits/stdc++.h>
using namespace std;
#define dbg(x) cerr << #x << " == " << x << "\n";
#define PI acos(-1)
#define pb push_back
#define fi first
#define se second
#define TASK "war"
#define sz(a) (int)(a).size()
#define all(c) (c).begin(), (c).end()
#define TIMESTAMP fprintf(stderr, "Execution time: %.3lf s.\n", (double)clock()/CLOCKS_PER_SEC)
// Debug-formatting helpers used by the debug() macro: render a value as a
// human-readable string.

// Quote a std::string.
string to_string(string s) {
    return '"' + s + '"';
}
// C strings go through the std::string overload.
string to_string(const char* s) {
    return to_string((string) s);
}
// Pairs render as "(first, second)".
template <typename A, typename B>
string to_string(pair<A, B> p) {
    return "(" + to_string(p.first) + ", " + to_string(p.second) + ")";
}
// Any other iterable renders as "{a, b, ...}" (catch-all template).
template <typename A>
string to_string(A v) {
    bool first = true;
    string res = "{";
    for (const auto &x : v) {
        if (!first) {
            res += ", ";
        }
        first = false;
        res += to_string(x);
    }
    res += "}";
    return res;
}
// Variadic printer behind the debug() macro: space-separated to_string() of
// every argument, terminated by a newline on cerr.
void debug_out() { cerr << endl; }
template <typename First, typename... Rest>
void debug_out(First value, Rest... rest) {
    cerr << ' ' << to_string(value);
    debug_out(rest...);
}
#ifdef LOCAL
#define debug(...) cerr << "[" << #__VA_ARGS__ << "]:", debug_out(__VA_ARGS__)
#else
#define debug(...) 42
#endif
// Contest-template shorthand typedefs
typedef long long ll;
typedef long double ld;
typedef vector<int> vi;
typedef vector<ll> vll;
typedef pair <int, int> pii;
typedef vector <vi> vvi;
typedef vector <pii> vpii;
typedef vector <string> vs;
// Problem-size limits and numeric constants
const int MAXN = 1e6 + 9;
const int MOD = (int)(1e9 + 7);
const int LOG2MAXN = 17;
const int INF = 1e9;
const ld eps = 1e-9;
// 4-directional grid moves and their direction letters (template boilerplate)
int di[] = {0, 1, 0, -1};
int dj[] = {1, 0, -1, 0};
char ddir[] = {'R', 'U', 'L', 'D'};
mt19937_64 rnd(chrono::high_resolution_clock::now().time_since_epoch().count());
// Min-cost-flow state: forward-star adjacency (head/nxt), per-edge data
// (to/cap/cost), source S, sink T, edge counter E; SPFA circular queue qq
// with in-queue flags was, distances dd, parent-edge ids pp.
int head[MAXN], head_tmp[MAXN], nxt[MAXN], cap[MAXN], cost[MAXN], to[MAXN], S, T, E;
int qq[MAXN], qh, qt, was[MAXN], N, M, pp[MAXN];
int dd[MAXN];
// Append a directed edge a->b with capacity c and cost cc, plus its paired
// residual edge b->a (capacity 0, negated cost). Edge ids are consecutive so
// id ^ 1 always yields the partner edge.
void addEdge(int a, int b, int c, int cc) {
    to[E] = b; cap[E] = c; cost[E] = cc;
    nxt[E] = head[a]; head[a] = E++;
    to[E] = a; cap[E] = 0; cost[E] = -cc;
    nxt[E] = head[b]; head[b] = E++;
}
// Queue-based Bellman-Ford (SPFA): shortest-cost distances dd[] from S over
// residual edges (cap > 0), recording each node's parent edge id in pp[].
// Returns true when T is reachable, i.e. an augmenting path exists.
bool SPFA() {
    memset(was, 0, sizeof(was));
    qh = qt = 0;
    was[S] = 1;
    fill(dd, dd + MAXN, INF);
    dd[S] = 0;
    qq[qt++] = S;
    // qq is a circular buffer of size MAXN; was[v] marks "v is currently queued"
    while(qh != qt) {
        int v = qq[qh++];
        if(qh == MAXN) qh = 0;
        was[v] = 0;
        debug(qh, qt, v);
        for(int id = head[v]; id != -1; id = nxt[id]) {
            int nv = to[id];
            if(cap[id] > 0 && dd[nv] > dd[v] + cost[id]) {
                // Cheaper route to nv found through edge id: relax and (re)queue
                dd[nv] = dd[v] + cost[id];
                if(!was[nv]) {
                    was[nv] = 1;
                    qq[qt++] = nv;
                    if(qt == MAXN) qt = 0;
                }
                pp[nv] = id;
            }
        }
    }
    return dd[T] != INF;
}
// Successive-shortest-path min-cost max-flow: while SPFA finds a cheapest
// augmenting path, push the bottleneck capacity along it. Returns the total
// cost (the flow amount itself is accumulated in `flow` but not returned).
int mincost() {
    int cost_flow = 0, flow = 0;
    while(SPFA()) {
        int add = INF, add_cost = 0;
        // Walk T back to S via parent edges: bottleneck capacity + path cost
        for(int i = T; i != S; i = to[pp[i] ^ 1]) {
            add_cost += cost[pp[i]];
            add = min(add, cap[pp[i]]);
        }
        flow += add;
        cost_flow += add * add_cost;
        // Apply the augmentation: shrink forward capacity, grow the residual
        for(int i = T; i != S; i = to[pp[i] ^ 1]) {
            cap[pp[i]] -= add;
            cap[pp[i] ^ 1] += add;
        }
    }
    return cost_flow;
}
// Transportation problem: N suppliers with given stock, M consumers with given
// demand, and an N x M per-unit shipping cost matrix. Nodes 0..N-1 are
// suppliers, N..N+M-1 consumers, plus super-source S and super-sink T.
// Prints the minimum total shipping cost.
void solve() {
    fill(head, head + MAXN, -1);
    cin >> N >> M;
    S = N + M, T = S + 1;
    for(int i = 0; i < N; i++) {
        int x; cin >> x;
        addEdge(S, i, x, 0);      // supply capacity, zero cost
    }
    for(int i = 0; i < M; i++) {
        int x; cin >> x;
        addEdge(N + i, T, x, 0);  // demand capacity, zero cost
    }
    for(int i = 0; i < N; i++) {
        for(int j = 0; j < M; j++) {
            int x; cin >> x;
            addEdge(i, N + j, INF, x);  // unlimited lane, cost x per unit
        }
    }
    cout << mincost() << endl;
}
// Intentionally empty: all reading happens in solve(). Kept for the
// template's input()/solve() structure.
void input() {
}
// Entry point: unsync the C++ streams for speed, then redirect stdin/stdout
// to the contest files (TASK.in / TASK.out) unless built with -DLOCAL.
int main() {
    ios_base::sync_with_stdio(0);
    cin.tie(0);
#ifdef LOCAL
    freopen("xxx.in", "r", stdin);
    //freopen("xxx.out", "w", stdout);
#else
    freopen(TASK".in", "r", stdin);
    freopen(TASK".out", "w", stdout);
#endif
    input();
    solve();
#ifdef LOCAL
    TIMESTAMP;  // print execution time when run locally
#endif
    return 0;
}
|
#!/bin/bash -x
# Environment setup for a local Hadoop/Spark/Flink development stack under
# $HOME/local. Intended to be sourced; -x traces every export for debugging.

# Pinned tool versions
export PYTHON_VERSION=2.7
export JDK_VERSION=8
export SCALA_VERSION=2.12.4
export MAVEN_VERSION=3.5.2
export HADOOP_VERSION_DETAIL=2.7.5
export SPARK_VERSION_DETAIL=2.2.1
export HADOOP_FOR_SPARK_VERSION=2.7
export FLINK_VERSION_DETAIL=1.4.0
export HADOOP_FOR_FLINK_VERSION=26
export SCALA_FOR_FLINK_VERSION=2.11
# Java / Scala / Maven
export JAVA_HOME=/usr
export PATH=$PATH:$JAVA_HOME/bin
export SCALA_HOME=$HOME/local/scala
export PATH=$PATH:$SCALA_HOME/bin
export M2_HOME=$HOME/local/apache-maven
export PATH=$PATH:$M2_HOME/bin
# Hadoop: the many *_HOME aliases all point at the same install
export HADOOP_HOME=$HOME/local/hadoop-${HADOOP_VERSION_DETAIL}
export HADOOP_PREFIX=$HOME/local/hadoop-${HADOOP_VERSION_DETAIL}
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop/
export HADOOP_INSTALL=$HADOOP_HOME
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib/native"
# Spark (standalone master on localhost)
export SPARK_HOME=$HOME/local/spark-${SPARK_VERSION_DETAIL}
export SPARK_MASTER_IP=localhost
# Flink
export FLINK_HOME=$HOME/local/flink-${FLINK_VERSION_DETAIL}
export PATH=$PATH:$FLINK_HOME/bin
# Thrill
export THRILL_HOME=$HOME/thrill
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.textColor = void 0;
var textColor = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M5.032 13l0.9-3h4.137l0.9 3h1.775l-3-10h-3.488l-3 10h1.776zM7.432 5h1.137l0.9 3h-2.937l0.9-3z"
}
}]
};
exports.textColor = textColor;
|
basicGrp1="bundles
console
context
discover
getting_started
home
management
saved_objects_management
status_page
timelion
"
basicGrp2="visualize
"
basicGrp3="dashboard
"
xpackGrp1="advanced_settings
apm
cross_cluster_replication
grok_debugger
index_lifecycle_management
index_management
index_patterns
license_management
maps
reporting_management
"
xpackGrp2="canvas
dashboard_mode
dev_tools
discover
graph
infra
lens
logstash
status_page
timelion
upgrade_assistant
uptime
watcher
"
xpackGrp3="machine_learning
ml
transform
visualize
"
xpackGrp4="api_keys
dashboard
ingest_pipelines
monitoring
remote_clusters
rollup_job
saved_objects_management
security
snapshot_restore
spaces
"
xpackExtGrp1="alerting_api_integration
api_integration
plugin_api_integration
reporting
reporting_api_integration
"
xpackExtGrp2="apm_api_integration
case_api_integration
lists_api_integration
spaces_api_integration
"
xpackExtGrp3="encrypted_saved_objects_api_integration
functional_embedded
functional_enterprise_search
observability_api_integration
saved_object_api_integration
saved_objects_field_count
ui_capabilities
upgrade_assistant_integration
"
xpackExtGrp4="detection_engine_api_integration
ingest_manager_api_integration
security_api_integration
security_functional
security_solution_endpoint
"
|
// @flow
import { fbapi } from "../../facebook/api"
import type { MitosisUser } from "../types"
import { Cities } from "places"
import { updateMitosisUser } from "../../db/mongo"
import { GravityShowsSearchAPI, gravity, metaphysicsQuery } from "../artsy-api"
import { elementForArtwork } from "./artwork/element"
import { elementForGravityShow } from "./shows/element"
import { showInfoQuery } from "./shows/queries"
const querystring = require("querystring")
// Shape of an entry in the `Cities` list from the "places" package
interface City {
  slug: string,
  name: string,
  coords: number[],
  sort_order: number,
  timezone: number
}
// Keys for callback resolution; payloads look like "<key>::<arg>::<arg>..."
export const ShowsNearMeKey = "shows-near-me"
export const ShowsSaveAsMyCity = "shows-save-for-my-city"
export const ShowsSetMyCity = "shows-set-for-my-city"
export const ShowsForCityKey = "shows-for-city"
export const ShowsShowKey = "shows-show"
export const ShowsShowInfo = "shows-info-show"
export const ShowsShowPressRelease = "shows-press-release-show"
export const ShowsShowArtworks = "shows-artworks-show"
export const ShowsFavPartner = "shows-favourite-partner"
export const ShowsInferCity = "shows-infer-city"
/**
 * Handles pulling out the payload keys and running the appropriate function.
 * Dispatch is by string prefix; none of the checked keys is a prefix of
 * another, so at most one branch fires per payload.
 *
 * @export
 * @param {MitosisUser} context the user details
 * @param {string} payload a string for the lookup
 */
export function handleShowsCallbacks(context: MitosisUser, payload: string) {
  if (payload.startsWith(ShowsNearMeKey)) { callbackShowsNearMe(context, payload) }
  if (payload.startsWith(ShowsForCityKey)) { callbackShowsForCity(context, payload) }
  if (payload.startsWith(ShowsSaveAsMyCity)) { callbackForShowsSaveAsMyCity(context, payload) }
  if (payload.startsWith(ShowsSetMyCity)) { callbackForSetSaveAsMyCity(context, payload) }
  if (payload.startsWith(ShowsShowInfo)) { callbackForShowsInfo(context, payload) }
  if (payload.startsWith(ShowsShowPressRelease)) { callbackForShowsPressRelease(context, payload) }
  if (payload.startsWith(ShowsInferCity)) { callbackForShowsInferCity(context, payload) }
}
// Shows a list of shows nearby, or just jumps straight into shows nearby
async function callbackShowsNearMe(context: MitosisUser, payload: string) {
fbapi.startTyping(context.fbSenderID)
const cities = citiesForUser(context)
if (cities.length === 1) {
// If there's only going be one result, skip showing an option
callbackShowsForCity(context, `${ShowsForCityKey}::${cities.pop().slug}`)
return
}
// Present a list to choose from
await fbapi.quickReply(context.fbSenderID, "Which is the closest city to you?", cities.map((city) => {
return { content_type: "text", title: city.name, payload: `${ShowsSaveAsMyCity}::${city.slug}::${city.name}` }
}))
}
// Highlight shows for a city; payload is "<key>::<city slug>[::...]"
async function callbackShowsForCity(context: MitosisUser, payload: string) {
  const [, showCityID] = payload.split("::")
  // NOTE(review): find() returns undefined for an unknown slug, and
  // city.coords below would then throw — confirm callers pre-validate slugs.
  const city = Cities.find((c) => c.slug === showCityID)
  // Five most recently started, displayable, non-fair shows near the city
  const query = querystring.stringify({
    near: city.coords.toString(),
    sort: "-start_at",
    size: 5,
    displayable: true,
    at_a_fair: false
  })
  const url = `${GravityShowsSearchAPI}?${query}`
  const shows = await gravity(url, context)
  await fbapi.elementCarousel(context.fbSenderID, `Shows near ${city.name}`, shows.map(show => elementForGravityShow(show)), [])
}
// A show overview, showing artworks, and supports paginating through the works.
// Payload is "<key>::<show id>::<show name>[::<artworks page>]".
async function callbackForShowsInfo(context: MitosisUser, payload: string) {
  const [, showID, showName, artworksPage] = payload.split("::")
  const page = artworksPage || "1"
  const query = showInfoQuery(showID, page)
  const results = await metaphysicsQuery(query, context)
  const show = results.data.show
  const firstPage = page === "1"
  if (firstPage) {
    // Only the first page carries the textual overview (dates, venue, blurb)
    let location = show.location.display
    if (location === null) {
      location = `${show.location.address}, ${show.location.postal_code}`
    }
    await fbapi.sendTextMessage(context.fbSenderID, `${show.exhibition_period}
${location}
${show.description}
`)
  }
  if (show.artworks.length) {
    // Offer the press release button only once, and only when one exists
    const showPressRelease = firstPage && show.press_release !== null && show.press_release.length > 0
    await fbapi.elementCarousel(context.fbSenderID, `Works at ${showName}`, show.artworks.map(a => elementForArtwork(a)), [
      { content_type: "text", title: "More Artworks", payload: `${ShowsShowInfo}::${showID}::${showName}::${parseInt(page) + 1}` },
      showPressRelease ? { content_type: "text", title: "Press Release", payload: `${ShowsShowPressRelease}::${showID}::${showName}` } : null
    ])
  } else {
    await fbapi.sendTextMessage(context.fbSenderID, "That's all of the artworks for the show.")
  }
}
// Show just the press release, they are long so don't do it on info
async function callbackForShowsPressRelease(context: MitosisUser, payload: string) {
const [, showID] = payload.split("::")
const query = showInfoQuery(showID, "1")
const results = await metaphysicsQuery(query, context)
const show = results.data.show
await fbapi.sendLongMessage(context.fbSenderID, show.press_release)
}
// If you have chosen a city, offer to save it as the default.
// "Yes" routes through ShowsSetMyCity (persist, then show); "No" shows directly.
async function callbackForShowsSaveAsMyCity(context: MitosisUser, payload: string) {
  const [, showCityID, cityName] = payload.split("::")
  await fbapi.quickReply(context.fbSenderID, `Would you like to save ${cityName} as your local city?`, [
    { content_type: "text", title: "Yes please", payload: `${ShowsSetMyCity}::${showCityID}::${cityName}` },
    { content_type: "text", title: "No thanks", payload: `${ShowsForCityKey}::${showCityID}::${cityName}` }
  ])
}
// Try and figure out what city the user is after
async function callbackForShowsInferCity(context: MitosisUser, payload: string) {
let [, message] = payload.split("::")
message = message.replace("nyc", "new york")
.replace("new york city", "new york")
.replace("SF", "san francisco")
.replace("cupertino", "san francisco")
const cityID = message.replace("shows in", "").trim().replace(" ", "-").replace(".", "").replace("?", "")
const city = Cities.find((c) => c.slug === cityID)
if (city) {
callbackShowsForCity(context, `${ShowsForCityKey}::${city.slug}::${city.name}`)
} else {
const sortedCities = Cities.sort((a, b) => a.sort_order < b.sort_order).reverse()
fbapi.sendLongMessage(context.fbSenderID, `Sorry, we could not find your city. Here is our list of cities to work from:
${sortedCities.map((c) => { return c.name }).join(", ")}`)
}
}
// Save your location if you wanted to, then show the city
async function callbackForSetSaveAsMyCity(context: MitosisUser, payload: string) {
  const [, showCityID, cityName] = payload.split("::")
  context.artsyLocationCitySlug = showCityID
  fbapi.startTyping(context.fbSenderID)
  // Persist the preference before confirming, so the copy is never wrong
  await updateMitosisUser(context)
  fbapi.sendTextMessage(context.fbSenderID, `Set ${cityName} as your local city. You can say "shows" at any time to see shows in ${cityName}`)
  callbackShowsForCity(context, `${ShowsForCityKey}::${showCityID}::${cityName}`)
}
// Try and figure out the useful cities for a user
function citiesForUser(context: MitosisUser): City[] {
// Are we certain?
if (context.artsyLocationCitySlug !== undefined) {
const locaton = Cities.find((c) => c.slug === context.artsyLocationCitySlug)
return [locaton]
}
// If not be pretty good about showing the first
const cities = []
if (context.favouriteCitySlug !== undefined) {
cities.push(Cities.find((c) => c.slug === context.favouriteCitySlug))
}
// And then the rest in the timezone
const citiesInTimezone = Cities.filter((c) => Math.round(c.timezone) === Math.round(context.timezoneOffset))
const sortedCities = citiesInTimezone.sort((a, b) => a.sort_order < b.sort_order).reverse()
return cities.concat(sortedCities)
}
|
# Termux package recipe for xmake, a Lua-based cross-platform build utility.
TERMUX_PKG_HOMEPAGE=https://xmake.io/
TERMUX_PKG_DESCRIPTION="A cross-platform build utility based on Lua"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="Ruki Wang @waruqi"
TERMUX_PKG_VERSION=2.5.3
TERMUX_PKG_SRCURL=https://github.com/xmake-io/xmake/releases/download/v${TERMUX_PKG_VERSION}/xmake-v${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=337edd61de22b043720556a02bf7b1c4d4881e200ecce6bb2406d0442f2db92e
# Build inside the source tree rather than an out-of-tree build dir
TERMUX_PKG_BUILD_IN_SRC=true

termux_step_make() {
    # xmake's top-level Makefile exposes a "build" target
    make build
}

termux_step_make_install() {
    # Install into the Termux prefix instead of /usr/local
    make install PREFIX="${TERMUX_PREFIX}"
}
|
<filename>src/templates/Home.js<gh_stars>0
import React from "react"
import { graphql, Link } from "gatsby"
import SEO from "../components/seo"
import Layout from "../components/layout"
import Flickity from "react-flickity-component"
import { Tab, Tabs, TabList, TabPanel } from "react-tabs"
import { SRLWrapper } from "simple-react-lightbox"
import "react-tabs/style/react-tabs.scss"
// Flickity settings for the hero slider: auto-playing, wrap-around cross-fade
// carousel with free scrolling, cells left-aligned and contained.
let flickityOptions = {
  cellAlign: "left",
  contain: true,
  fade: true,
  freeScroll: true,
  wrapAround: true,
  autoPlay: true,
}
const HomeTemplate = ({ data }) => (
<Layout>
<SEO title={data.wordpressPage.title} />
<section className="hero-slider">
<Flickity
className={"carousel mask"}
elementType={"div"}
options={flickityOptions}
>
{data.wordpressPage.acf.home_header.carousel.map(item => (
<div
className="hero-slide"
style={{
backgroundImage: `url(${item.image.localFile.childImageSharp.sizes.src})`,
}}
key={item.big_header}
>
<div className="hero-slider__overlay">
<div className="hero-slider-container">
<p className="intro-title">{item.top_header}</p>
<p className="slide-title">{item.big_header}</p>
<p className="bottom-title">{item.big_header}</p>
<div>
<button className="top-button">
<Link to={`/about-us`}>More About Us </Link>
</button>
<br />
<button className="bottom-button">
<Link to={`/reservation`}>Reserve a Table!</Link>
</button>
</div>
</div>
</div>
</div>
))}
</Flickity>
</section>
<section className="about">
<div className="container">
<div className="intro-block">
<p className="intro-title">
{data.wordpressPage.acf.about.top_header}
</p>
<h2 className="title">{data.wordpressPage.acf.about.big_header}</h2>
<h2 className="sub-title">
{data.wordpressPage.acf.about.bottom_header}
</h2>
<div className="divider"></div>
<p className="paragraph">{data.wordpressPage.acf.about.details}</p>
</div>
</div>
</section>
<section
className="background-image-section"
style={{
backgroundImage: `url(${data.wordpressPage.acf.parallax.background_image.localFile.childImageSharp.sizes.src})`,
}}
>
<div className="background-image-section__overlay">
<div className="container">
<div className="intro-block">
<h2 className="intro-title">
{data.wordpressPage.acf.parallax.intro_title}
</h2>
<h2 className="sub-title">
{data.wordpressPage.acf.parallax.sub_title}
</h2>
</div>
<div className="section-row">
{data.wordpressPage.acf.parallax.rows.map(row => (
<div className="facts" key={row.detail}>
<img
src={row.image.localFile.childImageSharp.sizes.src}
alt=""
/>
<p dangerouslySetInnerHTML={{ __html: row.detail }} />
</div>
))}
</div>
</div>
</div>
</section>
<section
className="menu"
style={{
backgroundImage: `url(${data.wordpressPage.acf.menu.background_image.localFile.childImageSharp.sizes.src})`,
}}
>
<div className="menu__overlay">
<div className="container">
<h2 className="title">{data.wordpressPage.acf.menu.intro_title}</h2>
<h2 className="sub-title">{data.wordpressPage.acf.menu.sub_title}</h2>
<Tabs className="menu-tabs">
<TabList className="menu-tabs-menu">
{data.wordpressPage.acf.menu.tab_buttons.map(button => (
<Tab className="ui-btn" key={button.name}>
<button href="#" className="tab-button">
{button.name}
</button>
</Tab>
))}
</TabList>
<div className="tabs-content">
<TabPanel className="pane">
<div className="white-wrapper">
<div className="collection-list">
{data.wordpressPage.acf.menu.coffee_item.map(item => (
<div className="list-item" key={item.name}>
<div className="price">{item.price}</div>
<div className="item-title">{item.name}</div>
<div className="item-sub-title">{item.detail}</div>
</div>
))}
</div>
</div>
</TabPanel>
<TabPanel className="pane">
<div className="white-wrapper">
<div className="collection-list">
{data.wordpressPage.acf.menu.lunch_item.map(item => (
<div className="list-item" key={item.name}>
<div className="price">{item.price}</div>
<div className="item-title">{item.name}</div>
<div className="item-sub-title">{item.detail}</div>
</div>
))}
</div>
</div>
</TabPanel>
<TabPanel className="pane">
<div className="white-wrapper">
<div className="collection-list">
{data.wordpressPage.acf.menu.everyday_specials_item.map(
item => (
<div className="list-item" key={item.name}>
<div className="price">{item.price}</div>
<div className="item-title">{item.name}</div>
<div className="item-sub-title">{item.detail}</div>
</div>
)
)}
</div>
</div>
</TabPanel>
</div>
</Tabs>
<button className="menu-button">
<Link to={data.wordpressPage.acf.menu.complete_menu}>
View our complete menu
</Link>
</button>
</div>
</div>
</section>
<section className="bottom">
<div className="container">
<div className="bottom-columns">
<div className="text-column-left">
<p className="intro-title">
{data.wordpressPage.acf.bottom.intro_title}
</p>
<h2 className="title">{data.wordpressPage.acf.bottom.title}</h2>
<h2 className="sub-title">
{data.wordpressPage.acf.bottom.sub_title}
</h2>
<div className="divider"></div>
<p className="info">{data.wordpressPage.acf.bottom.details}</p>
</div>
<div className="text-column-right">
<div className="small-gallery-row">
{data.wordpressPage.acf.bottom.images.map(imager => (
<div
className="small-gallery-row--left"
key={imager.image.localFile.childImageSharp.sizes.src}
>
<SRLWrapper>
<a href={imager.image.localFile.childImageSharp.sizes.src}>
<img
src={imager.image.localFile.childImageSharp.sizes.src}
alt=""
/>
</a>
</SRLWrapper>
</div>
))}
</div>
</div>
</div>
</div>
</section>
</Layout>
)
export default HomeTemplate
// Page query: fetches the WordPress page by its numeric id and pulls every
// ACF field group the template above renders (hero carousel, about, parallax,
// menu tabs/items, bottom gallery).
export const query = graphql`
  query($id: Int!) {
    wordpressPage(wordpress_id: { eq: $id }) {
      title
      content
      excerpt
      date(formatString: "MMMM DD, YYYY")
      author {
        name
      }
      acf {
        home_header {
          carousel {
            big_header
            bottom_header
            button_left
            button_right
            top_header
            image {
              localFile {
                childImageSharp {
                  sizes {
                    src
                  }
                }
              }
            }
          }
        }
        about {
          big_header
          bottom_header
          details
          top_header
        }
        parallax {
          intro_title
          sub_title
          background_image {
            localFile {
              childImageSharp {
                sizes {
                  src
                }
              }
            }
          }
          rows {
            detail
            image {
              localFile {
                childImageSharp {
                  sizes {
                    src
                  }
                }
              }
            }
          }
        }
        menu {
          complete_menu
          intro_title
          sub_title
          tab_buttons {
            name
          }
          background_image {
            localFile {
              childImageSharp {
                sizes {
                  src
                }
              }
            }
          }
          coffee_item {
            detail
            name
            price
          }
          everyday_specials_item {
            detail
            name
            price
          }
          lunch_item {
            detail
            name
            price
          }
        }
        bottom {
          details
          intro_title
          sub_title
          title
          images {
            image {
              localFile {
                childImageSharp {
                  sizes {
                    src
                  }
                }
              }
            }
          }
        }
      }
    }
  }
`
|
package org.allenai.ari.solvers.textilp.utils
import edu.illinois.cs.cogcomp.annotation.Annotator
import edu.illinois.cs.cogcomp.core.datastructures.textannotation.{ Constituent, SpanLabelView, TextAnnotation }
import edu.illinois.cs.cogcomp.core.utilities.SerializationHelper
import edu.illinois.cs.cogcomp.core.utilities.configuration.ResourceManager
import org.allenai.ari.controller.questionparser.{ FillInTheBlankGenerator, QuestionParse }
import org.apache.commons.codec.digest.DigestUtils
import org.mapdb.{ DBMaker, HTreeMap, Serializer }
/**
 * Annotator that adds a view holding the fill-in-the-blank rewrite of a
 * question, produced by `FillInTheBlankGenerator`. Because generation is
 * expensive, results are cached on disk with MapDB, keyed by the SHA-1 hash
 * of the annotation text.
 */
class FillInBlankAnnotator extends Annotator("FillInBlankGenerator", Array.empty) {
  // Transactional on-disk cache, closed automatically on JVM shutdown
  private var db = DBMaker.fileDB("fillInBlankGenerator.cache").closeOnJvmShutdown().transactionEnable().make()
  // Lazily built: loading the generator is costly and may never be needed
  lazy val fitbGenerator = FillInTheBlankGenerator.mostRecent
  override def initialize(rm: ResourceManager): Unit = {
    // do nothing
  }
  /**
   * Adds the fill-in-the-blank view to `ta`, reusing a cached serialized
   * annotation when one exists for identical text; otherwise generates the
   * rewrite, stores it as a single constituent spanning all tokens, and
   * commits the result to the cache.
   */
  override def addView(ta: TextAnnotation): Unit = {
    val concurrentMap: HTreeMap[String, Array[Byte]] = db.hashMap(viewName, Serializer.STRING, Serializer.BYTE_ARRAY).createOrOpen()
    val key = DigestUtils.sha1Hex(ta.text)
    if (concurrentMap.containsKey(key)) {
      // Cache hit: deserialize and copy the stored view across
      val taByte: Array[Byte] = concurrentMap.get(key)
      val cachedTa = SerializationHelper.deserializeTextAnnotationFromBytes(taByte)
      ta.addView(viewName, cachedTa.getView(viewName))
    } else {
      // Cache miss: generate the FITB form (empty string when none found)
      val qparse = QuestionParse.constructFromString(ta.text)
      val fitbQuestionStrOpt = fitbGenerator.generateFITB(qparse).map(_.text)
      val cons = new Constituent(fitbQuestionStrOpt.getOrElse(""), viewName, ta, 0, ta.getTokens.length)
      val vu = new SpanLabelView(viewName, ta)
      vu.addConstituent(cons)
      ta.addView(viewName, vu)
      if (concurrentMap != null) {
        concurrentMap.put(key, SerializationHelper.serializeTextAnnotationToBytes(ta));
        this.db.commit();
      }
    }
  }
  // Release the MapDB file handle
  def close = db.close()
  /** Switch to a different cache file (closes the current one first). */
  def useCaching(str: String) = {
    db.close()
    db = DBMaker.fileDB(str).closeOnJvmShutdown().transactionEnable().make()
  }
}
|
#!/bin/sh
# Fully uninstall Docker from a CentOS 7 host: stop the service, remove the
# packages, and delete on-disk images/runtime state.
# Stop the docker service
systemctl stop docker
# Disable start-on-boot
systemctl disable docker
#rm -rf /etc/systemd/system/docker.service.d
# Remove installed packages
yum list installed | grep docker
rpm -qa | grep docker*
yum remove docker
yum remove -y docker-ce-cli
# Delete images and runtime data
rm -rf /var/lib/docker
rm -rf /var/run/docker
# Should now fail with "command not found" if removal succeeded
docker --version
#### Known issues
#
# Problem: rm: cannot remove '/var/run/docker/netns/default': Device or resource busy
# Solution: https://blog.csdn.net/qq_28284093/article/details/80117367
# i.e. unmount it first:
# umount /var/run/docker/netns/default
##########
# reference
##########
# Cleanly uninstalling docker (CentOS 7)
# https://blog.csdn.net/weixin_39592623/article/details/88060629
# Uninstalling docker under CentOS 7
# https://blog.csdn.net/x15011238662/article/details/84963439
# Uninstalling Docker on CentOS 7
|
<reponame>Organizational-Proof-Of-Work/clearinghoused_build
#!/usr/bin/env python
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
# Package initializer for tendo: exposes package metadata and the list of
# public submodules, and enforces the minimum interpreter version.
from __future__ import absolute_import

__author__ = "<NAME>"
__copyright__ = "Copyright 2010-2013, <NAME>"
__email__ = "sorin(dot)sbarnea(at)gmail.com"
__status__ = "Production"
from . import version
__date__ = "2013-09-10"
# Submodules re-exported via `from tendo import *`
__all__ = ['tee', 'colorer', 'unicode', 'execfile2', 'singleton', 'ansiterm', 'version']
"""
Tendo is tested with Python 2.5-3.3
"""
import sys
# Hard floor on the interpreter version; abort early with a clear message
# (0x02050000 is the hexversion encoding of 2.5.0).
if sys.hexversion < 0x02050000:
    sys.exit("Python 2.5 or newer is required by tendo module.")
|
<reponame>cchdo/ctdcal<gh_stars>1-10
from ctdcal import flagging
import numpy as np
import pandas as pd
import pytest
# TODO: make a class and recycle 3 data sets, separating outliers and NaNs:
# data = [np.nan] + 97 * [0] + [100, 100]
# data = pd.Series([np.nan] + 97 * [0] + [100, 100])
# data = pd.DataFrame([np.nan] + 97 * [0] + [100, 100])
def test_merge_flags():
    """_merge_flags keeps the requested side (higher/lower) element-wise and
    rejects flag lists of mismatched length."""
    old_flags = 5 * [2]
    new_flags = 5 * [3]
    # check proper flags are kept
    np.testing.assert_array_equal(
        flagging._merge_flags(new_flags, old_flags, keep_higher=True), new_flags
    )
    np.testing.assert_array_equal(
        flagging._merge_flags(new_flags, old_flags, keep_higher=False), old_flags
    )
    # error if flag lengths are different
    with pytest.raises(ValueError):
        flagging._merge_flags(old_flags, new_flags[:-1])
    with pytest.raises(ValueError):
        flagging._merge_flags(old_flags[:-1], new_flags)
def test_nan_values():
    """nan_values flags NaNs as 9 (or a custom flag) and everything else as
    good, both with and without pre-existing flags."""
    # Last element is the only NaN in the fixture
    data = [0, 0, 0, np.nan]
    old_flags = len(data) * [2]
    # check NaNs are flagged properly (without old flags)
    assert all(flagging.nan_values(data)[:-1] == 2)
    assert flagging.nan_values(data)[-1] == 9
    # check re-defining flag_good/flag_bad (without old flags)
    new_flags = flagging.nan_values(data, flag_good=6, flag_nan=5)
    assert all(new_flags[:-1] == 6)
    assert new_flags[-1] == 5
    # check NaNs are flagged properly (with old flags)
    assert all(flagging.nan_values(data, old_flags=old_flags)[:-1] == 2)
    assert flagging.nan_values(data, old_flags=old_flags)[-1] == 9
    # check re-defining flag_good/flag_bad (with old flags)
    new_flags = flagging.nan_values(data, old_flags=old_flags, flag_good=6, flag_nan=5)
    assert all(new_flags[:-1] == 6)
    assert new_flags[-1] == 5
def test_outliers():
    """Values far from the bulk of the data are flagged as outliers."""
    data = [np.nan] + 97 * [0] + [100, 100]
    old_flags = (len(data) - 1) * [2] + [7]
    # default flag values, no pre-existing flags: last two points are outliers
    flags = flagging.outliers(data)
    assert all(flags[:-2] == 2)
    assert all(flags[-2:] == 4)
    # custom good/outlier flag values
    flags = flagging.outliers(data, flag_good=6, flag_outlier=5)
    assert all(flags[:-2] == 6)
    assert all(flags[-2:] == 5)
    # merging with pre-existing flags keeps the old flag on the last point
    flags = flagging.outliers(data, old_flags=old_flags)
    assert all(flags[:-2] == 2)
    assert flags[-2] == 4
    assert flags[-1] == old_flags[-1]
    # custom flag values combined with pre-existing flags
    flags = flagging.outliers(
        data, old_flags=old_flags, flag_good=6, flag_outlier=5
    )
    assert all(flags[:-2] == 6)
    assert flags[-2] == 5
    assert flags[-1] == old_flags[-1]
    # n_sigma1=5 still flags the outliers; a wide enough window (10) does not
    flags = flagging.outliers(data, n_sigma1=5)
    assert all(flags[:-2] == 2)
    assert all(flags[-2:] == 4)
    assert all(flagging.outliers(data, n_sigma1=10) == 2)
def test_by_percent_diff():
    """Points differing from the reference by a large percentage are flagged."""
    data = [np.nan] + 97 * [0] + [100, 100]
    ref = (len(data) - 1) * [0] + [80]
    old_flags = (len(data) - 1) * [2] + [7]
    # default flag values, no pre-existing flags: last two points differ too much
    flags = flagging.by_percent_diff(data, ref)
    assert all(flags[:-2] == 2)
    assert all(flags[-2:] == 3)
    # custom good/bad flag values
    flags = flagging.by_percent_diff(data, ref, flag_good=6, flag_bad=5)
    assert all(flags[:-2] == 6)
    assert all(flags[-2:] == 5)
    # merging with pre-existing flags keeps the old flag on the last point
    flags = flagging.by_percent_diff(data, ref, old_flags=old_flags)
    assert all(flags[:-2] == 2)
    assert flags[-2] == 3
    assert flags[-1] == old_flags[-1]
    # custom flag values combined with pre-existing flags
    flags = flagging.by_percent_diff(
        data, ref, old_flags=old_flags, flag_good=6, flag_bad=5
    )
    assert all(flags[:-2] == 6)
    assert flags[-2] == 5
    assert flags[-1] == old_flags[-1]
    # raising percent_thresh far enough stops the last point being flagged
    assert flagging.by_percent_diff(data, ref, percent_thresh=10)[-1] == 3
    assert flagging.by_percent_diff(data, ref, percent_thresh=20)[-1] == 2
@pytest.mark.parametrize(
    "offset, p", [(0.003, 2000), (0.006, 1000), (0.011, 500), (0.021, 0)]
)
def test_by_residual(offset, p):
    """The residual tolerance tightens with pressure; bad inputs raise."""
    data = [np.nan] + 97 * [0] + [100, 100]
    ref = np.array(data) + offset
    pres = np.linspace(0, 3000, len(data))
    old_flags = (len(data) - 1) * [2] + [7]
    shallow, deep = pres < p, pres > p
    # default flag values, no pre-existing flags
    flags = flagging.by_residual(data, ref, pres)
    assert all(flags[shallow] == 2)
    assert all(flags[deep] == 3)
    # custom good/bad flag values
    flags = flagging.by_residual(data, ref, pres, flag_good=6, flag_bad=5)
    assert all(flags[shallow] == 6)
    assert all(flags[deep] == 5)
    # merging with pre-existing flags keeps the old flag on the last point
    flags = flagging.by_residual(data, ref, pres, old_flags=old_flags)
    assert all(flags[shallow] == 2)
    assert all(flags[deep][:-1] == 3)
    assert flags[deep][-1] == old_flags[-1]
    # custom flag values combined with pre-existing flags
    flags = flagging.by_residual(
        data, ref, pres, old_flags=old_flags, flag_good=6, flag_bad=5
    )
    assert all(flags[shallow] == 6)
    assert all(flags[deep][:-1] == 5)
    assert flags[deep][-1] == old_flags[-1]
    # threshold must be exactly one element longer than p_cutoff
    for threshold, p_cutoff in (
        ([1, 2], [100, 50, 0]),
        ([1, 2, 3], [100, 50, 0]),
        ([1, 2, 3], [100]),
    ):
        with pytest.raises(IndexError):
            flagging.by_residual([], [], [], threshold=threshold, p_cutoff=p_cutoff)
    # p_cutoff must be given in decreasing order
    for threshold, p_cutoff in (
        ([1, 2, 3], [0, 50]),
        ([1, 2, 3, 4], [100, 0, 50]),
    ):
        with pytest.raises(ValueError):
            flagging.by_residual([], [], [], threshold=threshold, p_cutoff=p_cutoff)
|
package Multiply_Strings;
import java.util.ArrayList;
import java.util.List;
/**
 * LeetCode 43 "Multiply Strings": multiplies two non-negative integers
 * given as decimal strings, without BigInteger or numeric parsing.
 */
public class Solution {

    /**
     * Grade-school multiplication in a single pass.
     *
     * <p>The product of digits {@code num1[i]} and {@code num2[j]} lands at
     * result positions {@code i + j} (carry) and {@code i + j + 1} (units),
     * so an int array of length {@code len1 + len2} can accumulate the whole
     * product directly.
     *
     * @param num1 first non-negative factor as a decimal string, e.g. "1234"
     * @param num2 second non-negative factor as a decimal string
     * @return the product as a decimal string without leading zeros
     *         ("0" when the product is zero)
     */
    public String multiply(String num1, String num2) {
        int[] pos = new int[num1.length() + num2.length()];
        for (int i = num1.length() - 1; i >= 0; i--) {
            for (int j = num2.length() - 1; j >= 0; j--) {
                int mul = (num1.charAt(i) - '0') * (num2.charAt(j) - '0');
                int sum = mul + pos[i + j + 1];
                pos[i + j] += sum / 10;     // carry into the higher position
                pos[i + j + 1] = sum % 10;  // units digit of this position
            }
        }
        StringBuilder builder = new StringBuilder();
        for (int i : pos) {
            // Skip leading zeros only; interior and trailing zeros are kept.
            if (!(builder.length() == 0 && i == 0)) {
                builder.append(i);
            }
        }
        return builder.length() == 0 ? "0" : builder.toString();
    }

    /** Small demo driver. */
    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.multiply("1234", "1111"));
        System.out.println(s.multiply("9133", "0"));
    }
}
|
// Development webpack configuration (webpack 1.x API: `loaders`,
// `NoErrorsPlugin`, top-level `postcss`/`sassLoader` keys) for the demo site.
const path = require('path');
const webpack = require('webpack');
const autoprefixer = require('autoprefixer-core');
// Specify which theme the dev webpack config will serve.
// Change during development time to test layout for different themes. Available themes : red, blue, gray (default)
// Restart dev server after changing theme: `npm run start`
const THEME = 'gray';
module.exports = {
  devtool: 'eval-source-map',
  entry: [
    // hot-module-replacement client must precede the application entry
    'webpack-hot-middleware/client',
    './site/src/index'
  ],
  output: {
    path: path.join(__dirname, 'dist', 'site'),
    filename: 'bundled.js',
    publicPath: '/static/site/',
  },
  plugins: [
    new webpack.HotModuleReplacementPlugin(),
    new webpack.NoErrorsPlugin()
  ],
  module: {
    loaders: [
      {
        test: /\.js$/,
        loaders: ['babel'],
        include: path.join(__dirname)
      },
      {
        // SCSS pipeline (applied right-to-left): sass -> postcss -> css -> style tag
        test: /\.scss$/,
        loaders: [
          'style',
          'css?source-map',
          'postcss-loader',
          'sass?sourceMap'
        ],
        include: path.join(__dirname)
      },
      {
        // Images below the limit are inlined as data URIs; larger ones are
        // emitted under a themed path.  NOTE(review): limit=10024 looks like
        // a typo for 10240 (10 KiB) — confirm the intended threshold.
        test: /\.(jpg|jpeg|gif|png|svg)$/,
        exclude: /node_modules/,
        loader:'url-loader?limit=10024&name=images/' + THEME + '/[name]_[hash].[ext]',
        include: path.join(__dirname)
      },
      {
        test: /\.(woff|woff2|eot|ttf)$/,
        exclude: /node_modules/,
        loader: 'url-loader?limit=1024&name=fonts/' + THEME + '/[name]_[hash].[ext]',
        include: path.join(__dirname)
      }
    ]
  },
  postcss: [autoprefixer({
    // https://github.com/ai/browserslist
    browsers: ['last 2 version', 'ie 9', 'ios 7', 'android 4']
  })],
  sassLoader: {
    // Overwrites $theme SCSS variable.
    // Possible values: red, gray
    data: '$theme: "' + THEME + '";'
  },
  resolve: {
    extensions: ['', '.js'],
    alias: {
      // absolute path to pattern library
      'atomic-react-components': path.resolve(__dirname, 'src')
    }
  }
};
|
<reponame>akimikimikimikimikimikimika/shellCommands<filename>Time Measurement/Python/main.py
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Entry point for the time-measurement tool: build the shared state object,
# let argAnalyze fill it from the command-line arguments, then dispatch on
# the mode that the analysis selected.
from lib import CM,data
from analyze import argAnalyze
from execute import execute
from docs import help,version  # NOTE: shadows the built-in help() in this module
d=data()
argAnalyze(d)
# CM enumerates the run modes; argAnalyze sets exactly one of them on d.mode.
if d.mode==CM.main: execute(d)
if d.mode==CM.help: help()
if d.mode==CM.version: version()
|
<reponame>fspoettel/nine9s-discord-webhook
/* globals NINE9S_WEBHOOK_SECRET */
import { EventData } from './types/EventData'
import { Nine9sWebhookEvent } from './types/Nine9sWebhookEvent'
/**
 * Checks whether an incoming webhook request comes from Nine9s.
 *
 * NOTE(review): the second clause accepts any request whose
 * (attacker-controlled) User-Agent merely starts with the expected string,
 * which bypasses the shared-secret check entirely. Consider requiring the
 * X-Webhook-Secret header to match — confirm against the Nine9s docs before
 * tightening, in case legitimate deliveries omit the header.
 *
 * @see https://nine9s.cloud/api/docs
 */
export function isAuthenticated(headers: Headers): boolean {
  return (
    headers.get('X-Webhook-Secret') === NINE9S_WEBHOOK_SECRET ||
    (headers.get('User-Agent') || '').startsWith('Nine9s.cloud Webhook Alerts')
  )
}
/**
 * Runtime type guard for the webhook payload described by the Nine9s API.
 * Accepts only `endpoint.status.changed` events whose `data` object carries
 * a string `last_check_status`.
 *
 * @see https://nine9s.cloud/api/docs#operation/retrieveEndpoint
 */
export function isWellFormattedEvent(x: unknown): x is Nine9sWebhookEvent {
  if (typeof x !== 'object' || x == null) return false
  if (!('event_type' in x) || !('data' in x)) return false
  const candidate = x as Nine9sWebhookEvent
  return (
    candidate.event_type === 'endpoint.status.changed' &&
    typeof candidate.data === 'object' &&
    candidate.data != null &&
    typeof candidate.data.last_check_status === 'string'
  )
}
/**
 * Flattens a validated webhook event into the internal EventData shape,
 * merging in the incident-history fields derived by parseHistory.
 *
 * @see https://nine9s.cloud/api/docs#operation/retrieveEndpoint
 */
export function parseEvent(event: Nine9sWebhookEvent): EventData {
  const { data } = event
  return {
    endpoint: {
      name: data.name,
      url: data.endpoint_url,
      status: data.status,
      id: data.uuid,
    },
    // Status of the check that triggered this event
    status: data.last_check_status,
    statusDetails: data.last_check_message || undefined,
    timestamp: data.created_at,
    // Spreading null (no usable history) is a harmless no-op
    ...parseHistory(event),
  }
}
// Derives timing details from the event's check history.
//
// Assumes `history` is ordered most-recent-first (the variable names below
// suggest this — confirm against the Nine9s API docs).  Returns null when
// there is no usable history.  When the most recent check failed, or there
// is nothing older to compare against, only its timestamp and response time
// are reported.  When the most recent check is OK (i.e. the endpoint just
// recovered), the incident duration is measured back to the last good
// check; if none of the retained history was good, the earliest known check
// is used instead and isIncidentDurationExact is false (lower bound).
function parseHistory(event: Nine9sWebhookEvent) {
  const {
    data: { history },
  } = event
  if (!Array.isArray(history) || history.length === 0) return null
  const [mostRecentCheck, ...rest] = history
  const responseTime = mostRecentCheck.response_time
  const timestamp = mostRecentCheck.created_at
  const parsed = {
    timestamp,
    responseTime,
  }
  if (!mostRecentCheck.ok || rest.length === 0) {
    return parsed
  }
  const lastGoodCheck = rest.find(x => x.ok)
  const earliestCheck = rest[rest.length - 1]
  const incidentDuration = timeDiffInMinutes(
    (lastGoodCheck || earliestCheck).created_at,
    timestamp,
  )
  return {
    ...parsed,
    incidentDuration,
    isIncidentDurationExact: !!lastGoodCheck,
  }
}
/** Difference between two timestamps, in minutes, formatted with one decimal. */
function timeDiffInMinutes(prev: string, now: string): string {
  const elapsedMs = new Date(now).getTime() - new Date(prev).getTime()
  const MS_PER_MINUTE = 60_000
  return (elapsedMs / MS_PER_MINUTE).toFixed(1)
}
|
<reponame>tsonntag/gitter
require 'action_view'
module Gitter
# A plain data cell: knows its column key (x), row key (y) and raw content,
# and renders itself as a <td> carrying both keys as CSS classes.
class TableCell
  include ActionView::Helpers::NumberHelper

  attr_reader :x, :y, :content

  def initialize(x, y, content)
    @x = x
    @y = y
    @content = content
  end

  # Render as a table-data element; extra HTML options are merged in.
  def html(opts = {})
    Table.tag(:td, formatted_content, opts.merge(class: "#{x} #{y}"))
  end

  def header?
    false
  end

  # Numbers are displayed with '.' as the thousands delimiter.
  def formatted_content
    number_with_delimiter(content, delimiter: '.')
  end
end
# A header cell: renders as <th> and shows its content verbatim
# (no number formatting).
class TableHeaderCell < TableCell
  attr_reader :content

  def initialize(content)
    @content = content
  end

  def html(opts = {})
    Table.tag(:th, formatted_content, opts)
  end

  def header?
    true
  end

  # Header text is displayed as given.
  def formatted_content
    content
  end
end
# Renders a pivot table from two axes and a hash of cell values, with an
# optional sums row/column and grand total.
class Table
  extend ActionView::Helpers::TagHelper
  extend ActionView::Helpers::OutputSafetyHelper

  # Wraps +content+ in an HTML +tag+, always appending the "grid pivot"
  # CSS classes to whatever class the caller supplied.
  def self.tag tag, content, opts = {}
    opts = opts.merge(class: "#{opts[:class]} grid pivot")
    content_tag tag, raw(content), opts
  end

  attr_reader :title, :x_axis, :y_axis, :data, :opts

  # axis: responds to each, yielding [key,title] or key
  # data: hash from [x_key,y_key] to a cell value; values are combined with
  # + when opts[:show_sums] is set, so they should be numeric in that case
  # opts[:show_sums]: truthy to append sums; a String value is used as the
  # label of the sums row/column (any other truthy value yields 'Sum')
  def initialize title, x_axis, y_axis, data, opts = {}
    @title, @x_axis, @y_axis, @data, @opts = title, x_axis, y_axis, data, opts
    @cells = data.dup
    if label = opts[:show_sums]
      add_sums
      @x_axis = add_sum_label_to_axis @x_axis, label
      @y_axis = add_sum_label_to_axis @y_axis, label
    end
  end

  def empty?
    data.empty?
  end

  # Materializes the table as an array of rows of cells (memoized).  The
  # first row is the header row when an x axis is present.  An optional
  # block can transform each body cell value: it receives (cell, x, y).
  def rows
    @rows ||= begin
      rows = []
      rows << x_header if x_header
      rows + (y_axis||[nil]).map do |y,y_title|
        row = (x_axis||[nil]).map do |x,x_title|
          cell = @cells[cell_key(x,y)]
          cell = yield cell, x, y if block_given?
          TableCell.new x, y, cell
        end
        # Prepend the row-header cell when a y axis is present
        row.unshift TableHeaderCell.new(y_title||y) if y_axis
        row
      end
    end
  end

  # Full <table> markup (memoized).  opts[:tr_html] / opts[:table_html]
  # are forwarded as HTML attributes of the rows and of the table itself.
  def html opts = {}
    @html ||= begin
      h = rows.map do |row|
        Table.tag :tr, (row.map{|cell| cell.html} * "\n"), (opts[:tr_html]||{})
      end * "\n"
      Table.tag :table, h, (opts[:table_html]||{})
    end
  end

  private

  # Header row built from the x axis (memoized); nil when there is no x
  # axis.  A leading empty cell aligns it with the y-axis header column.
  def x_header
    @x_header ||= begin
      if x_axis
        h = x_axis.map{|key,title| TableHeaderCell.new(title||key) }
        h.unshift TableHeaderCell.new('') if y_axis
        h
      else
        nil
      end
    end
  end

  # Cells are keyed by [x,y]; when one axis is absent, the key collapses
  # to the remaining single value.
  def cell_key x, y
    if x.nil? || y.nil?
      x.nil? ? y : x
    else
      [x,y]
    end
  end

  # Adds per-row sums, per-column sums and the grand total to @cells under
  # the :sum pseudo-key.
  def add_sums
    xsums, ysums = {}, {}
    sum = 0
    @cells.each do |key,value|
      x, y = *key
      xsums[y] = (xsums[y]||0) + value
      ysums[x] = (ysums[x]||0) + value
      sum += value
    end
    xsums.each{|y,sum| @cells[cell_key(:sum,y)] = sum}
    ysums.each{|x,sum| @cells[cell_key(x,:sum)] = sum}
    @cells[[:sum,:sum]] = sum
    @cells
  end

  # Appends the :sum entry (with its display label) to an axis, preserving
  # the axis container type; returns nil for unsupported axis types.
  def add_sum_label_to_axis axis, label = nil
    label = 'Sum' unless String === label
    case axis
    when Array then axis + [[:sum, label]]
    when Hash then axis.merge(:sum => label)
    else nil;
    end
  end
end
end
|
def descending_sort(arr):
    """Sort a mutable sequence in place into descending order.

    Parameters
    ----------
    arr : mutable sequence
        Items must be mutually comparable; the sequence is modified in place.

    Returns
    -------
    The same sequence object, now sorted largest-first.

    Notes
    -----
    Replaces the original hand-rolled O(n^2) bubble sort with the built-in
    Timsort (O(n log n)).  Slice assignment preserves the original in-place
    mutation contract for any mutable sequence, not just lists.
    """
    arr[:] = sorted(arr, reverse=True)
    return arr
|
<reponame>nabeelkhan/Oracle-DBA-Life
-- ***************************************************************************
-- File: 12_2.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
-- Demonstrates DBMS_ALERT: register for an alert in this session, then
-- consume the first 10 signals, printing each received message.
SPOOL 12_2.lis
SET SERVEROUTPUT ON SIZE 1000000
DECLARE
lv_message_txt VARCHAR2(1800);
lv_status_num PLS_INTEGER;
BEGIN
-- Registers the alert for the session.
DBMS_ALERT.REGISTER('REORDER_THRESHOLD_ALERT');
-- Processes the first 10 alerts and then ends and displays alerts.
FOR lv_count_num IN 1..10 LOOP
-- Catches each of the alerts.  WAITONE blocks until the alert is
-- signalled (and committed) by another session.
DBMS_ALERT.WAITONE('REORDER_THRESHOLD_ALERT',
lv_message_txt, lv_status_num);
-- Status of 0 means successful alert capture, otherwise it is
-- unsuccessful.
IF lv_status_num = 0 THEN
DBMS_OUTPUT.PUT_LINE(lv_message_txt);
ELSE
DBMS_OUTPUT.PUT_LINE('Alert Failed.');
END IF;
END LOOP;
END;
/
SPOOL OFF
|
<filename>DicomWebStorge/Orthanc-1.7.4/OrthancFramework/Sources/Images/Font.cpp
/**
* Orthanc - A Lightweight, RESTful DICOM Store
* Copyright (C) 2012-2016 <NAME>, Medical Physics
* Department, University Hospital of Liege, Belgium
* Copyright (C) 2017-2020 <NAME>., Belgium
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
**/
#include "../PrecompiledHeaders.h"
#include "Font.h"
#if !defined(ORTHANC_ENABLE_LOCALE)
# error ORTHANC_ENABLE_LOCALE must be defined to use this file
#endif
#if ORTHANC_SANDBOXED == 0
# include "../SystemToolbox.h"
#endif
#include "../OrthancException.h"
#include "../Toolbox.h"
#include "Image.h"
#include "ImageProcessing.h"
#include <stdio.h>
#include <memory>
#include <boost/lexical_cast.hpp>
namespace Orthanc
{
  // The characters_ map owns its Character values through raw pointers
  // (populated by LoadFromMemory): free them all on destruction.
  Font::~Font()
  {
    for (Characters::iterator it = characters_.begin();
         it != characters_.end(); ++it)
    {
      delete it->second;
    }
  }
  // Parses a font serialized as a JSON object with fields "Name" (string),
  // "Size" (int) and "Characters" (object).  Each entry of "Characters" maps
  // a decimal character code (0..255) to a glyph described by its "Advance",
  // "Width", "Height", "Top" and "Bitmap" (array of 0..255 alpha samples).
  // Fills name_, size_, maxHeight_ and characters_; throws
  // OrthancException(ErrorCode_BadFont) on any malformed input.
  void Font::LoadFromMemory(const std::string& font)
  {
    Json::Value v;
    Json::Reader reader;

    // Validate the top-level structure before reading any field
    if (!reader.parse(font, v) ||
        v.type() != Json::objectValue ||
        !v.isMember("Name") ||
        !v.isMember("Size") ||
        !v.isMember("Characters") ||
        v["Name"].type() != Json::stringValue ||
        v["Size"].type() != Json::intValue ||
        v["Characters"].type() != Json::objectValue)
    {
      throw OrthancException(ErrorCode_BadFont);
    }

    name_ = v["Name"].asString();
    size_ = v["Size"].asUInt();
    maxHeight_ = 0;   // recomputed below as the tallest glyph

    Json::Value::Members characters = v["Characters"].getMemberNames();

    for (size_t i = 0; i < characters.size(); i++)
    {
      const Json::Value& info = v["Characters"][characters[i]];

      // Validate the glyph descriptor before reading any field
      if (info.type() != Json::objectValue ||
          !info.isMember("Advance") ||
          !info.isMember("Bitmap") ||
          !info.isMember("Height") ||
          !info.isMember("Top") ||
          !info.isMember("Width") ||
          info["Advance"].type() != Json::intValue ||
          info["Bitmap"].type() != Json::arrayValue ||
          info["Height"].type() != Json::intValue ||
          info["Top"].type() != Json::intValue ||
          info["Width"].type() != Json::intValue)
      {
        throw OrthancException(ErrorCode_BadFont);
      }

      std::unique_ptr<Character> c(new Character);

      c->advance_ = info["Advance"].asUInt();
      c->height_ = info["Height"].asUInt();
      c->top_ = info["Top"].asUInt();
      c->width_ = info["Width"].asUInt();
      c->bitmap_.resize(info["Bitmap"].size());

      if (c->height_ > maxHeight_)
      {
        maxHeight_ = c->height_;
      }

      // Each bitmap sample is an 8-bit alpha value
      for (Json::Value::ArrayIndex j = 0; j < info["Bitmap"].size(); j++)
      {
        if (info["Bitmap"][j].type() != Json::intValue)
        {
          throw OrthancException(ErrorCode_BadFont);
        }

        int value = info["Bitmap"][j].asInt();
        if (value < 0 || value > 255)
        {
          throw OrthancException(ErrorCode_BadFont);
        }

        c->bitmap_[j] = static_cast<uint8_t>(value);
      }

      // The JSON key is the glyph's character code (must fit in a byte)
      int index = boost::lexical_cast<int>(characters[i]);
      if (index < 0 || index > 255)
      {
        throw OrthancException(ErrorCode_BadFont);
      }

      // characters_ takes ownership of the raw pointer (freed in ~Font)
      characters_[static_cast<char>(index)] = c.release();
    }
  }
#if ORTHANC_SANDBOXED == 0
  // Reads the JSON font descriptor from "path" and parses it (see
  // LoadFromMemory).  Unavailable in sandboxed builds (no filesystem).
  void Font::LoadFromFile(const std::string& path)
  {
    std::string font;
    SystemToolbox::ReadFile(font, path);
    LoadFromMemory(font);
  }
#endif
static unsigned int MyMin(unsigned int a,
unsigned int b)
{
return a < b ? a : b;
}
  // Alpha-blends one glyph OVER the target image, with (x, y) the top-left
  // corner of the glyph.  "color" holds 4 bytes already laid out in the
  // target's channel order.  The glyph is clipped against all four borders.
  void Font::DrawCharacter(ImageAccessor& target,
                           const Character& character,
                           int x,
                           int y,
                           const uint8_t color[4]) const
  {
    // Compute the bounds of the character
    if (x >= static_cast<int>(target.GetWidth()) ||
        y >= static_cast<int>(target.GetHeight()))
    {
      // The character is out of the image
      return;
    }

    // Number of glyph columns/rows clipped away at the left/top border
    unsigned int left = x < 0 ? -x : 0;
    unsigned int top = y < 0 ? -y : 0;
    // One-past-last glyph column/row that still fits inside the image
    unsigned int width = MyMin(character.width_, target.GetWidth() - x);
    unsigned int height = MyMin(character.height_, target.GetHeight() - y);
    unsigned int bpp = target.GetBytesPerPixel();

    // Blit the font bitmap OVER the target image
    // https://en.wikipedia.org/wiki/Alpha_compositing
    for (unsigned int cy = top; cy < height; cy++)
    {
      uint8_t* p = reinterpret_cast<uint8_t*>(target.GetRow(y + cy)) + (x + left) * bpp;
      unsigned int pos = cy * character.width_ + left;

      switch (target.GetFormat())
      {
        case PixelFormat_Grayscale8:
        {
          assert(bpp == 1);
          for (unsigned int cx = left; cx < width; cx++, pos++, p++)
          {
            // dst = (alpha * color + (255 - alpha) * dst) >> 8
            // (">> 8" approximates the exact division by 255)
            uint16_t alpha = character.bitmap_[pos];
            uint16_t value = alpha * static_cast<uint16_t>(color[0]) + (255 - alpha) * static_cast<uint16_t>(*p);
            *p = static_cast<uint8_t>(value >> 8);
          }

          break;
        }

        case PixelFormat_RGB24:
        {
          assert(bpp == 3);
          for (unsigned int cx = left; cx < width; cx++, pos++, p += 3)
          {
            uint16_t alpha = character.bitmap_[pos];
            // Blend each of the three channels independently
            for (uint8_t i = 0; i < 3; i++)
            {
              uint16_t value = alpha * static_cast<uint16_t>(color[i]) + (255 - alpha) * static_cast<uint16_t>(p[i]);
              p[i] = static_cast<uint8_t>(value >> 8);
            }
          }

          break;
        }

        case PixelFormat_RGBA32:
        case PixelFormat_BGRA32:
        {
          assert(bpp == 4);
          for (unsigned int cx = left; cx < width; cx++, pos++, p += 4)
          {
            // "Over" compositing against a possibly non-opaque destination:
            // the destination's own alpha channel (p[3]) scales its weight
            float alpha = static_cast<float>(character.bitmap_[pos]) / 255.0f;
            float beta = (1.0f - alpha) * static_cast<float>(p[3]) / 255.0f;
            float denom = 1.0f / (alpha + beta);

            for (uint8_t i = 0; i < 3; i++)
            {
              p[i] = static_cast<uint8_t>((alpha * static_cast<float>(color[i]) +
                                           beta * static_cast<float>(p[i])) * denom);
            }

            p[3] = static_cast<uint8_t>(255.0f * (alpha + beta));
          }

          break;
        }

        default:
          throw OrthancException(ErrorCode_NotImplemented);
      }
    }
  }
  // Draws "utf8" with its top-left corner at (x, y).  The string is first
  // converted to Latin-1 (or stripped to ASCII when locale support is
  // compiled out); '\n' moves the pen to the start of the next line, and
  // characters absent from the font are silently skipped.
  void Font::DrawInternal(ImageAccessor& target,
                          const std::string& utf8,
                          int x,
                          int y,
                          const uint8_t color[4]) const
  {
    if (target.GetFormat() != PixelFormat_Grayscale8 &&
        target.GetFormat() != PixelFormat_RGB24 &&
        target.GetFormat() != PixelFormat_RGBA32 &&
        target.GetFormat() != PixelFormat_BGRA32)
    {
      throw OrthancException(ErrorCode_NotImplemented);
    }

    int a = x;   // current horizontal pen position

#if ORTHANC_ENABLE_LOCALE == 1
    std::string s = Toolbox::ConvertFromUtf8(utf8, Encoding_Latin1);
#else
    // If the locale support is disabled, simply drop non-ASCII
    // characters from the source UTF-8 string
    std::string s = Toolbox::ConvertToAscii(utf8);
#endif

    for (size_t i = 0; i < s.size(); i++)
    {
      if (s[i] == '\n')
      {
        // Go to the next line
        a = x;
        y += maxHeight_ + 1;
      }
      else
      {
        Characters::const_iterator c = characters_.find(s[i]);

        if (c != characters_.end())
        {
          DrawCharacter(target, *c->second, a, y + static_cast<int>(c->second->top_), color);
          a += c->second->advance_;
        }
      }
    }
  }
void Font::Draw(ImageAccessor& target,
const std::string& utf8,
int x,
int y,
uint8_t grayscale) const
{
uint8_t color[4] = { grayscale, grayscale, grayscale, 255 };
DrawInternal(target, utf8, x, y, color);
}
void Font::Draw(ImageAccessor& target,
const std::string& utf8,
int x,
int y,
uint8_t r,
uint8_t g,
uint8_t b) const
{
uint8_t color[4];
switch (target.GetFormat())
{
case PixelFormat_BGRA32:
color[0] = b;
color[1] = g;
color[2] = r;
color[3] = 255;
break;
default:
color[0] = r;
color[1] = g;
color[2] = b;
color[3] = 255;
break;
}
DrawInternal(target, utf8, x, y, color);
}
  // Computes the bounding box (width x height, in pixels) that DrawInternal
  // would cover for "utf8", using the same Latin-1/ASCII conversion, line
  // handling and glyph metrics.  Unknown characters contribute nothing.
  void Font::ComputeTextExtent(unsigned int& width,
                               unsigned int& height,
                               const std::string& utf8) const
  {
    width = 0;
    height = 0;

#if ORTHANC_ENABLE_LOCALE == 1
    std::string s = Toolbox::ConvertFromUtf8(utf8, Encoding_Latin1);
#else
    // If the locale support is disabled, simply drop non-ASCII
    // characters from the source UTF-8 string
    std::string s = Toolbox::ConvertToAscii(utf8);
#endif

    // Compute the text extent
    unsigned int x = 0;   // pen position within the current line
    unsigned int y = 0;   // top of the current line

    for (size_t i = 0; i < s.size(); i++)
    {
      if (s[i] == '\n')
      {
        // Go to the next line
        x = 0;
        y += (maxHeight_ + 1);
      }
      else
      {
        Characters::const_iterator c = characters_.find(s[i]);

        if (c != characters_.end())
        {
          x += c->second->advance_;

          // Track the lowest pixel reached by any glyph so far
          unsigned int bottom = y + c->second->top_ + c->second->height_;
          if (bottom > height)
          {
            height = bottom;
          }

          if (x > width)
          {
            width = x;
          }
        }
      }
    }
  }
ImageAccessor* Font::Render(const std::string& utf8,
PixelFormat format,
uint8_t r,
uint8_t g,
uint8_t b) const
{
unsigned int width, height;
ComputeTextExtent(width, height, utf8);
std::unique_ptr<ImageAccessor> target(new Image(format, width, height, false));
ImageProcessing::Set(*target, 0, 0, 0, 255);
Draw(*target, utf8, 0, 0, r, g, b);
return target.release();
}
ImageAccessor* Font::RenderAlpha(const std::string& utf8) const
{
unsigned int width, height;
ComputeTextExtent(width, height, utf8);
std::unique_ptr<ImageAccessor> target(new Image(PixelFormat_Grayscale8, width, height, false));
ImageProcessing::Set(*target, 0);
Draw(*target, utf8, 0, 0, 255);
return target.release();
}
}
|
<gh_stars>0
import { Indicator, Toast } from 'mint-ui'
import errMsg from '../unit/apiErr.js'
// Page-initialization mixin: opens a full-screen loading indicator while the
// first request is in flight, then switches to either the data view or the
// empty view once a response arrives.
export const pageInit = {
  data() {
    return {
      isAJAX: false, // whether a request has completed; never reset back to false, so the loading indicator is only shown once
      showDataView: false, // a request completed and returned data
      showNoView: false // a request completed with no data; kept separate from showDataView to avoid view flicker
    }
  },
  created() {
    // Show the spinner as soon as the component is created.
    Indicator.open({
      spinnerType: 'fading-circle'
    })
  },
  watch: {
    // Keep the spinner in sync with the request state: visible while no
    // request has completed, hidden as soon as one has.
    isAJAX(now) {
      if (!now) {
        Indicator.open({
          spinnerType: 'fading-circle'
        })
      } else {
        Indicator.close()
      }
    }
  },
  methods: {
    // Unwraps a vue-resource style response ({ body: { success, data, message } }).
    // Resolves with `data` on success; otherwise shows a toast with the
    // translated error code and rejects with the raw message code.
    isTrue(httpResponse) {
      return new Promise(function (resolve, reject) {
        const {
          body: { success, data, message }
        } = httpResponse
        if (success) {
          this.showDataView = true
          resolve(data)
        } else {
          this.showNoView = true
          Toast(errMsg(parseInt(message)))
          reject(message)
        }
      }.bind(this))
    }
  }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2019 The Baricoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Detect when a mutable list or dict is used as a default parameter value in a Python function.
# Use the C locale so git grep behaves identically on every host.
export LC_ALL=C
EXIT_CODE=0
# Match any "def name(... = [" or "... = {" in tracked *.py files.
OUTPUT=$(git grep -E '^\s*def [a-zA-Z0-9_]+\(.*=\s*(\[|\{)' -- "*.py")
if [[ ${OUTPUT} != "" ]]; then
    echo "A mutable list or dict seems to be used as default parameter value:"
    echo
    echo "${OUTPUT}"
    echo
    # The heredoc below is printed verbatim to explain the pitfall.
    cat << EXAMPLE
This is how mutable list and dict default parameter values behave:
>>> def f(i, j=[], k={}):
...     j.append(i)
...     k[i] = True
...     return j, k
...
>>> f(1)
([1], {1: True})
>>> f(1)
([1, 1], {1: True})
>>> f(2)
([1, 1, 2], {1: True, 2: True})
The intended behaviour was likely:
>>> def f(i, j=None, k=None):
...     if j is None:
...         j = []
...     if k is None:
...         k = {}
...     j.append(i)
...     k[i] = True
...     return j, k
...
>>> f(1)
([1], {1: True})
>>> f(1)
([1], {1: True})
>>> f(2)
([2], {2: True})
EXAMPLE
    EXIT_CODE=1
fi
exit ${EXIT_CODE}
|
<filename>common/src/index.ts
export * from './types';
export * from './api';
export * from './constant';
|
//# sourceMappingURL=ts_test.js.map
|
#!/usr/bin/env bash

# Configure ZSH as the login shell:
#  1. ensure /usr/local/bin/zsh is listed in /etc/shells,
#  2. make it the default shell for the current user,
#  3. link the pure prompt files into zsh's site-functions directory.
# Relies on the echo_ok/echo_skip/echo_fail/echo_done helpers and $LIB_DIR
# being provided by the surrounding dotfiles framework.

# add ZSH to list of accepted shells
if grep -Fxq "/usr/local/bin/zsh" /etc/shells > /dev/null 2>&1; then
	echo_skip "ZSH is already in the list of accepted shells"
else
	# If not found, append it (needs root to write /etc/shells)
	sudo sh -c 'echo "/usr/local/bin/zsh" >> /etc/shells'
	# Re-check so we report whether the append actually took effect
	if grep -Fxq "/usr/local/bin/zsh" /etc/shells > /dev/null 2>&1; then
		echo_ok "ZSH added to list of accepted shells"
	else
		echo_fail "ZSH could not be added to list of accepted shells."
	fi
fi

# set ZSH as the default shell (only when the current shell is bash);
# "$SHELL" is quoted and `command -v` replaces the non-POSIX `which`
if echo "$SHELL" | grep -q /bin/bash > /dev/null 2>&1; then
	chsh -s "$(command -v zsh)"
	echo_ok "ZSH is now the default shell"
else
	echo_skip "ZSH is already the default shell"
fi

# symlink pure-prompt & async files into the zsh site-functions directory
ln -sf "$LIB_DIR/pure/pure.zsh" /usr/local/share/zsh/site-functions/prompt_pure_setup
ln -sf "$LIB_DIR/pure/async.zsh" /usr/local/share/zsh/site-functions/async

echo_done "zsh"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.