text stringlengths 1 1.05M |
|---|
<gh_stars>0
package main
import (
"fmt"
"log"
"os"
"courseScheduling/dev/dummy"
"courseScheduling/models"
"courseScheduling/scheduling"
)
// Package-level lookup tables keyed by each record's primary key.
// They are populated in main from the dummy data and consumed by
// fillInstructedClazz to resolve foreign-key references.
var (
courses map[string]*models.Course
instructs map[int]*models.Instruct
clazzes map[string]*models.Clazz
)
// dummy data includes only the primary key, the function fill all fields from the corresponding table
func fillInstructedClazz(allInstructedClazz []*models.InstructedClazz) {
for _, item := range allInstructedClazz {
ins, exist := instructs[item.Instruct.InstructId]
if !exist {
log.Println("cannot found Instruct", item.Instruct.InstructId)
}
item.Instruct = ins
cou, exist := courses[item.Instruct.Course.Id]
if !exist {
log.Println("cannot found Course", item.Instruct.Course.Id)
}
item.Instruct.Course = cou
clazz, exist := clazzes[item.Clazz.ClazzId]
if !exist {
log.Println("cannot found Clazz", item.Clazz.ClazzId)
}
item.Clazz = clazz
}
}
// main wires the dummy data into the lookup maps, runs the scheduler and
// writes the resulting schedule plus its score to s.txt.
func main() {
	// Build the primary-key lookup maps used by fillInstructedClazz.
	allCourses := dummy.ParseCourse()
	courses = make(map[string]*models.Course, len(allCourses))
	for _, item := range allCourses {
		courses[item.Id] = item
	}
	allInstructs := dummy.ParseInstruct()
	instructs = make(map[int]*models.Instruct, len(allInstructs))
	for _, item := range allInstructs {
		instructs[item.InstructId] = item
	}
	allClazzes := dummy.ParseClazz()
	clazzes = make(map[string]*models.Clazz, len(allClazzes))
	for _, item := range allClazzes {
		clazzes[item.ClazzId] = item
	}
	allInstructedClazz := dummy.ParseInstructedClazz()
	fillInstructedClazz(allInstructedClazz)
	allClazzroom := dummy.ParseClazzroom()
	allTimespan := dummy.ParseTimespan()
	result, score := scheduling.GenerateSchedule(&scheduling.Params{
		AllInstructedClazz: allInstructedClazz,
		AllClazzroom:       allClazzroom,
		AllTimespan:        allTimespan,
		UseEvaluator: []string{
			"AvoidUseNight", "DisperseSameCourse", "KeepAllLessonsDisperseEveryTimespan", "KeepAllLessonsDisperseEveryDay",
		},
	})
	// Write the score followed by every scheduled item to s.txt.
	f, err := os.Create("s.txt")
	if err != nil {
		log.Println(err) // was fmt.Println: report all errors via log consistently
		return
	}
	defer f.Close() // previously the file was never closed (resource leak)
	_, err = fmt.Fprintf(f, "Score: %v\n", score)
	if err != nil {
		log.Println(err)
		return
	}
	for _, item := range result {
		if _, err := fmt.Fprintf(f, "%v\n", item); err != nil {
			log.Println(err)
			return
		}
	}
}
|
// Aggregator module: re-exports the football API under a single default
// object so callers can do `import api from '...'` and use `api.football`.
import football from './football/api'
export default {
football,
}
|
<reponame>fuergaosi233/uptime-monitor
// Declaration for the async `tempFixes` routine implemented elsewhere;
// its exact behavior is defined by the implementation file — see there.
export declare const tempFixes: () => Promise<void>;
|
def palindrome_finder(sentence):
    """Return the words in ``sentence`` that read the same forwards and backwards.

    Args:
        sentence: A string of whitespace-separated words.

    Returns:
        A list of the palindromic words, in their original order.
    """
    # split() with no argument collapses runs of whitespace. The original
    # split(' ') produced empty strings for consecutive spaces (and for an
    # empty sentence), and '' == ''[::-1] is True, so empty strings were
    # wrongly reported as palindromes.
    words = sentence.split()
    return [word for word in words if word == word[::-1]]
#!/usr/bin/env bash
# Pre-release chart sanity checks: fail if any chart pins an image to the
# mutable "latest" tag or references a SNAPSHOT version.
# Get project root
PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel)
# Should have no "latest" tags; print any offending lines for context first.
grep -R "tag: latest" "$PROJECT_ROOT_DIR"/infra/charts || true
COUNT=$(grep -R "tag: latest" "$PROJECT_ROOT_DIR"/infra/charts | wc -l)
if [ "$COUNT" -gt 0 ]; then
    # (message fixed: the check trips on one or more matches, not "more than one")
    echo 'Found an instance of "latest" in an image tag. Please replace with correct release version.';
    exit 1
else
    echo 'No "latest" tags found, continuing';
fi
# TODO: Enable DockerHub vs GCR checks asap.
## Should have no "gcr" images
#grep -R "gcr.io" "$PROJECT_ROOT_DIR"/infra/charts || true
#COUNT=$(grep -R "gcr.io" "$PROJECT_ROOT_DIR"/infra/charts | wc -l)
#
#if [ "$COUNT" -gt 0 ]; then
# echo 'Found more than one instance of "gcr.io" in charts. Please replace with https://hub.docker.com/r/feastdev feast image.';
# exit 1
#else
# echo 'No "gcr.io" instances found, continuing';
#fi
# Should have no "SNAPSHOT" versions
grep -R "SNAPSHOT" "$PROJECT_ROOT_DIR"/infra/charts || true
COUNT=$(grep -R "SNAPSHOT" "$PROJECT_ROOT_DIR"/infra/charts | wc -l)
if [ "$COUNT" -gt 0 ]; then
    # (message fixed for the same reason as above)
    echo 'Found an instance of "SNAPSHOT" in charts. Please ensure that no SNAPSHOT charts are published.';
    exit 1
else
    echo 'No "SNAPSHOT" instances found, continuing';
fi
<reponame>wmn7/Traffic-Classification<gh_stars>1-10
'''
@Author: <NAME>
@Date: 2021-01-07 15:04:21
@Description: 训练模型的整个流程, 单个模型的训练
@LastEditTime: 2021-03-25 12:04:33
'''
import os
import torch
from torch import nn, optim
from TrafficFlowClassification.TrafficLog.setLog import logger
from TrafficFlowClassification.utils.setConfig import setup_config
# 下面是一些可以使用的模型
from TrafficFlowClassification.models.cnn1d import cnn1d
from TrafficFlowClassification.models.cnn2d import cnn2d
from TrafficFlowClassification.models.cnn1d_noPooling import cnn1d_noPooling
from TrafficFlowClassification.models.cnn2d_noPooling import cnn2d_noPooling
from TrafficFlowClassification.models.dnn import deepnn # 对统计特征进行分类
from TrafficFlowClassification.models.resnet18_2d import resnet182D
from TrafficFlowClassification.models.resnet18_1d import resnet181D
from TrafficFlowClassification.train.trainProcess import train_process
from TrafficFlowClassification.train.validateProcess import validate_process
from TrafficFlowClassification.data.dataLoader import data_loader
from TrafficFlowClassification.data.tensordata import get_tensor_data
from TrafficFlowClassification.utils.helper import adjust_learning_rate, save_checkpoint
from TrafficFlowClassification.utils.evaluate_tools import display_model_performance_metrics
def train_pipeline():
    """Train or evaluate a single traffic-classification model.

    Reads the config, builds the model/criterion/optimizer, loads the train
    and test dataloaders, then either runs evaluation only
    (``cfg.test.evaluate``) or trains for ``cfg.train.epochs`` epochs while
    checkpointing the best model by test accuracy.
    """
    cfg = setup_config()  # load the config file
    logger.info(cfg)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    logger.info('是否使用 GPU 进行训练, {}'.format(device))
    model_path = os.path.join(cfg.train.model_dir, cfg.train.model_name)  # checkpoint path
    model = resnet181D(model_path, pretrained=cfg.test.pretrained, num_classes=12, image_width=cfg.train.IMAGE_WIDTH).to(device)  # build the model
    criterion = nn.CrossEntropyLoss()  # loss function
    optimizer = optim.Adam(model.parameters(), lr=cfg.train.lr)  # optimizer
    logger.info('成功初始化模型.')
    train_loader = data_loader(pcap_file=cfg.train.train_pcap, label_file=cfg.train.train_label, statistic_file=cfg.train.train_statistic, trimed_file_len=cfg.train.TRIMED_FILE_LEN, batch_size=cfg.train.BATCH_SIZE)  # train dataloader
    test_loader = data_loader(pcap_file=cfg.train.test_pcap, label_file=cfg.train.test_label, statistic_file=cfg.train.test_statistic, trimed_file_len=cfg.train.TRIMED_FILE_LEN, batch_size=cfg.train.BATCH_SIZE)  # test dataloader
    logger.info('成功加载数据集.')
    if cfg.test.evaluate:  # evaluation-only mode
        logger.info('进入测试模式.')
        validate_process(test_loader, model, criterion, device, 20)  # overall accuracy
        torch.cuda.empty_cache()  # free cached GPU memory
        # Compute detailed per-class metrics.
        index2label = {j: i for i, j in cfg.test.label2index.items()}  # index -> label mapping
        label_list = [index2label.get(i) for i in range(12)]  # the 12 label names
        pcap_data, statistic_data, label_data = get_tensor_data(pcap_file=cfg.train.test_pcap, statistic_file=cfg.train.test_statistic, label_file=cfg.train.test_label, trimed_file_len=cfg.train.TRIMED_FILE_LEN)
        start_index = 0
        y_pred = None
        # NOTE(review): batch boundaries are hard-coded; samples beyond index
        # 5700 are never scored — confirm against the actual test-set size.
        for i in list(range(100, 5800, 100)):
            y_pred_batch = model(pcap_data[start_index:i].to(device), statistic_data[start_index:i].to(device))  # predict one batch
            start_index = i
            if y_pred is None:  # was `== None`, which is unreliable for tensors
                y_pred = y_pred_batch.cpu().detach()
            else:
                y_pred = torch.cat((y_pred, y_pred_batch.cpu().detach()), dim=0)
        print(y_pred.shape)
        _, pred = y_pred.topk(1, 1, largest=True, sorted=True)
        Y_data_label = [index2label.get(i.tolist()) for i in label_data]  # indices -> label names
        pred_label = [index2label.get(i.tolist()) for i in pred.view(-1).cpu().detach()]
        display_model_performance_metrics(true_labels=Y_data_label, predicted_labels=pred_label, classes=label_list)
        return
    best_prec1 = 0
    for epoch in range(cfg.train.epochs):
        adjust_learning_rate(optimizer, epoch, cfg.train.lr)  # decay the learning rate over time
        train_process(train_loader, model, criterion, optimizer, epoch, device, 80)  # train for one epoch
        prec1 = validate_process(test_loader, model, criterion, device, 20)  # evaluate on validation set
        # remember the best prec@1 and save checkpoint
        is_best = prec1 > best_prec1
        best_prec1 = max(prec1, best_prec1)
        # Persist the best model seen so far.
        save_checkpoint({
            'epoch': epoch + 1,
            'state_dict': model.state_dict(),
            'best_prec1': best_prec1,
            'optimizer': optimizer.state_dict()
        }, is_best, model_path)
    logger.info('Finished! (* ̄︶ ̄)')


if __name__ == "__main__":
    train_pipeline()  # for manual testing
#!/bin/sh
# Attach previously created detached code signatures to an unsigned
# macOS app bundle tarball, producing a signed bundle in ${OUTDIR}.
set -e
UNSIGNED=$1
SIGNATURE=$2
ARCH=x86_64
ROOTDIR=dist
BUNDLE=${ROOTDIR}/mire-Qt.app
TEMPDIR=signed.temp
OUTDIR=signed-app
# Both positional arguments are mandatory.
if [ -z "$UNSIGNED" ]; then
echo "usage: $0 <unsigned app> <signature>"
exit 1
fi
if [ -z "$SIGNATURE" ]; then
echo "usage: $0 <unsigned app> <signature>"
exit 1
fi
# Unpack the unsigned bundle and the signature tarball into one tree.
rm -rf ${TEMPDIR} && mkdir -p ${TEMPDIR}
tar -C ${TEMPDIR} -xf ${UNSIGNED}
tar -C ${TEMPDIR} -xf ${SIGNATURE}
# The pagestuff / codesign_allocate tools may be shipped inside the
# signature tarball; fall back to those copies unless overridden.
if [ -z "${PAGESTUFF}" ]; then
PAGESTUFF=${TEMPDIR}/pagestuff
fi
if [ -z "${CODESIGN_ALLOCATE}" ]; then
CODESIGN_ALLOCATE=${TEMPDIR}/codesign_allocate
fi
# For every detached *.sign file: grow the target binary to make room,
# find the signature offset, then splice the signature bytes in with dd.
for i in `find ${TEMPDIR} -name "*.sign"`; do
SIZE=`stat -c %s ${i}`
TARGET_FILE=`echo ${i} | sed 's/\.sign$//'`
echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}"
${CODESIGN_ALLOCATE} -i ${TARGET_FILE} -a ${ARCH} ${SIZE} -o ${i}.tmp
OFFSET=`${PAGESTUFF} ${i}.tmp -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
if [ -z ${QUIET} ]; then
echo "Attaching signature at offset ${OFFSET}"
fi
dd if=$i of=${i}.tmp bs=1 seek=${OFFSET} count=${SIZE} 2>/dev/null
mv ${i}.tmp ${TARGET_FILE}
rm ${i}
echo "Success."
done
# Move the now-signed tree out and clean up.
mv ${TEMPDIR}/${ROOTDIR} ${OUTDIR}
rm -rf ${TEMPDIR}
echo "Signed: ${OUTDIR}"
# Quicksort: sort a list into ascending order.
def quicksort(arr):
    """Sort ``arr`` ascending via recursive quicksort.

    The first element serves as the pivot; strictly smaller values go to
    the left partition, the rest to the right, and both are sorted
    recursively. Returns a new list; the input is left unmodified.
    """
    if len(arr) <= 1:
        return arr
    pivot, rest = arr[0], arr[1:]
    smaller = [value for value in rest if value < pivot]
    larger_or_equal = [value for value in rest if value >= pivot]
    return quicksort(smaller) + [pivot] + quicksort(larger_or_equal)
# Demo: sort a sample array and show the result.
# Test array
arr = [3, 2, 7, 1, 4, 5]
# print sorted array
print(quicksort(arr)) # [1, 2, 3, 4, 5, 7]
// webpack-dev-server configuration for local development.
module.exports = {
devServer: {
// Host address (0.0.0.0 exposes the dev server on all interfaces)
host: "0.0.0.0",
// Default port
port: 8080,
// Proxy configuration
proxy: {
"/api": {
// Target API address
target: "http://127.0.0.1:80/",
// Also proxy websockets
ws: true,
// Whether to change the Host header origin to the target URL
changeOrigin: false
}
}
}
};
#!/bin/bash
# Run_AMP24_Dhrystone.sh
# Runs the Dhrystone benchmark ELF on a simulated 24-core big-endian
# Xilinx MicroBlaze (variant V8_10) using the Imperas ISS.
# Check Environment
if [ -z ${IMPERAS_HOME} ]; then
    echo "IMPERAS_HOME not set. Please check environment setup."
    exit 1  # was a bare `exit` (status 0); missing setup is an error
fi
${IMPERAS_ISS} --verbose --output imperas.log \
    --program ../../../Applications/dhrystone_microblaze/dhrystone_microblaze.MICROBLAZE-O2-g.elf \
    --processorvendor xilinx.ovpworld.org --processorname microblaze --variant V8_10 \
    --numprocessors 24 \
    --parameter endian=big \
    "$@" \
    -argv 800000
#!/bin/bash
# CI lint gate: verify Bazel files with buildifier, then run gometalinter
# with an explicit allow-list of Go linters over the whole module.
set -ex
buildifier -showlog -mode=check $(find . -type f \( -name 'BUILD' -or -name 'WORKSPACE' -or -wholename '.*bazel' -or -wholename '.*bzl' \) -print )
# Scale linter concurrency to the number of online CPUs.
NUM_CPU=$(getconf _NPROCESSORS_ONLN)
# All linters are disabled first, then enabled one by one so the active
# set is explicit; generated files (.pb.go, gen_test.go) are excluded.
gometalinter --concurrency=${NUM_CPU} --enable-gc --deadline=300s --disable-all\
--enable=aligncheck\
--enable=deadcode\
--enable=errcheck\
--enable=gas\
--enable=goconst\
--enable=gofmt\
--enable=goimports\
--enable=golint\
--exclude=.pb.go\
--exclude=gen_test.go\
--enable=gosimple\
--enable=gotype\
--enable=ineffassign\
--enable=interfacer\
--enable=lll --line-length=120\
--enable=misspell\
--enable=staticcheck\
--enable=structcheck\
--enable=unconvert\
--enable=unused\
--enable=varcheck\
--enable=vet\
--enable=vetshadow\
./...
# Disabled linters:
# --enable=dupl\
# --enable=gocyclo\
# --cyclo-over=15\
/*
* Copyright © 2020 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
import { getAddressFromPublicKey, getKeys, getRandomBytes } from '@liskhq/lisk-cryptography';
/**
 * Generates a fresh random account: a random 20-byte value is hex-encoded
 * and used as the passphrase for key derivation, and the address is derived
 * from the resulting public key. All fields are returned hex-encoded, with
 * the nonce starting at 0.
 */
export const getRandomAccount = () => {
	const passphrase = getRandomBytes(20).toString('hex');
	const keyPair = getKeys(passphrase);

	return {
		address: getAddressFromPublicKey(keyPair.publicKey).toString('hex'),
		publicKey: keyPair.publicKey.toString('hex'),
		privateKey: keyPair.privateKey.toString('hex'),
		nonce: 0,
	};
};
|
<gh_stars>0
#ifndef TEST_CLEAR_COLOUR_H
#define TEST_CLEAR_COLOUR_H
#include "test.h"
/* Initialise the test's clear colour to the default RGBA value
 * (0.2, 0.3, 0.8, 1.0) — an opaque blue-ish tone. */
void
test_clear_colour_setup (struct test_data *t)
{
  static const float default_colour[4] = { 0.2f, 0.3f, 0.8f, 1.0f };
  unsigned int i;

  for (i = 0; i < 4; i++)
    t->colour[i] = default_colour[i];
}
/* Per-frame update hook; this test has no animation, so dt is unused. */
void
test_clear_colour_update (struct test_data *t, float dt)
{
}
/* Clear the framebuffer with the currently selected RGBA colour. */
void
test_clear_colour_render (struct test_data *t)
{
GLCALL (glClearColor (t->colour[0], t->colour[1], t->colour[2], t->colour[3]));
GLCALL (glClear (GL_COLOR_BUFFER_BIT));
}
/* Emit one labelled slider row editing a single colour channel in [0, 1]
 * with a 0.1 step. Factored out of render_gui, which previously repeated
 * this layout block four times. */
static void
test_clear_colour_slider_row (struct nk_context *ctx, const char *label, float *channel)
{
  nk_layout_row_begin (ctx, NK_STATIC, 30, 2);
  {
    nk_layout_row_push (ctx, 50);
    nk_label (ctx, label, NK_TEXT_LEFT);
    nk_layout_row_push (ctx, 110);
    nk_slider_float (ctx, 0, channel, 1.0f, 0.1f);
  }
  nk_layout_row_end (ctx);
}

/* Draw the "Clear Colour" window with one slider per RGBA channel,
 * then flush the Nuklear draw commands via the SDL backend. */
void
test_clear_colour_render_gui (struct test_data *t, struct nk_context *ctx)
{
  if (nk_begin (ctx, "Clear Colour", nk_rect (50, 50, 220, 220),
                NK_WINDOW_MOVABLE | NK_WINDOW_BORDER | NK_WINDOW_CLOSABLE))
    {
      test_clear_colour_slider_row (ctx, "R:", &t->colour[0]);
      test_clear_colour_slider_row (ctx, "G:", &t->colour[1]);
      test_clear_colour_slider_row (ctx, "B:", &t->colour[2]);
      test_clear_colour_slider_row (ctx, "A:", &t->colour[3]);
    }
  nk_end (ctx);
  nk_sdl_render (NK_ANTI_ALIASING_ON, MAX_VERTEX_MEMORY, MAX_ELEMENT_MEMORY);
}
/* Teardown hook; nothing to release for this test. */
void test_clear_colour_teardown (struct test_data *t) {}
#endif /* TEST_CLEAR_COLOUR_H */
|
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Code-signs the app bundle in place and extracts every signature into a
# detached-signature tarball (${OUT}) that attach scripts can apply later.
set -e
ROOTDIR=dist
BUNDLE="${ROOTDIR}/BitcoinGenghis-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature-osx.tar.gz
OUTROOT=osx
if [ ! -n "$1" ]; then
echo "usage: $0 <codesign args>"
echo "example: $0 -s MyIdentity"
exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
# Sign the bundle; --file-list records every file codesign touched.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# For each signed Mach-O binary, copy out just the signature bytes
# (located via pagestuff's size/offset output) into a .sign file.
grep -v CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
DIRNAME="`dirname "${SIGNFILE}"`"
mkdir -p "${DIRNAME}"
echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# CodeResources files are plain files; copy them into the tarball whole.
grep CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
DIRNAME="`dirname "${RESOURCE}"`"
mkdir -p "${DIRNAME}"
echo "Adding resource for: \"${TARGETFILE}\""
cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
#!/bin/bash
# PBS batch job: runs the optimal-pricing simulation on one node with
# 4 cores, 30-minute wall time; VAR1..VAR9 are passed in via qsub -v.
#PBS -l walltime=00:30:00
#PBS -l nodes=1:ppn=4
#PBS -m ae -M fb1n15@soton.ac.uk
#PBS -o /home/fb1n15/simulation-resource-allocation-multi-agent-RL/output_log_of_tasks/
#PBS -e /home/fb1n15/simulation-resource-allocation-multi-agent-RL/error_log_of_tasks/
#Change to directory from which job was submitted
#remember! cd to the directory that contains the file when run this script
cd "/home/fb1n15/simulation-resource-allocation-multi-agent-RL/scripts/" || exit
# Load the conda environment and the CPLEX solver used by the simulation.
module load conda/4.4.0
source activate auction
module load cplex/12.10
export PYTHONPATH=$PYTHONPATH:/home/fb1n15/simulation-resource-allocation-multi-agent-RL
python3 /home/fb1n15/simulation-resource-allocation-multi-agent-RL/scripts/simulation_optimal_pricing.py $VAR1 $VAR2 $VAR3 $VAR4 $VAR5 $VAR6 $VAR7 $VAR8 $VAR9
#!/bin/bash
# Enables Night Shift on officially unsupported Macs by binary-patching the
# private CoreBrightness framework (credits printed below). This first part
# sets up paths, verifies the OS version and bails out on supported models.
# Do NOT Change. New Version will need these variables
#CATALOGURL="https://swscan.apple.com/content/catalogs/others/index-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog.gz"
COREBRIGHTNESS="/System/Library/PrivateFrameworks/CoreBrightness.framework"
COREBRIGHTNESS_A="/System/Library/PrivateFrameworks/CoreBrightness.framework/Versions/A/CoreBrightness"
CBSIGNATURE="/System/Library/PrivateFrameworks/CoreBrightness.framework/Versions/A/_CodeSignature"
CBBACKUP="/Library/CoreBrightness Backup"
MACMODEL="$(sysctl -n hw.model)"
OSVERSION="$(sw_vers -productVersion)"
# Locate the _ModelMinVersion symbol in the binary; its address (leading
# zeros stripped) is the file offset that gets patched later.
OFFSET_FULL="$(nm "${COREBRIGHTNESS_A}" | grep _ModelMinVersion | cut -d' ' -f 1 | sed -e 's/^0*//g' | head -1)"
OFFSET="0x${OFFSET_FULL}"
APPSUPPORT="/Library/Application Support/Night Shift/"
echo "Night Shift Enable Script for Unsupported Macs"
echo "version 2.0"
echo "Script made by Isiah Johnson (TMRJIJ) / OS X Hackers and Dosdude1"
echo ""
echo "All credits for this work goes to Piker Alpha. Thanks!"
echo "Special thanks to pookjw, PeterHolbrook, dosdude1, and aonez for their continued critiques and support from their own source work."
echo ""
echo "This script is intended as non-commerical, with no Donation requests, Open Source, and must give thanks to PIke!"
echo "URL: https://pikeralpha.wordpress.com/2017/01/30/4398/"
echo ""
echo ""
# Details about the script
echo "Night Shift was introduced in macOS Sierra 10.12.4 (16E144f) and is controlled by the CoreBrightness.framework. The official minimum requirements for this feature are:
MacBookPro9,x
iMac13,x
Macmini6,x
MacBookAir5,x
MacPro6,x
MacBook8,x
This script will replace the CoreBrightness.framework with one already patched with the matching hex value in CoreBrightness.framework for most older/unsupported hardware.
As such, if something goes wrong (like the Display tab in System Preference crashing) or if this framework copy doesn't work. Please feel free to email me at support@osxhackers.net or attempt it manually via Pike's original blog post."
echo ""
# Checks if System Version is at least 10.12.4
echo "Checking System Version..."
echo ""
if [[ "$(echo "${OSVERSION}" | cut -d"." -f2)" -lt 12 ]]; then
echo "Incompatible version of macOS, install macOS Sierra and run this script again"
exit
elif [[ "$(echo "${OSVERSION}" | cut -d"." -f2)" == 12 ]]; then
if [[ "$(echo "${OSVERSION}" | cut -d"." -f3)" -lt 4 ]]; then
echo "Requires macOS 10.12.4 or higher. You have version: $(sw_vers -productVersion), install the newest macOS update and run this script again"
echo ""
exit
fi
fi
# Checks Mac Model
# Each branch extracts the numeric model generation (e.g. MacBookPro11,2
# -> 11) and flags models that already ship with Night Shift support.
if [[ ! -z "$(echo "${MACMODEL}" | grep "MacBookPro")" ]]; then
if [[ "$(echo "${MACMODEL}" | cut -d"o" -f4 | cut -d"," -f1)" -ge 9 ]]; then
SUPPORTEDMAC=YES
fi
elif [[ ! -z "$(echo "${MACMODEL}" | grep "iMacPro")" ]]; then
SUPPORTEDMAC=YES
elif [[ ! -z "$(echo "${MACMODEL}" | grep "iMac")" ]]; then
if [[ "$(echo "${MACMODEL}" | cut -d"c" -f2 | cut -d"," -f1)" -ge 13 ]]; then
SUPPORTEDMAC=YES
fi
elif [[ ! -z "$(echo "${MACMODEL}" | grep "Macmini")" ]]; then
if [[ "$(echo "${MACMODEL}" | cut -d"i" -f3 | cut -d"," -f1)" -ge 6 ]]; then
SUPPORTEDMAC=YES
fi
elif [[ ! -z "$(echo "${MACMODEL}" | grep "MacBookAir")" ]]; then
if [[ "$(echo "${MACMODEL}" | cut -d"r" -f2 | cut -d"," -f1)" -ge 5 ]]; then
SUPPORTEDMAC=YES
fi
elif [[ ! -z "$(echo "${MACMODEL}" | grep "MacPro")" ]]; then
if [[ "$(echo "${MACMODEL}" | cut -d"o" -f2 | cut -d"," -f1)" -ge 6 ]]; then
SUPPORTEDMAC=YES
fi
elif [[ ! -z "$(echo "${MACMODEL}" | grep "MacBook")" ]]; then
if [[ "$(echo "${MACMODEL}" | cut -d"k" -f2 | cut -d"," -f1)" -ge 8 ]]; then
SUPPORTEDMAC=YES
fi
fi
# Supported machines must not be patched.
if [[ "${SUPPORTEDMAC}" == YES ]]; then
echo "Your Mac already supports Night Shift. It is not Recommended to use this Patch."
echo "Quitting..."
exit
fi
#Determining the Offset from OS Version
# Select the patch variant for this OS release:
#   PATCH=1 -> Sierra 10.12.4+ and High Sierra up to 10.13.1
#   PATCH=2 -> High Sierra 10.13.2 and newer (including Mojave)
if [[ "$(echo "${OSVERSION}" | cut -d"." -f2)" == 13 ]]; then
if [[ -z "$(echo "${OSVERSION}" | cut -d"." -f3)" ]]; then # High Sierra
PATCH=1
else
if [ "$(echo "${OSVERSION}" | cut -d"." -f3)" -ge 2 ]; then
PATCH=2 # High Sierra 10.13.2 to Mojave
else
PATCH=1 # 10.13.1
fi
fi
elif [ "$(echo "${OSVERSION}" | cut -d"." -f2)" -gt 13 ]; then # Mojave or newer
PATCH=2 # fixed: was `Patch=2` — bash is case-sensitive, so PATCH stayed unset here
else
PATCH=1 # Sierra 10.12.4 or later
fi
# Check if SIP is enabled. Exits if enabled.
echo "Checking System Integrity Protection status..."
echo ""
if [[ !($(csrutil status | grep enabled | wc -l) -eq 0) ]]; then
echo "SIP is enabled on this system. Please boot into Recovery HD or a Sierra Installer USB drive, open a new Terminal Window, and enter 'csrutil disable'. When completed, reboot back into your standard Sierra install, and run this script again."
echo ""
exit
elif [[ "$(csrutil status | head -n 1)" == *"status: enabled (Custom Configuration)"* ]]; then
echo "The SIP status has a Custom Configuration. The script might not work."
fi
# Check if Command Line Tools from XCode are installed
# (nm/xxd from the CLT are required by the patch steps below)
if [[ ! -d "$("xcode-select" -p)" ]]; then
echo "Your Mac doesn't appear to have Command Line Tool. Please type 'xcode-select --install' command in the terminal to install it, then run this script again."
exit
fi
# Actual Patching of Framework
read -p "Ready to begin Patching? [y/n]: " prompt
if [[ $prompt == 'y' ]]; then
echo "Let get started then"
echo ""
#Backup Original Framework
echo "Backing Up older CoreBrightness Framework. It's in your Library Folder"
sudo mkdir -p $CBBACKUP
sudo cp $COREBRIGHTNESS_A /Library/CoreBrightness\ Backup/CoreBrightness.bak
sudo cp -r $CBSIGNATURE /Library/CoreBrightness\ Backup/_CodeSignature.bak
#Patching Framework
echo "Patching Framework"
if [ -z ${OFFSET_FULL} ]; then
echo -e "Can't find the offset to patch, Installation will not continue."
exit
fi
echo "Offset: ${OFFSET}"
echo 'Getting Offset Hex Data'
OFFSET_ORIGINAL="$(xxd -s ${OFFSET} -c 24 -l 24 "${COREBRIGHTNESS_A}")"
echo "Original Hex: ${OFFSET_ORIGINAL}"
# If the 24 bytes at the offset already read as six 0x00000001 words the
# binary was patched by a previous run; nothing left to do.
if [[ $OFFSET_ORIGINAL == *'0100 0000 0100 0000 0100 0000 0100 0000 0100 0000 0100 0000'* ]]; then
echo -e "A patch was already applied on \"${COREBRIGHTNESS}\". No Need to worry."
exit
fi
# Checking for temp files
if [ -f "${COREBRIGHTNESS}/Versions/Current/CoreBrightness.temp" ]; then
echo -e "Detected obsolete file CoreBrightness.temp from the backup, removing"
sudo rm "${COREBRIGHTNESS}/Versions/Current/CoreBrightness.temp"
fi
if [[ -f "/System/Library/PrivateFrameworks/CoreBrightness.framework/Versions/A/CoreBrightness.tbd" ]]; then
echo "Detected CoreBrightness.tbd, removing..."
sudo rm "${COREBRIGHTNESS}/Versions/A/CoreBrightness.tbd"
fi
# Overwrite 24 bytes at the _ModelMinVersion offset with six little-endian
# 0x00000001 words — presumably lowering every per-model minimum version
# to 1 (see Pike's blog post referenced above for the technique).
printf "\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00" | sudo dd count=24 bs=1 seek=$((${OFFSET})) of="${COREBRIGHTNESS_A}" conv=notrunc
echo 'Checking offset hex changed...'
OFFSET_CHECK="$(xxd -s ${OFFSET} -c 24 -l 24 "${COREBRIGHTNESS_A}")"
echo "New Hex Data: ${OFFSET_CHECK}"
# Codesigning
# Ad-hoc re-sign ("-s -") the modified binary so the system will load it.
echo "New CoreBrightness will be Codesigned"
sudo codesign -f -s - $COREBRIGHTNESS_A
sudo chmod +x $COREBRIGHTNESS_A
echo ""
echo "Backing up new CoreBrightness Framework in Application Support Folder"
sudo cp -R $COREBRIGHTNESS $APPSUPPORT
echo "Finished. Please restart your Mac. After this, there should be a Night Shift Tab within System Preferences > Displays"
echo "Enjoy"
echo ""
echo "If you have issues, please feel free to go to the Github Repository"
elif [[ $prompt == 'n' ]]; then
echo""
echo "Okay then, bye :P"
exit
else
echo "No idea what you mean by '$prompt'. Closing Script now. Bye!"
exit
fi
|
#!/bin/bash
# Load-test helper: spawn N wallet containers that each request tx notes
# a given number of times, tracking results in a report directory.
# NOTE(review): check_enough_wlts, gen_report_dir, call_and_track and the
# *_DIR/IMG_NAME variables are provided by tests/common.sh — see there.
. ./tests/common.sh
[ "${1}" == "" ] && echo "specify how many wallets to use" && exit 2
num_wlts="${1}"
check_enough_wlts
[ "${2}" == "" ] && echo "specify how many times make the call" && exit 2
num_calls="${2}"
report_basename="txnotes"
gen_report_dir
# Container command template; %s placeholders are filled per wallet.
entrypoint="${UTILS_DIR}/txnotes.sh"
loop_msg="requesting tx notes of wallet n. %s"
cmd_base="${DOCKER_RUN_D} --name wlt_%s ${IMG_NAME} ${entrypoint} ${report_dir} ${num_calls}"
call_and_track
|
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: koubei.merchant.department.create response.
 *
 * Carries the id of the department created for a merchant in the
 * staff-management scenario.
 *
 * @author <NAME>
 * @since 1.0, 2021-10-26 12:00:07
 */
public class KoubeiMerchantDepartmentCreateResponse extends AlipayResponse {
private static final long serialVersionUID = 5189414273733919438L;
/**
 * Department id returned when the merchant successfully creates a
 * department in the staff-management scenario.
 */
@ApiField("dept_id")
private String deptId;
public void setDeptId(String deptId) {
this.deptId = deptId;
}
public String getDeptId( ) {
return this.deptId;
}
}
|
<reponame>dariosilva/spring-framework-5
package com.dams.controllers;
import com.dams.exceptions.NotFoundException;
import com.dams.services.RecipeService;
import com.dams.commands.RecipeCommand;
import com.dams.domain.Recipe;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Created by jt on 6/19/17.
*/
public class RecipeControllerTest {
@Mock
RecipeService recipeService;
RecipeController controller;
MockMvc mockMvc;
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
controller = new RecipeController(recipeService);
mockMvc = MockMvcBuilders.standaloneSetup(controller)
.setControllerAdvice(new ControllerExceptionHandler())
.build();
}
@Test
public void testGetRecipe() throws Exception {
Recipe recipe = new Recipe();
recipe.setId(1L);
when(recipeService.findById(anyLong())).thenReturn(recipe);
mockMvc.perform(get("/recipe/1/show"))
.andExpect(status().isOk())
.andExpect(view().name("recipe/show"))
.andExpect(model().attributeExists("recipe"));
}
@Test
public void testGetRecipeNotFound() throws Exception {
when(recipeService.findById(anyLong())).thenThrow(NotFoundException.class);
mockMvc.perform(get("/recipe/1/show"))
.andExpect(status().isNotFound())
.andExpect(view().name("404error"));
}
@Test
public void testGetRecipeNumberFormatException() throws Exception {
mockMvc.perform(get("/recipe/asdf/show"))
.andExpect(status().isBadRequest())
.andExpect(view().name("400error"));
}
@Test
public void testGetNewRecipeForm() throws Exception {
RecipeCommand command = new RecipeCommand();
mockMvc.perform(get("/recipe/new"))
.andExpect(status().isOk())
.andExpect(view().name("recipe/recipeform"))
.andExpect(model().attributeExists("recipe"));
}
@Test
public void testPostNewRecipeForm() throws Exception {
RecipeCommand command = new RecipeCommand();
command.setId(2L);
when(recipeService.saveRecipeCommand(any())).thenReturn(command);
mockMvc.perform(post("/recipe")
.contentType(MediaType.APPLICATION_FORM_URLENCODED)
.param("id", "")
.param("description", "some string")
.param("directions", "some directions")
)
.andExpect(status().is3xxRedirection())
.andExpect(view().name("redirect:/recipe/2/show"));
}
@Test
public void testPostNewRecipeFormValidationFail() throws Exception {
RecipeCommand command = new RecipeCommand();
command.setId(2L);
when(recipeService.saveRecipeCommand(any())).thenReturn(command);
mockMvc.perform(post("/recipe")
.contentType(MediaType.APPLICATION_FORM_URLENCODED)
.param("id", "")
.param("cookTime", "3000")
)
.andExpect(status().isOk())
.andExpect(model().attributeExists("recipe"))
.andExpect(view().name("recipe/recipeform"));
}
@Test
public void testGetUpdateView() throws Exception {
RecipeCommand command = new RecipeCommand();
command.setId(2L);
when(recipeService.findCommandById(anyLong())).thenReturn(command);
mockMvc.perform(get("/recipe/1/update"))
.andExpect(status().isOk())
.andExpect(view().name("recipe/recipeform"))
.andExpect(model().attributeExists("recipe"));
}
@Test
public void testDeleteAction() throws Exception {
mockMvc.perform(get("/recipe/1/delete"))
.andExpect(status().is3xxRedirection())
.andExpect(view().name("redirect:/"));
verify(recipeService, times(1)).deleteById(anyLong());
}
} |
#! /bin/bash
# Mark every file passed as an argument as executable for all users.
chmod a+x "$@"
|
def count_num(arr, num):
    """Count how many cells of the 2-D iterable ``arr`` compare equal to ``num``."""
    return sum(1 for row in arr for value in row if value == num)
#!/bin/bash -e
# -----------------------------------------------------------------------------
#
# Package : zxcvbn-go
# Version : ae427f1e4c1d
# Source repo : https://github.com/nbutton23/zxcvbn-go.git
# Tested on : ubi 8.5
# Language : go
# Travis-Check : true
# Script License: Apache License, Version 2 or later
# Maintainer : Sachin K {sachin.kakatkar@ibm.com}
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
#Run the script:./zxcvbn-go_ubi_8.5.sh ae427f1e4c1d(version_to_test)
PACKAGE_NAME=zxcvbn-go
PACKAGE_VERSION=${1:-ae427f1e4c1d}
GO_VERSION=1.17.4
PACKAGE_URL=https://github.com/nbutton23/zxcvbn-go.git
# Install build prerequisites and a pinned Go toolchain for ppc64le.
dnf install git wget sudo diffutils.ppc64le make gcc gcc-c++ -y
mkdir -p /home/tester/output
cd /home/tester
wget https://golang.org/dl/go$GO_VERSION.linux-ppc64le.tar.gz
rm -rf /usr/local/go && tar -C /usr/local/ -xzf go$GO_VERSION.linux-ppc64le.tar.gz
rm -rf go$GO_VERSION.linux-ppc64le.tar.gz
export GOROOT=${GOROOT:-"/usr/local/go"}
export GOPATH=${GOPATH:-/home/tester/go}
export PATH=$PATH:$GOROOT/bin:$GOPATH/bin:/usr/local/bin
export GO111MODULE=on
mkdir -p $GOPATH/src/github.com/nbutton23
cd $GOPATH/src/github.com/nbutton23
rm -rf $PACKAGE_NAME
OS_NAME=$(cat /etc/os-release | grep ^PRETTY_NAME | cut -d= -f2)
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
exit 1
fi
cd $PACKAGE_NAME
git checkout $PACKAGE_VERSION
if ! (go mod init && go mod tidy); then
echo "------------------$PACKAGE_NAME:build failed---------------------"
echo "$PACKAGE_VERSION $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | build_Fails"
exit 1
fi
go mod vendor
if ! go test ./... -v; then
echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/test_fails
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails" > /home/tester/output/version_tracker
exit 1
else
echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
echo "$PACKAGE_VERSION $PACKAGE_NAME" > /home/tester/output/test_success
# Fixed: the success row previously wrote $PACKAGE_VERSION twice instead of
# the repo URL, making its columns inconsistent with the failure rows above.
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success" > /home/tester/output/version_tracker
exit 0
fi
#! /bin/bash
#
# makePlatformPrivateKey.sh -- Translate key to PuTTY format
#
# Copyright (c) 2022 Riverbed Technology LLC
#
# This software is licensed under the terms and conditions of the MIT License
# accompanying the software ("License"). This software is distributed "AS IS"
# as set forth in the License.
#
# NOTE(review): get_rsync_for_windows, pause, MSYSRSYNCPATH and USERPROFILE
# are expected to be provided by the sourcing environment — confirm callers.
if [[ ! -e "$HOME/.ssh/id_rsa.ppk" ]]; then
get_rsync_for_windows
echo "---------------------------------------------------------"
echo "No PuTTY format private key available. Creating one now."
echo "---------------------------------------------------------"
echo
echo "PuTTYgen should pop up, but first you will get a dialog box saying"
echo "it has successfully imported a foreign OpenSSH SSH-2 private key."
echo
echo "1. Click OK on the 'PuTTYgen Notice' dialog box about the" \
"successful import."
echo
echo "2. Click the 'Save private key' button."
echo
echo "3. When asked if you are sure you want to save this key without a"
echo " passphrase to protect it, click 'Yes.'"
echo
echo "4. In the 'file name:' field, type the exact text below" \
"INCLUDING the quotes:"
echo
echo " \"%USERPROFILE%\\.ssh\\id_rsa.ppk\""
echo
echo "5. Click the 'Save' button in the file dialog and close PuTTYgen" \
"to continue."
echo
echo "NOTE: The first time we test installing this key, you will have to"
echo "enter your password, perhaps twice. Subsequent attempts will not"
echo "ask you again since your key itself will be used for authentication."
echo
"${MSYSRSYNCPATH}\\bin\\puttygen.exe" "$USERPROFILE\\.ssh\\id_rsa"
fi
status=0
if [[ ! -e "$HOME/.ssh/id_rsa.ppk" ]]; then
echo Failed to create a putty-format private key.
pause
status=1
fi
if [[ $status != 0 ]]; then
# Fixed: a bare `exit` here returned the status of the preceding [[ ]]
# test (always 0), so failures looked like success to the caller.
exit "$status"
fi
|
/// Aggregate state for one signed-in app session.
/// All sub-states are stored by value; optional fields are `nil` until the
/// corresponding data has been loaded or selected.
struct SessionState {
  /// Current map view state.
  var map: MapState
  /// Active trip, if any.
  var trip: Trip?
  /// Identifier of the currently selected order, if any.
  var selectedOrderId: Order.ID?
  /// Loaded places summary, if fetched.
  var places: PlacesSummary?
  /// How the places list is presented (defaults to `.byPlace` in `init`).
  var placesPresentation: PlacesPresentation
  /// Currently selected place, if any.
  var selectedPlace: Place?
  /// In-progress "add place" flow state, if active.
  var addPlace: AddPlace?
  /// Loaded history, if fetched.
  var history: History?
  /// Currently selected tab.
  var tab: TabSelection
  /// Publishable key for the backing service.
  var publishableKey: PublishableKey
  /// Signed-in user's profile.
  var profile: Profile
  /// Integration status (defaults to `.unknown` in `init`).
  var integrationStatus: IntegrationStatus
  /// Network requests currently in flight.
  var requests: Set<Request>
  /// Auth token, if one has been obtained.
  var token: Token?
  /// Memberwise-style initializer with defaults for every optional/derived field,
  /// so callers only need to supply `map`, `tab`, `publishableKey`, and `profile`.
  init(map: MapState, trip: Trip? = nil, selectedOrderId: Order.ID? = nil, places: PlacesSummary? = nil, placesPresentation: PlacesPresentation = .byPlace, selectedPlace: Place? = nil, addPlace: AddPlace? = nil, history: History? = nil, tab: TabSelection, publishableKey: PublishableKey, profile: Profile, integrationStatus: IntegrationStatus = .unknown, requests: Set<Request> = [], token: Token? = nil) {
    self.map = map
    self.trip = trip
    self.selectedOrderId = selectedOrderId
    self.places = places
    self.placesPresentation = placesPresentation
    self.selectedPlace = selectedPlace
    self.addPlace = addPlace
    self.history = history
    self.tab = tab
    self.publishableKey = publishableKey
    self.profile = profile
    self.integrationStatus = integrationStatus
    self.requests = requests
    self.token = token
  }
}
# -*- coding: utf-8 -*-
"""
Feature file parser.
One Feature file parser instance is able to parse one feature file.
"""
from __future__ import unicode_literals
import os
import io
import re
import json
import filecmp
import copy
from .compat import RecursionError
from .exceptions import RadishError, FeatureFileSyntaxError, LanguageNotSupportedError
from .feature import Feature
from .scenario import Scenario
from .scenariooutline import ScenarioOutline
from .scenarioloop import ScenarioLoop
from .stepmodel import Step
from .background import Background
from .model import Tag
class Keywords(object):
    """
    Represent config object for gherkin keywords.

    Each attribute holds the language-specific keyword text as loaded
    from the corresponding ``languages/<lang>.json`` file (see
    ``FeatureParser._load_language``).
    """
    def __init__(self, feature, background, scenario, scenario_outline, examples, scenario_loop, iterations):
        # Keyword strings, e.g. for English: "Feature", "Background",
        # "Scenario", "Scenario Outline", "Examples", "Scenario Loop", "Iterations".
        self.feature = feature
        self.background = background
        self.scenario = scenario
        self.scenario_outline = scenario_outline
        self.examples = examples
        self.scenario_loop = scenario_loop
        self.iterations = iterations
class FeatureParser(object):
    """
    Class to parse a feature file.
    A feature file contains just one feature.

    The parser is a line-driven state machine: ``_parse_context`` dispatches
    each non-empty line to ``_parse_<state>`` according to ``_current_state``.
    """
    #: Directory containing the per-language keyword JSON files.
    LANGUAGE_LOCATION = os.path.join(os.path.dirname(__file__), "languages")
    DEFAULT_LANGUAGE = "en"
    #: Lowercase step keywords that establish a "context class" which
    #: following 'And'-style steps inherit (see _parse_step).
    CONTEXT_CLASSES = ['given', 'when', 'then', 'but']
    class State(object):
        """
        Represents the parser state
        """
        INIT = "init"
        FEATURE = "feature"
        BACKGROUND = "background"
        SCENARIO = "scenario"
        STEP = "step"
        EXAMPLES = "examples"
        EXAMPLES_ROW = "examples_row"
        STEP_TEXT = "step_text"
        SKIP_SCENARIO = "skip_scenario"
    def __init__(self, core, featurefile, featureid, tag_expr=None, inherited_tags=None, language="en"):
        if not os.path.exists(featurefile):
            raise OSError("Feature file at '{0}' does not exist".format(featurefile))
        self._core = core
        self._featureid = featureid
        self._featurefile = featurefile
        self._tag_expr = tag_expr
        self.keywords = {}
        # Character separating a keyword from its sentence, e.g. "Feature: x".
        self._keywords_delimiter = ":"
        self._inherited_tags = inherited_tags or []
        # Parsing starts by expecting a Feature header (tags may precede it).
        self._current_state = FeatureParser.State.FEATURE
        self._current_line = 0
        self._current_tags = []
        self._current_preconditions = []
        self._current_constants = []
        self._current_scenario = None
        #: Holds the current context class for a Step.
        # eg. If a step is: 'And I have the number'
        # and this step was preceeded by 'Given I have the number
        # it's context class is 'Given'. This is used to correctly
        # match the 'And' sentences
        self._current_context_class = None
        self._in_step_text = False
        self.feature = None
        self._load_language(language)
    def _load_language(self, language=None):
        """
        Loads all keywords of the given language
        :param string language: the lanugage to use for the feature files.
        if None is given `radish` tries to detect the language.
        :returns: if the language could be loaded or not
        :rtype: bool
        :raises LanguageNotSupportedError: if the given language is not supported by radish
        """
        if not language:  # try to detect language
            raise NotImplementedError("Auto detect language is not implemented yet")
        language_path = os.path.join(self.LANGUAGE_LOCATION, language + ".json")
        try:
            with io.open(language_path, "r", encoding="utf-8") as f:
                language_pkg = json.load(f)
        except IOError:
            # No keyword file for this language on disk.
            raise LanguageNotSupportedError(language)
        self.keywords = Keywords(**language_pkg["keywords"])
    def parse(self):
        """
        Parses the feature file of this `FeatureParser` instance
        :returns: if the parsing was successful or not
        :rtype: bool
        """
        with io.open(self._featurefile, "r", encoding="utf-8") as f:
            for line in f.readlines():
                self._current_line += 1
                line = line.strip()
                if not line:  # line is empty
                    continue
                if line.startswith("#"):
                    # try to detect feature file language
                    language = self._detect_language(line)
                    if language:
                        self._load_language(language)
                    continue
                if self.feature:
                    if self._detect_feature(line):
                        raise FeatureFileSyntaxError("radish supports only one Feature per feature file")
                    if self._detect_background(line):
                        if self.feature.background:
                            raise FeatureFileSyntaxError("The Background block may only appear once in a Feature")
                        if self.feature.scenarios:
                            raise FeatureFileSyntaxError(
                                "The Background block must be placed before any Scenario block")
                result = self._parse_context(line)
                if result is False:
                    raise FeatureFileSyntaxError(
                        "Syntax error in feature file {0} on line {1}".format(self._featurefile, self._current_line))
        if not self.feature:
            raise FeatureFileSyntaxError("No Feature found in file {0}".format(self._featurefile))
        # NOTE: returns None (not the feature) when no scenario matched the
        # tag expression — callers must handle both return values.
        if not self.feature.scenarios:
            return None
        if self._current_scenario and not self._current_scenario.complete:  # for the last scenario
            self._current_scenario.after_parse()
        return self.feature
    def _parse_context(self, line):
        """
        Parses arbitrary context from a line
        :param string line: the line to parse from
        """
        # Dispatch on the current state, e.g. state "step" -> _parse_step.
        parse_context_func = getattr(self, "_parse_" + self._current_state, None)
        if not parse_context_func:
            raise RadishError("FeatureParser state {0} is not supported".format(self._current_state))
        return parse_context_func(line)
    def _parse_feature(self, line):
        """
        Parses a Feature Sentence
        The `INIT` state is used as initiale state.
        :param string line: the line to parse from
        """
        detected_feature = self._detect_feature(line)
        if not detected_feature:
            # Tags (and @constant tags) may precede the Feature header.
            tag = self._detect_tag(line)
            if tag:
                self._current_tags.append(Tag(tag[0], tag[1]))
                if tag[0] == "constant":
                    name, value = self._parse_constant(tag[1])
                    self._current_constants.append((name, value))
                return True
            return False
        self.feature = Feature(self._featureid, self.keywords.feature, detected_feature, self._featurefile,
                               self._current_line, self._current_tags)
        self.feature.context.constants = self._current_constants
        self._current_state = FeatureParser.State.BACKGROUND
        self._current_tags = []
        self._current_constants = []
        return True
    def _parse_background(self, line):
        """
        Parses a background context
        :param str line: the line to parse the background
        """
        detected_background = self._detect_background(line)
        if detected_background is None:
            # try to find a scenario
            if self._detect_scenario_type(line):
                return self._parse_scenario(line)
            # this line is interpreted as a feature description line
            self.feature.description.append(line)
            return True
        self.feature.background = Background(self.keywords.background, detected_background, self._featurefile,
                                             self._current_line, self.feature)
        self._current_scenario = self.feature.background
        self._current_state = FeatureParser.State.STEP
        return True
    def _parse_scenario(self, line):
        """
        Parses a Feature context
        :param string line: the line to parse from
        """
        # Try the scenario types in order: Scenario, Scenario Outline, Scenario Loop.
        detected_scenario = self._detect_scenario(line)
        scenario_type = Scenario
        keywords = (self.keywords.scenario,)
        if not detected_scenario:
            detected_scenario = self._detect_scenario_outline(line)
            scenario_type = ScenarioOutline
            keywords = (self.keywords.scenario_outline, self.keywords.examples)
        if not detected_scenario:
            detected_scenario = self._detect_scenario_loop(line)
            if not detected_scenario:
                # Not a scenario header: accept tags, reject anything else.
                tag = self._detect_tag(line)
                if tag:
                    self._current_tags.append(Tag(tag[0], tag[1]))
                    if tag[0] == "precondition":
                        scenario = self._parse_precondition(tag[1])
                        self._current_preconditions.append(scenario)
                    elif tag[0] == "constant":
                        name, value = self._parse_constant(tag[1])
                        self._current_constants.append((name, value))
                    return True
                raise FeatureFileSyntaxError(
                    "The parser expected a scenario or a tag on this line. Given: '{0}'".format(line))
            detected_scenario, iterations = detected_scenario  # pylint: disable=unpacking-non-sequence
            scenario_type = ScenarioLoop
            keywords = (self.keywords.scenario_loop, self.keywords.iterations)
        if detected_scenario in self.feature:
            raise FeatureFileSyntaxError(
                "Scenario with name '{0}' defined twice in feature '{1}'".format(detected_scenario, self.feature.path))
        # Scenario ids continue from the previous scenario (or its last
        # sub-scenario for Outlines/Loops).
        scenario_id = 1
        if self.feature.scenarios:
            previous_scenario = self._current_scenario
            if hasattr(previous_scenario, "scenarios") and previous_scenario.scenarios:
                scenario_id = previous_scenario.scenarios[-1].id + 1
            else:
                scenario_id = previous_scenario.id + 1
        # all tags of this scenario have been consumed so we can
        # check if this scenario has to be evaluated or not
        if self._tag_expr:
            # inherit the tags from the current feature and the explicitely
            # inherited tags given to the parser. This tags are coming from precondition scenarios
            current_tags = self._current_tags + self.feature.tags + self._inherited_tags
            scenario_in_tags = self._tag_expr.evaluate([t.name for t in current_tags])
            if not scenario_in_tags:  # this scenario does not match the given tag expression
                self._current_tags = []
                self._current_preconditions = []
                self._current_constants = []
                self._current_state = FeatureParser.State.SKIP_SCENARIO
                return True
        background = self._create_scenario_background(steps_runable=scenario_type is Scenario)
        scenario = scenario_type(scenario_id, *keywords, sentence=detected_scenario, path=self._featurefile,
                                 line=self._current_line, parent=self.feature, tags=self._current_tags,
                                 preconditions=self._current_preconditions, background=background)
        self.feature.scenarios.append(scenario)
        self._current_scenario = scenario
        self._current_scenario.context.constants = self._current_constants
        self._current_tags = []
        self._current_preconditions = []
        self._current_constants = []
        if scenario_type == ScenarioLoop:
            self._current_scenario.iterations = iterations
        self._current_state = FeatureParser.State.STEP
        return True
    def _parse_examples(self, line):
        """
        Parses the Examples header line
        :param string line: the line to parse from
        """
        if not isinstance(self._current_scenario, ScenarioOutline):
            raise FeatureFileSyntaxError("Scenario does not support Examples. Use 'Scenario Outline'")
        # Split the "| a | b |" row; first and last split items are empty.
        self._current_scenario.examples_header = [x.strip() for x in line.split("|")[1:-1]]
        self._current_state = FeatureParser.State.EXAMPLES_ROW
        return True
    def _parse_examples_row(self, line):
        """
        Parses an Examples row
        :param string line: the line to parse from
        """
        # detect next keyword
        if self._detect_scenario_type(line):
            self._current_scenario.after_parse()
            return self._parse_scenario(line)
        example = ScenarioOutline.Example([x.strip() for x in line.split("|")[1:-1]], self._featurefile,
                                          self._current_line)
        self._current_scenario.examples.append(example)
        return True
    def _parse_step(self, line):
        """
        Parses a single step
        :param string line: the line to parse from
        """
        # detect next keyword
        if self._detect_scenario_type(line):
            self._current_scenario.after_parse()
            return self._parse_scenario(line)
        if self._detect_step_text(line):
            self._current_state = self.State.STEP_TEXT
            return self._parse_step_text(line)
        if self._detect_table(line):
            self._parse_table(line)
            return True
        if self._detect_examples(line):
            self._current_state = FeatureParser.State.EXAMPLES
            return True
        # get context class
        step_context_class = line.split()[0].lower()
        if step_context_class in FeatureParser.CONTEXT_CLASSES:
            self._current_context_class = step_context_class
        step_id = len(self._current_scenario.all_steps) + 1
        # Steps inside Outlines/Loops/Backgrounds are templates, not directly runnable.
        not_runable = isinstance(self._current_scenario, (ScenarioOutline, ScenarioLoop, Background))
        step = Step(step_id, line, self._featurefile, self._current_line, self._current_scenario, not not_runable,
                    context_class=self._current_context_class)
        self._current_scenario.steps.append(step)
        return True
    def _parse_table(self, line):
        """
        Parses a step table row
        :param string line: the line to parse from
        """
        if not self._current_scenario.steps:
            raise FeatureFileSyntaxError(
                "Found step table without previous step definition on line {0}".format(
                    self._current_line))
        current_step = self._current_scenario.steps[-1]
        table_columns = [x.strip() for x in line.split("|")[1:-1]]
        if not current_step.table_header:  # it's the table heading
            current_step.table_header = table_columns
        else:  # it's a table data row
            table_data = {k: v for k, v in zip(current_step.table_header, table_columns)}
            # NOTE(review): the raw column list goes to ``table_data`` while the
            # header-keyed dict goes to ``table`` — the names suggest the
            # opposite; confirm against Step's attribute docs before relying on it.
            current_step.table_data.append(table_columns)
            current_step.table.append(table_data)
        return True
    def _parse_step_text(self, line):
        """
        Parses additional step text
        :param str line: the line to parse
        """
        # Triple-quote delimiters may open and close on the same line.
        if line.startswith('"""') and not self._in_step_text:
            self._in_step_text = True
            line = line[3:]
        if line.endswith('"""') and self._in_step_text:
            self._current_state = self.State.STEP
            self._in_step_text = False
            line = line[:-3]
        if line:
            self._current_scenario.steps[-1].raw_text.append(line.strip())
        return True
    def _parse_precondition(self, arguments):
        """
        Parses scenario preconditions
        The arguments must be in format:
        File.feature: Some scenario
        :param str arguments: the raw arguments
        """
        match = re.search(r"(.*?\.feature): (.*)", arguments)
        if not match:
            raise FeatureFileSyntaxError(
                "Scenario @precondition tag must have argument in format: 'test.feature: Some scenario'")
        feature_file_name, scenario_sentence = match.groups()
        feature_file = os.path.join(os.path.dirname(self._featurefile), feature_file_name)
        # check if the precondition Scenario is in the same feature file.
        # If this happens to be the case the current feature is just copied as is.
        if filecmp.cmp(self._featurefile, feature_file):
            if scenario_sentence not in self.feature:
                raise FeatureFileSyntaxError(
                    "Cannot import precondition scenario '{0}' from feature '{1}': No such scenario".format(
                        scenario_sentence, feature_file))
            feature = copy.deepcopy(self.feature)
            self._core.features.append(feature)
        else:
            try:
                current_tags = self._current_tags + self.feature.tags + self._inherited_tags
                feature = self._core.parse_feature(feature_file, self._tag_expr, inherited_tags=current_tags)
            except (RuntimeError, RecursionError) as e:
                # Recursive precondition chains eventually blow the stack;
                # translate that into a readable syntax error.
                if str(e).startswith('maximum recursion depth exceeded'):  # precondition cycling
                    raise FeatureFileSyntaxError(
                        "Your feature '{0}' has cycling preconditions with '{1}: {2}' starting at line {3}".format(
                            self._featurefile, feature_file_name, scenario_sentence, self._current_line))
                raise
        if scenario_sentence not in feature:
            raise FeatureFileSyntaxError(
                "Cannot import precondition scenario '{0}' from feature '{1}': No such scenario".format(
                    scenario_sentence, feature_file))
        return feature[scenario_sentence]
    def _parse_constant(self, arguments):
        """
        Parses tag arguments as a constant containing name and value
        The arguments must be in format:
        ConstantName: SomeValue
        ConstantName: 5
        :param str arguments: the raw arguments to parse
        """
        name, value = arguments.split(":", 1)
        return name.strip(), value.strip()
    def _parse_skip_scenario(self, line):
        """
        Parses the next lines until the next scenario is reached
        """
        if self._detect_scenario_type(line):
            return self._parse_scenario(line)
        return True
    def _detect_keyword(self, keyword, line):
        """
        Detects a keyword on a given line
        :param keyword: the keyword to detect
        :param line: the line in which we want to detect the keyword
        :return: the line without the detected keyword
        :rtype: string or None
        """
        # NOTE(review): pattern is not a raw string; '\s' only works because
        # Python currently passes unknown escapes through — consider r"...".
        pattern = "^{keyword}\s*{delimiter}(.*)$".format(
            keyword=keyword, delimiter=self._keywords_delimiter)
        match = re.match(pattern, line)
        if match:
            return match.group(1).strip()
        return None
    def _detect_feature(self, line):
        """
        Detects a feature on the given line
        :param string line: the line to detect a feature
        :returns: the detected feature on the given line
        :rtype: string or None
        """
        return self._detect_keyword(self.keywords.feature, line)
    def _detect_background(self, line):
        """
        Detects a background on the given line
        :param string line: the line to detect a background
        :returns: the detected background on the given line
        :rtype: string or None
        """
        return self._detect_keyword(self.keywords.background, line)
    def _detect_scenario_type(self, line):
        """
        Detect a Scenario/ScenarioOutline/ScenarioLoop/Tag on the given line.
        :returns: if a scenario of any type is present on the given line
        :rtype: bool
        """
        # Side effect: switches the parser into the SCENARIO state on match.
        if self._detect_scenario(line) or self._detect_scenario_outline(line) or self._detect_scenario_loop(
                line) or self._detect_tag(line):
            self._current_state = FeatureParser.State.SCENARIO
            return True
        return False
    def _detect_scenario(self, line):
        """
        Detects a scenario on the given line
        :param string line: the line to detect a scenario
        :returns: the scenario detected on the given line
        :rtype: string or None
        """
        return self._detect_keyword(self.keywords.scenario, line)
    def _detect_scenario_outline(self, line):
        """
        Detects a scenario outline on the given line
        :param string line: the line to detect a scenario outline
        :returns: the scenario outline detected on the given line
        :rtype: string or None
        """
        return self._detect_keyword(self.keywords.scenario_outline, line)
    def _detect_examples(self, line):
        """
        Detects an Examples block on the given line
        :param string line: the line to detect the Examples
        :returns: if an Examples block was found on the given line
        :rtype: bool
        """
        return self._detect_keyword(self.keywords.examples, line) is not None
    def _detect_scenario_loop(self, line):
        """
        Detects a scenario loop on the given line
        :param string line: the line to detect a scenario loop
        :returns: if a scenario loop was found on the given line
        :rtype: string
        """
        # Returns a (sentence, iterations) tuple, unlike the other detectors.
        match = re.search(r"^{0} (\d+):(.*)".format(self.keywords.scenario_loop), line)
        if match:
            return match.group(2).strip(), int(match.group(1))
        return None
    def _detect_table(self, line):
        """
        Detects a step table row on the given line
        :param string line: the line to detect the table row
        :returns: if an step table row was found or not
        :rtype: bool
        """
        return line.startswith("|")
    def _detect_step_text(self, line):
        """
        Detects the beginning of an additional step text block
        :param str line: the line to detect the step text block
        :returns: if a step text block was found or not
        :rtype: bool
        """
        return line.startswith('"""')
    def _detect_language(self, line):
        """
        Detects a language on the given line
        :param string line: the line to detect the language
        :returns: the language or None
        :rtype: str or None
        """
        match = re.search("^# language: (.*)", line)
        if match:
            return match.group(1)
        return None
    def _detect_tag(self, line):
        """
        Detects a tag on the given line
        :param string line: the line to detect the tag
        :returns: the tag or None
        :rtype: str or None
        """
        # Matches "@name" or "@name(args)"; returns (name, args-or-None).
        match = re.search(r"^@([^\s(]+)(?:\((.*?)\))?", line)
        if match:
            return match.group(1), match.group(2)
        return None
    def _create_scenario_background(self, steps_runable):
        """
        Creates a new instance of the features current
        Background to assign to a new Scenario.
        """
        if not self.feature.background:
            return None
        return self.feature.background.create_instance(steps_runable=steps_runable)
|
#!/bin/bash
# ============LICENSE_START===============================================
# Copyright (C) 2020 Nordix Foundation. All rights reserved.
# ========================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=================================================
#
TC_ONELINE_DESCR="PMS Create 10000 policies and restart, test polices persistency"
#App names to include in the test when running docker, space separated list
DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR PA RICSIM SDNC NGW"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR PA RICSIM SDNC KUBEPROXY NGW"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-ISTANBUL ORAN-D-RELEASE ORAN-E-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
# Pull in the shared test framework and per-component API helpers.
. ../common/testcase_common.sh $@
. ../common/agent_api_functions.sh
. ../common/ricsimulator_api_functions.sh
. ../common/control_panel_api_functions.sh
. ../common/controller_api_functions.sh
. ../common/consul_cbs_functions.sh
. ../common/cr_api_functions.sh
. ../common/kube_proxy_api_functions.sh
. ../common/gateway_api_functions.sh
setup_testenvironment
#### TEST BEGIN ####
#Local vars in test script
##########################
# Tested variants of REST/DMAAP/SDNC config
TESTED_VARIANTS="REST"
#Test agent and simulator protocol versions (others are http only)
TESTED_PROTOCOLS="HTTP"
# 5 rics x 2000 policies = 10000 policies total, as per TC_ONELINE_DESCR.
NUM_RICS=5
NUM_POLICIES_PER_RIC=2000
generate_policy_uuid
if [ "$PMS_VERSION" == "V2" ]; then
    notificationurl=$CR_SERVICE_PATH"/test"
else
    notificationurl=""
fi
for __httpx in $TESTED_PROTOCOLS ; do
    for interface in $TESTED_VARIANTS ; do
        echo "#####################################################################"
        echo "#####################################################################"
        echo "### Testing agent: "$interface" and "$__httpx
        echo "#####################################################################"
        echo "#####################################################################"
        # Select http or https for all components depending on the protocol under test.
        if [ $__httpx == "HTTPS" ]; then
            use_cr_https
            use_simulator_https
            if [[ $interface = *"SDNC"* ]]; then
                use_sdnc_https
            fi
            use_agent_rest_https
        else
            use_cr_http
            use_simulator_http
            if [[ $interface = *"SDNC"* ]]; then
                use_sdnc_http
            fi
            use_agent_rest_http
        fi
        # Clean container and start all needed containers #
        clean_environment
        if [ $RUNMODE == "KUBE" ]; then
            start_kube_proxy
        fi
        start_ric_simulators ricsim_g1 $NUM_RICS STD_2.0.0
        start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
        if [ ! -z "$NRT_GATEWAY_APP_NAME" ]; then
            start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
        fi
        start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE
        set_agent_debug
        if [ $RUNMODE == "DOCKER" ]; then
            start_consul_cbs
        fi
        # Build and load the agent configuration, with or without SDNC.
        if [[ $interface = *"SDNC"* ]]; then
            start_sdnc
            prepare_consul_config SDNC ".consul_config.json"
        else
            prepare_consul_config NOSDNC ".consul_config.json"
        fi
        if [ $RUNMODE == "KUBE" ]; then
            agent_load_config ".consul_config.json"
        else
            consul_config_app ".consul_config.json"
        fi
        start_cr
        api_get_status 200
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_print ricsim_g1_$i interface
        done
        echo "Load policy type in group 1 simulators"
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_put_policy_type 201 ricsim_g1_$i STD_QOS_0_2_0 testdata/STD2/sim_qos.json
        done
        if [ "$PMS_VERSION" == "V2" ]; then
            api_equal json:policy-types 1 300 #Wait for the agent to refresh types from the simulator
        else
            api_equal json:policy_types 1 300 #Wait for the agent to refresh types from the simulator
        fi
        api_put_service 201 "serv1" 0 "$CR_SERVICE_PATH/1"
        echo "Check the number of types in the agent for each ric is 1"
        for ((i=1; i<=$NUM_RICS; i++))
        do
            if [ "$PMS_VERSION" == "V2" ]; then
                api_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
            else
                api_equal json:policy_types?ric=ricsim_g1_$i 1 120
            fi
        done
        # Create all policies in parallel (7 worker processes) and time it.
        START_ID=2000
        start_timer "Create $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
        api_put_policy_parallel 201 "serv1" ricsim_g1_ $NUM_RICS STD_QOS_0_2_0 $START_ID NOTRANSIENT $notificationurl testdata/STD/pi1_template.json $NUM_POLICIES_PER_RIC 7
        print_timer "Create $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
        INSTANCES=$(($NUM_RICS*$NUM_POLICIES_PER_RIC))
        api_equal json:policies $INSTANCES
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_equal ricsim_g1_$i num_instances $NUM_POLICIES_PER_RIC
        done
        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_PATH/1"
        # Restart the agent and verify all policies survive the restart.
        stop_policy_agent
        start_stopped_policy_agent
        set_agent_debug
        api_equal json:policies $INSTANCES 500
        # Stop the agent, wipe the simulators, restart — the agent shall
        # restore/re-sync its policies into the now-empty simulators.
        stop_policy_agent
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_post_delete_instances 200 ricsim_g1_$i
        done
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_equal ricsim_g1_$i num_instances 0
        done
        start_stopped_policy_agent
        set_agent_debug
        start_timer "Restore $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices after restart over $interface using "$__httpx
        api_equal json:policies $INSTANCES 500
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_equal ricsim_g1_$i num_instances $NUM_POLICIES_PER_RIC 500
        done
        print_timer "Restore $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices after restart over $interface using "$__httpx
        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_PATH/1"
        # Delete all policies in parallel and verify everything is gone.
        start_timer "Delete $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
        api_delete_policy_parallel 204 $NUM_RICS $START_ID $NUM_POLICIES_PER_RIC 7
        print_timer "Delete $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
        api_equal json:policies 0
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_equal ricsim_g1_$i num_instances 0
        done
        # Restart once more with zero policies and confirm nothing reappears.
        stop_policy_agent
        start_stopped_policy_agent
        set_agent_debug
        api_equal json:policies 0
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_equal ricsim_g1_$i num_instances 0
        done
        sleep_wait 200
        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_PATH/1"
        api_equal json:policies 0
        for ((i=1; i<=$NUM_RICS; i++))
        do
            sim_equal ricsim_g1_$i num_instances 0
        done
        for ((i=1; i<=$NUM_RICS; i++))
        do
            if [ $interface == "REST+SDNC" ]; then
                sim_contains_str ricsim_g1_$i remote_hosts $SDNC_APP_NAME
            else
                sim_contains_str ricsim_g1_$i remote_hosts $POLICY_AGENT_APP_NAME
            fi
        done
        check_policy_agent_logs
        if [[ $interface = *"SDNC"* ]]; then
            check_sdnc_logs
        fi
        store_logs "${__httpx}__${interface}"
    done
done
#### TEST COMPLETE ####
print_result
auto_clean_environment
<reponame>JakeStanger/Reactive-Gallery<filename>src/pages/gallery/controls/visibility/IVisibilityProps.ts<gh_stars>0
/**
 * Props for the visibility control in the gallery: four boolean flags for
 * which photo details are shown, each paired with a change callback.
 */
interface IVisibilityProps {
  /** Whether the location detail is shown. */
  showLocation: boolean;
  /** Whether the time detail is shown. */
  showTime: boolean;
  /** Whether the description detail is shown. */
  showDescription: boolean;
  /** Whether the tags detail is shown. */
  showTags: boolean;
  /** Called when the location visibility toggle changes. */
  onChangeShowLocation: (show: boolean) => void;
  /** Called when the time visibility toggle changes. */
  onChangeShowTime: (show: boolean) => void;
  /** Called when the description visibility toggle changes. */
  onChangeShowDescription: (show: boolean) => void;
  /** Called when the tags visibility toggle changes. */
  onChangeShowTags: (show: boolean) => void;
}
export default IVisibilityProps;
package com.wx.wheelview.demo;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Toast;
import com.wx.wheelview.widget.WheelView;
import java.util.ArrayList;
import java.util.List;
@SuppressWarnings("unchecked")
public class Main2Activity extends AppCompatActivity {
    /** Wheel widget from the com.wx.wheelview library. */
    private WheelView wheelView;
    /** Second wheel widget from the contrarywind library (left unconfigured). */
    private com.contrarywind.view.WheelView contraryWheel;
    /** Demo data shown in the wheel. */
    private List<Bean> beanList = new ArrayList<>();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main2);
        initData();
        init();
        init2();
    }

    /** Looks up the second wheel; its adapter wiring is intentionally left out. */
    private void init2() {
        contraryWheel = (com.contrarywind.view.WheelView) findViewById(R.id.test2);
        // contraryWheel.setAdapter(...);
    }

    /** Populates seven demo beans with ids "1".."7". */
    private void initData() {
        for (int id = 1; id <= 7; id++) {
            beanList.add(new Bean(String.valueOf(id), "2", "3"));
        }
    }

    /** Configures the main wheel: adapter, skin, data, size, listeners and style. */
    private void init() {
        wheelView = (WheelView) findViewById(R.id.test);
        wheelView.setWheelAdapter(new MyAdapter(this));
        wheelView.setSkin(WheelView.Skin.Holo);
        wheelView.setLoop(false);
        wheelView.setWheelData(beanList);
        WheelView.WheelViewStyle wheelStyle = new WheelView.WheelViewStyle();
        wheelStyle.selectedTextSize = 20;
        wheelStyle.textSize = 16;
        wheelView.setWheelSize(5);
        wheelView.setWheelClickable(true);
        wheelView.setSelection(2);
        wheelView.setOnWheelItemClickListener(new WheelView.OnWheelItemClickListener() {
            @Override
            public void onItemClick(int position, Object item) {
                // Clicking an item shows its name.
                Toast.makeText(Main2Activity.this, "" + ((Bean) item).getName(), Toast.LENGTH_SHORT).show();
            }
        });
        wheelView.setOnWheelItemSelectedListener(new WheelView.OnWheelItemSelectedListener() {
            @Override
            public void onItemSelected(int position, Object item) {
                // Scrolling to an item shows its id.
                Toast.makeText(Main2Activity.this, "" + ((Bean) item).getId(), Toast.LENGTH_SHORT).show();
            }
        });
        wheelView.setStyle(wheelStyle);
    }
}
|
<reponame>ohms83/GLRayTrace
#include "Camera.h"
#include <glm/gtc/matrix_transform.hpp> // translate, rotate, scale, perspective
// Configure the screen-space viewport. The center is placed at half the
// supplied dimensions and Y is flipped to match window coordinates.
void Camera::setViewPort(float width, float height)
{
    const float halfWidth  = width * 0.5f;
    const float halfHeight = height * 0.5f;
    _viewPort.width   = width;
    _viewPort.height  = height;
    _viewPort.center  = { halfWidth, halfHeight };
    _viewPort.invertY = true;
}
// Set a perspective projection.
//
// `fov` is given in degrees and is cached in radians as `_fov` (the ray
// casters apply glm::tan to it, which expects radians).
//
// BUG FIX: the original passed the raw degree value to glm::perspective,
// which (since GLM 0.9.6) expects radians — so the cached `_fov` and the
// built `_projection` described different frustums. Build the matrix from
// the same radian value.
void Camera::setPerspective(float fov, float aspect, float near, float far)
{
    _fov = glm::radians(fov);
    _aspect = aspect;
    _near = near;
    _far = far;
    _projection = glm::perspective(_fov, aspect, near, far);
}
// Apply the default camera projection: 45-degree FOV, 4:3 aspect ratio,
// near plane at 0.01 and far plane at 100.
void Camera::setDefaultPerspective()
{
    constexpr float kDefaultFovDegrees = 45.0f;
    constexpr float kDefaultAspect     = 4.0f / 3.0f;
    constexpr float kDefaultNear       = 0.01f;
    constexpr float kDefaultFar        = 100.f;
    setPerspective(kDefaultFovDegrees, kDefaultAspect, kDefaultNear, kDefaultFar);
}
// Cast a ray from the camera through screen pixel (x, y).
// Returns a default (zero) Ray if the camera is not fully configured.
Ray Camera::castScreenSpaceRay(uint32_t x, uint32_t y)
{
    // Guard: all projection/viewport parameters must be non-zero.
    if(!_fov || !_aspect || !_near || !_far || !_viewPort.width || !_viewPort.height) {
        return Ray();
    }
    Ray ray;
    ray.origin = position;
    // Define viewing fustrum in camera space.
    // TODO: Transform this into the world coordinate.
    // NOTE(review): the usual far-plane height is 2 * far * tan(fov / 2);
    // this uses tan(_fov) with the full angle — confirm this is intended.
    float farPlaneHeight = glm::tan(_fov) * _far * 2.f;
    float farPlaneWidth = farPlaneHeight * _aspect;
    // World units per pixel on the far plane.
    float u = farPlaneWidth / _viewPort.width;
    float v = farPlaneHeight / _viewPort.height;
    glm::vec3 target = {
        // x: shift so pixel 0 maps to the left edge of the far plane
        (x * u) - (farPlaneWidth * 0.5f),
        // y: flip so pixel row 0 maps to the top of the viewport
        ((_viewPort.height - y) * v) - (farPlaneHeight * 0.5f),
        // z: camera looks down -Z in camera space
        -_far
    };
    // NOTE(review): direction is not normalized — confirm callers expect that.
    ray.direction = target - position;
    return ray;
}
// Cast one ray per pixel over the rectangle [left, right) x [top, bottom),
// invoking `callback(ray, col, row)` for each. Does nothing if the camera
// is not fully configured or no callback was supplied.
void Camera::castScannerRay(uint32_t left, uint32_t top, uint32_t right, uint32_t bottom, std::function<void(const Ray&, uint32_t, uint32_t)> callback)
{
    if (!_fov || !_aspect || !_near || !_far || !_viewPort.width || !_viewPort.height || !callback) {
        return;
    }
    // Define viewing fustrum in camera space.
    // TODO: Transform this into the world coordinate.
    // NOTE(review): same far-plane math as castScreenSpaceRay — uses
    // tan(_fov) rather than tan(_fov / 2); confirm intended.
    float farPlaneHeight = glm::tan(_fov) * _far * 2.f;
    float farPlaneWidth = farPlaneHeight * _aspect;
    // World units per pixel on the far plane.
    float u = farPlaneWidth / _viewPort.width;
    float v = farPlaneHeight / _viewPort.height;
    // Scan row by row; right/bottom bounds are exclusive.
    for (uint32_t row = top; row < bottom; ++row)
    {
        for (uint32_t col = left; col < right; ++col)
        {
            Ray ray;
            ray.origin = position;
            glm::vec3 target = {
                // x: shift so pixel 0 maps to the left edge of the far plane
                (col * u) - (farPlaneWidth * 0.5f),
                // y: flip so row 0 maps to the top of the viewport
                ((_viewPort.height - row) * v) - (farPlaneHeight * 0.5f),
                // z: camera looks down -Z in camera space
                -_far
            };
            // NOTE(review): direction is not normalized — confirm callers expect that.
            ray.direction = target - position;
            callback(ray, col, row);
        }
    }
}
//
// SeasonsRootViewController.h
// Hiyoko
//
// Created by 天々座理世 on 2018/08/30.
// Copyright © 2018 MAL Updater OS X Group. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "SeasonsViewController.h"
NS_ASSUME_NONNULL_BEGIN
/// Navigation controller that hosts the seasons browser.
@interface SeasonsRootViewController : UINavigationController
/// The embedded seasons list view controller (strongly retained).
@property (strong) SeasonsViewController *seasonviewcontroller;
@end
NS_ASSUME_NONNULL_END
|
import { SectionTitle } from 'components/molecules';
import React, { useContext, useEffect } from 'react';
import { SectionVariants } from 'components/molecules/SectionTitle';
import { RoundedButton, ContentTitle } from 'components/atoms';
import { LanguageContext } from 'contexts';
import ProjectCard from 'components/molecules/ProjectCard';
import Decorations from 'components/decorations';
import ProjectsSectionProps from 'components/organisms/Projects/types';
import { useAnimation } from 'framer-motion';
import { useInView } from 'react-intersection-observer';
import Animated from 'components/animations';
import { forwardRef } from 'react';
import Slider from "react-slick";
/**
 * Achievement timeline section.
 * Renders a vertical timeline (2019 / 2017 / 2016) of contest results, each
 * entry with a react-slick image carousel and framer-motion reveal animations
 * triggered the first time the section scrolls into view.
 */
const Achievement = forwardRef<HTMLElement | undefined, ProjectsSectionProps>(
  ({ title, content, className }: ProjectsSectionProps, ref) => {
    // Imperative animation controls shared by every animated child below.
    const controls = useAnimation();
    // Intersection observer: fires once 10% of the section is visible.
    const [refView, inView] = useInView({
      threshold: 0.1,
    });
    // NOTE(review): `language` and `items` are only referenced by the
    // commented-out blocks below; kept so that code can be re-enabled.
    const language = useContext(LanguageContext);
    const { items } = content;
    // Only used by the commented-out bullet decorations below.
    const bulletsClasses = 'w-32 h-32 text-indigo-400 md:w-40 md:h-40';
    // Start the 'show' animation the first time the section becomes visible.
    useEffect(() => {
      if (inView) {
        controls.start('show');
      }
    }, [controls, inView]);
    // react-slick carousel configuration: one slide at a time, looping.
    const settings = {
      dots: true,
      infinite: true,
      speed: 500,
      slidesToShow: 1,
      slidesToScroll: 1
    };
    return (
      <section
        id="achievement"
        className={className}
        ref={section => {
          // Fan the DOM node out to both the in-view observer and the forwarded ref.
          refView(section);
          if (ref) {
            if (typeof ref === 'function') {
              ref(section);
            } else {
              ref.current = section;
            }
          }
        }}
      >
        <SectionTitle
          variant={SectionVariants.CENTER}
          lineProps={{
            animate: controls,
            custom: 1,
            transition: {
              delay: 1,
            },
          }}
        >
          <Animated.Letter text={title} animate={controls} custom={0} delay={1} />
        </SectionTitle>
        <div className="relative mt-10 md:mt-8">
          {/* <Animated.FromDirection
            from="top"
            animate={controls}
            custom={1}
            delay={1.8}
            className="absolute -top-8 -left-12 md:-top-9 md:-left-16 -z-10"
          >
            <Decorations.Bullets className={bulletsClasses} />
          </Animated.FromDirection>
          <Animated.FromDirection
            from="bottom"
            animate={controls}
            custom={1}
            delay={1.8}
            className="absolute -bottom-8 -right-12 md:-bottom-9 md:-right-16 -z-10"
          >
            <Decorations.Bullets className={bulletsClasses} />
          </Animated.FromDirection> */}
          <div className="relative wrap overflow-hidden py-10 h-full">
            <div className="border-2-2 absolute border-opacity-100 border-indigo-400 h-full border md:left-1/2 left-10 hidden md:flex" ></div>
            <div className="mb-8 flex justify-between md:flex-row-reverse items-center w-full left-timeline">
              <div className="order-1 md:w-5/12"></div>
              <div className="z-20 items-center order-1 bg-yellow-200 shadow-lg w-auto h-auto px-5 py-1 rounded-full hidden md:flex">
                <h1 className="mx-auto font-semibold text-lg text-gray-500 ">2019</h1>
              </div>
              <Animated.FromDirection className="order-1 bg-gray-100 rounded-lg shadow-lg w-full md:w-5/12" from="right" animate={controls} custom={1} delay={1.2} duration={0.8}>
                <Animated.Reveal from="right" className=" px-4 py-4" animate={controls} delay={1.4} duration={1.2}>
                  <Slider {...settings} className="mb-10 rounded-lg w-full h-auto">
                    <div>
                      <img className="rounded-lg w-full h-auto" src="./img/achievement/timeline-2019.jpg" alt="" />
                    </div>
                    <div>
                      <img className="rounded-lg w-full h-auto" src="./img/achievement/achievment-1.png" alt="" />
                    </div>
                    <div>
                      <img className="rounded-lg w-full h-auto" src="./img/achievement/achievment-2.png" alt="" />
                    </div>
                    <div>
                      <img className="rounded-lg w-full h-auto" src="./img/achievement/achievment-3.png" alt="" />
                    </div>
                  </Slider>
                  <ContentTitle className="mb-3 font-bold text-gray-800 text-xl"> Juara Umum Komurindo Kombat 2018-2019</ContentTitle>
                  <ul className="list-disc ml-5">
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 1 Divisi Muatan Roket</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 1 Divisi Wahana Sistem Kendali</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 3 Divisi Muatan Balon Atmosfer</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Presentasi Terbaik Divisi Wahana Sistem Kendali</p></li>
                  </ul>
                </Animated.Reveal>
              </Animated.FromDirection>
            </div>
            <div className="mb-8 flex justify-between items-center w-full right-timeline">
              <div className="order-1 md:w-5/12"></div>
              <div className="z-20 items-center order-1 bg-yellow-200 shadow-lg w-auto h-auto px-5 py-1 rounded-full hidden md:flex">
                <h1 className="mx-auto font-semibold text-lg text-gray-500">2017</h1>
              </div>
              <Animated.FromDirection className="order-1 bg-gray-100 rounded-lg shadow-lg w-full md:w-5/12" from="left" animate={controls} custom={1} delay={1.6} duration={0.8}>
                <Animated.Reveal from="left" className=" px-4 py-4" animate={controls} delay={1.8} duration={1.2}>
                  <Slider {...settings} className="mb-10 rounded-lg w-full h-auto">
                    <div>
                      <img className="rounded-lg w-full h-auto" src="./img/achievement/timeline-2017.jpg" alt="" />
                    </div>
                  </Slider>
                  <ContentTitle className="mb-3 font-bold text-gray-800 text-xl"> Juara Umum Komurindo Kombat 2017</ContentTitle>
                  <ul className="list-disc ml-5">
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 1 Divisi Muatan Balon Atmosfer</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 2 Divisi Muatan Roket</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 2 Divisi Wahana Sistem Kendali</p></li>
                  </ul>
                </Animated.Reveal>
              </Animated.FromDirection>
            </div>
            <div className="mb-8 flex justify-between md:flex-row-reverse items-center w-full left-timeline">
              <div className="order-1 md:w-5/12"></div>
              <div className="z-20 items-center order-1 bg-yellow-200 shadow-lg w-auto h-auto px-5 py-1 rounded-full hidden md:flex">
                <h1 className="mx-auto font-semibold text-lg text-gray-500">2016</h1>
              </div>
              <Animated.FromDirection className="order-1 bg-gray-100 rounded-lg shadow-lg w-full md:w-5/12 px-4 py-4" from="right" animate={controls} custom={1} delay={2.0} duration={0.8}>
                <Animated.Reveal from="right" className="px-4 py-4" animate={controls} delay={2.2} duration={1.2}>
                  <Slider {...settings} className="mb-10 rounded-lg w-full h-auto">
                    <div>
                      <img className="rounded-lg w-full h-auto" src="./img/achievement/timeline-2016.jpg" alt="" />
                    </div>
                  </Slider>
                  <ContentTitle className="mb-3 font-bold text-gray-800 text-xl"> Juara Umum Komurindo Kombat 2016</ContentTitle>
                  <ul className="list-disc ml-5">
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 1 Divisi Muatan Balon Atmosfer</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 2 Divisi Muatan Roket</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Juara 2 Divisi Wahana Sistem Kendali</p></li>
                    <li><p className="text-sm text-gray-900 text-opacity-100">Desain Terbaik Divisi Muatan Roket</p></li>
                  </ul>
                </Animated.Reveal>
              </Animated.FromDirection>
            </div>
          </div>
        </div >
        {/* <Animated.FromDirection className="flex justify-center mt-8" from="bottom" animate={controls} custom={2} delay={1}>
          <RoundedButton as="a" href="https://github.com/itsfaqih">
            {language.value === 'en' ? 'See more' : 'Lihat Selengkapnya'}
          </RoundedButton>
        </Animated.FromDirection> */}
      </section >
    );
  }
);
export default Achievement;
|
#!/bin/bash
#####
#
# Golang installation
# by official suggested way...
#
###
#TODO(darin-m): it's better to get settings from command line arguments...

# The sudo password must be supplied as the first positional argument.
password=""
if [ -n "$1" ]
then
    password="$1"
    shift
    # Security fix: do not echo the password back — it would leak into
    # terminal scrollback and any captured logs.
else
    echo "You should define sudo password!"
    echo "Usage: cmd <sudo-password>"
    exit 1
fi

# Keep the tarball name in one place so upgrading means editing one line.
go_tarball="go1.11.2.linux-amd64.tar.gz"

echo
echo "### Installing Golang ###"
echo "#########################"
echo
echo "Downloading golang into the /tmp directory"
echo
wget --directory-prefix=/tmp "https://dl.google.com/go/$go_tarball"
echo
# Message fix: the archive is extracted into /opt, not /tmp.
echo "Untaring the archive into the '/opt' directory"
echo
echo "$password" | sudo -S tar -C /opt -xzf "/tmp/$go_tarball"
echo
echo "Cleaning up artifacts after installation"
rm "/tmp/$go_tarball"
echo
echo "Creating links for go, godoc and gofmt in the /usr/local/bin directory"
echo
echo "$password" | sudo -S ln -s /opt/go/bin/go /usr/local/bin/
echo "$password" | sudo -S ln -s /opt/go/bin/godoc /usr/local/bin/
echo "$password" | sudo -S ln -s /opt/go/bin/gofmt /usr/local/bin/
echo
echo "## skipping this step because we've already created symlink in /usr/local/bin"
echo "## Add path to this directory to the PATH env variable"
echo "## export PATH=\$PATH:/usr/local/go/bin"
echo
echo "Creating 'go' directory as \$HOME/go"
echo
# -p: don't fail when the workspace directory already exists.
mkdir -p "$HOME/go"
echo "Creating your workspace directory, $HOME/go."
echo
export GOPATH="$HOME/go"
echo
echo
# show version
go version
echo
echo
|
#!/bin/bash
# SLURM batch job: runs the causal-Shapley R script as an 8-way array job
# on a single node with 40 CPUs per task.
#SBATCH --nodes=1                      # number of nodes requested
#SBATCH --ntasks=1                     # number of tasks (default: 1)
#SBATCH --cpus-per-task=40             # number of CPUs per task
#SBATCH --partition=all                # partition to run in (all or maxwell)
#SBATCH --job-name=CO3-WC              # job name
#SBATCH --output=logs/CO3-WC-%N-%j.out # output file name
#SBATCH --error=logs/CO3-WC-%N-%j.err  # error file name
#SBATCH --time=1:00:00                 # runtime requested
#SBATCH --mail-user=ayan.paul@desy.de  # notification email
#SBATCH --mail-type=END,FAIL           # notification type
#SBATCH --export=ALL
#SBATCH --array 1-8
# Clear any inherited LD_PRELOAD so R starts with a clean loader state.
export LD_PRELOAD=""
# run the application:
# The first script argument is forwarded to the R script; array tasks are
# distinguished by SLURM_ARRAY_TASK_ID (1-8).
Rscript causal_shapley_script_cluster_parallel.R ${1}
|
#!/bin/bash
G_AWK="${G_AWK:-awk}"
# Entry point: source the bashtest framework (relative to this script's real
# location), optionally enable verbose output, then run the whole suite.
# Any extra command-line argument turns verbose mode on.
function main
{
source "$(dirname "$(realpath "$0")")/../../../bash/bashtest/bashtest.sh"
# Bug fix: the original compared against '-0' ('-gt -0'); the intended
# comparison is against zero.
if [ "$#" -gt 0 ]; then
	bt_set_verbose
fi
bt_enter
bt_eval test_all
bt_exit_success
}
# Regression test: the generator must be able to regenerate itself.
# Runs smpg.awk on self.smpg and diffs the output against smpg.awk itself.
function test_self_gen
{
# have to change dir to parent so the includes can be found
pushd ../ > "/dev/null"
bt_diff_ok "<($G_AWK -f smpg.awk self.smpg) smpg.awk"
popd > "/dev/null"
}
# run [args...] -- invoke smpg.awk (from the parent directory) with the given
# arguments; the eval lets callers embed redirections in the argument string.
function run { eval "$G_AWK -f ../smpg.awk $@"; }
# run2 [args...] -- like run, but capture only stderr: stdout is discarded and
# stderr is redirected onto the captured stream.
function run2 { run "$@ 2>&1 1>/dev/null"; }
# Exercises every parser error path of smpg.awk: each case feeds a malformed
# source via process substitution, asserts a non-zero exit (bt_assert_failure)
# and greps stderr for exactly one occurrence of the expected message.
function test_errors
{
# Case: no input file at all -> usage message.
local L_RUN=""
L_RUN="$(run2)"
bt_assert_failure
local L_RES=\
'Use: smpg.awk [options] <input-file>
Try: smpg.awk -vHelp=1'
bt_diff_ok "<(echo '$L_RES') <(echo '$L_RUN')"
# Case: plain text before any directive.
L_RUN="$(run2 <(echo 'foo'))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 1: line not source or a comment$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: unknown first directive.
L_RUN="$(run2 <(echo '%foo'))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 1: expected 'begin', got 'foo' instead$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: file ends before %generate.
L_RUN="$(run2 <(echo '%begin'))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line end-of-file: expected 'generate', got 'begin' instead$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: stray text after a complete include block.
local L_SRC=\
'%begin
%include
%end
foo'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 4: line not source or a comment$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: wrong directive where %fsm is expected.
L_SRC=\
'%begin
%include
%end
%foo'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 4: expected 'fsm', got 'foo' instead$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: include block never closed.
L_SRC=\
'%begin
%include
%foo'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line end-of-file: expected 'generate', got 'include' instead$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: %fsm without a name.
L_SRC=\
'%begin
%include
%end
%fsm
%end'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 4: syntax should be '%fsm <fsm-name>'$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: fsm block with no transitions.
L_SRC=\
'%begin
%include
%end
%fsm fsm_name
%end
%handler
%end'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 6: 'fsm' block empty$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: %handler without a regex argument.
L_SRC=\
'%begin
%include
%end
%fsm fsm_name
foo
%end
%handler
%end'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 8: syntax should be '%handler <regex> \[args\]'$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: %template without a regex argument.
L_SRC=\
'%begin
%include
%end
%fsm fsm_name
foo
%end
%handler foo
%end
;
%template
%end'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 10: syntax should be '%template <regex>'$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: fsm transition line without the expected separator.
L_SRC=\
'%begin
%include
%end
%fsm fsm_name
foo
%end
%handler foo
;
%end
%template foo
%end
%other
%end
%generate'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 5: fsm: bad separator$\" <(echo \"$L_RUN\")) <(echo 1)"
# Case: include of a file that cannot be read.
L_SRC=\
'%begin
%include
empty
%end
%fsm fsm_name
foo -> bar
bar -> foo
%end
%handler foo
;
%end
%template foo
%end
%other
%end
%generate'
L_RUN="$(run2 <(echo "$L_SRC"))"
bt_assert_failure
bt_diff_ok "<(grep -c \"smpg.awk: error: file .* line 3: reading 'empty': .*$\" <(echo \"$L_RUN\")) <(echo 1)"
}
# Checks that -vVersion=1 prints the expected "smpg.awk 1.0" banner.
function test_version
{
bt_diff_ok "<(echo '$(run -vVersion=1)') <(echo 'smpg.awk 1.0')"
}
# Aggregate suite: each group runs under bt_eval so failures are reported
# through the framework.
function test_all
{
bt_eval test_version
bt_eval test_errors
bt_eval test_self_gen
}
# Kick off the suite, forwarding CLI args (any argument enables verbose mode).
main "$@"
|
class Product:
    """A simple product record with a name and a price.

    Java-bean style accessors (``get_``/``set_``) are kept so existing call
    sites continue to work; direct attribute access also remains valid.
    """

    def __init__(self, name: str, price: float) -> None:
        self.name = name
        self.price = price

    def get_name(self) -> str:
        """Return the product's name."""
        return self.name

    def set_name(self, name: str) -> None:
        """Replace the product's name."""
        self.name = name

    def get_price(self) -> float:
        """Return the product's price."""
        return self.price

    def set_price(self, price: float) -> None:
        """Replace the product's price."""
        self.price = price

    def __repr__(self) -> str:
        """Unambiguous representation for logs and the REPL (new, additive)."""
        return f"Product(name={self.name!r}, price={self.price!r})"
<filename>src/api/user.ts
import axios from 'axios';
import { ISaveResultQuiz, ICompletedQuiz } from '@interfaces/quizzes.interface';
// Response body shape of GET /users/get-result-quizzes/:id.
interface IGetCompletedQuizzes {
  completedQuizzes: ICompletedQuiz[];
}
/**
 * Persists a finished quiz result for the current user.
 * Failures are intentionally swallowed (best effort), but the actual error
 * is now logged so they remain diagnosable.
 */
export const saveTheResultOfTheQuiz = async (data: ISaveResultQuiz) => {
  try {
    await axios.put(`/users/save-result-quiz`, data);
  } catch (error) {
    // Bug fix: previously logged the literal string 'Error', hiding the cause.
    console.log('Error', error);
  }
};
/**
 * Fetches the completed quizzes of the user with the given id.
 * Returns the completed-quiz list, or undefined when the request fails
 * (the error is logged, matching the existing best-effort contract).
 */
export const getTheResultOfTheQuizzes = async (id: string) => {
  try {
    const { data } = await axios.get<IGetCompletedQuizzes>(
      `/users/get-result-quizzes/${id}`,
    );
    return data.completedQuizzes;
  } catch (error) {
    // Bug fix: previously logged the literal string 'Error', hiding the cause.
    console.log('Error', error);
  }
};
|
<reponame>zmike808/party-panel-2
/*
* Copyright (c) 2020, TheStonedTurtle <https://github.com/TheStonedTurtle>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.zmike808.runelite.partypanel.ui.skills;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.image.BufferedImage;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
import net.runelite.api.Constants;
import net.runelite.api.SpriteID;
import net.runelite.client.game.SpriteManager;
import net.runelite.client.ui.FontManager;
import net.runelite.client.util.ImageUtil;
import com.zmike808.runelite.partypanel.ImgUtil;
/**
 * Panel slot rendering a single skill for a party member: the skill icon on
 * the left half and "boosted / base" level text on the right half, painted
 * over the in-game stats tile sprites.
 */
public class SkillPanelSlot extends JPanel
{
	// Half the slot is one item sprite wide; the full slot is two halves.
	private static final Dimension PANEL_HALF_SIZE = new Dimension(Constants.ITEM_SPRITE_WIDTH, Constants.ITEM_SPRITE_HEIGHT + 4);
	static final Dimension PANEL_FULL_SIZE = new Dimension(PANEL_HALF_SIZE.width * 2, PANEL_HALF_SIZE.height);
	// Top-left number: the current (possibly boosted) level.
	private final JLabel boostedLabel = new JLabel();
	// Bottom-right number: the base level.
	private final JLabel baseLabel = new JLabel();
	// Composite of skillHalf + statHalf; stays null until both sprites load.
	private BufferedImage background;
	private BufferedImage skillHalf;
	private BufferedImage statHalf;
	@Override
	protected void paintComponent(Graphics g)
	{
		super.paintComponent(g);
		// Sprites load asynchronously; skip painting until the composite exists.
		if (background == null)
		{
			return;
		}
		g.drawImage(background, 0, 0, null);
	}
	// Rebuilds the composite background once both sprite halves are present,
	// then repaints so the new image becomes visible.
	private void updateBackgroundImage()
	{
		if (skillHalf != null && statHalf != null)
		{
			background = ImgUtil.combineImages(skillHalf, statHalf);
			this.repaint();
		}
	}
	// Builds the text side of the slot: boosted level anchored top-left,
	// base level anchored bottom-right, both in the small RuneScape font.
	SkillPanelSlot(final int boostedLevel, final int baseLevel)
	{
		super();
		setOpaque(false);
		setPreferredSize(PANEL_FULL_SIZE);
		setLayout(new BorderLayout());
		final JPanel textPanel = new JPanel();
		textPanel.setLayout(new GridBagLayout());
		textPanel.setPreferredSize(PANEL_HALF_SIZE);
		textPanel.setOpaque(false);
		final GridBagConstraints c = new GridBagConstraints();
		c.gridx = 0;
		c.gridy = 0;
		c.weighty = .5;
		c.weightx = 1;
		c.fill = GridBagConstraints.BOTH;
		boostedLabel.setText(String.valueOf(boostedLevel));
		boostedLabel.setVerticalAlignment(JLabel.CENTER);
		boostedLabel.setHorizontalAlignment(JLabel.LEFT);
		boostedLabel.setFont(FontManager.getRunescapeSmallFont());
		boostedLabel.setForeground(Color.YELLOW);
		boostedLabel.setBorder(new EmptyBorder(6, 3, 0, 0));
		c.anchor = GridBagConstraints.NORTHWEST;
		textPanel.add(boostedLabel, c);
		baseLabel.setText(String.valueOf(baseLevel));
		baseLabel.setVerticalAlignment(JLabel.CENTER);
		baseLabel.setHorizontalAlignment(JLabel.RIGHT);
		baseLabel.setBorder(new EmptyBorder(0, 0, 6, 6));
		baseLabel.setFont(FontManager.getRunescapeSmallFont());
		baseLabel.setForeground(Color.YELLOW);
		c.anchor = GridBagConstraints.SOUTHEAST;
		c.gridy++;
		textPanel.add(baseLabel, c);
		add(textPanel, BorderLayout.EAST);
	}
	// Asynchronously fetches the two stats-tile sprites, overlaying the skill
	// icon on the left half; each callback retries the composite build.
	void initImages(final BufferedImage skillIcon, final SpriteManager spriteManager)
	{
		spriteManager.getSpriteAsync(SpriteID.STATS_TILE_HALF_LEFT, 0, img ->
		{
			skillHalf = ImgUtil.overlapImages(skillIcon, SkillPanelSlot.resize(img));
			updateBackgroundImage();
		});
		spriteManager.getSpriteAsync(SpriteID.STATS_TILE_HALF_RIGHT_WITH_SLASH, 0, img ->
		{
			statHalf = SkillPanelSlot.resize(img);
			updateBackgroundImage();
		});
	}
	// Scales a sprite to exactly one half-slot.
	static BufferedImage resize(final BufferedImage img)
	{
		return ImageUtil.resizeImage(img, PANEL_HALF_SIZE.width, PANEL_HALF_SIZE.height);
	}
	// Updates the bottom-right (base) level text.
	public void updateBaseLevel(final int baseLevel)
	{
		baseLabel.setText(String.valueOf(baseLevel));
		baseLabel.repaint();
	}
	// Updates the top-left (boosted) level text.
	public void updateBoostedLevel(final int boostedLevel)
	{
		boostedLabel.setText(String.valueOf(boostedLevel));
		boostedLabel.repaint();
	}
}
|
<filename>src/vscripts/lib/client_util.d.ts
declare function CreateEmptyTalents(hero: string): void;
/**
* Client-side implementation of some base NPC extensions.
* Moddota types don't let me extend C_DOTA_BaseNPC.
*/
declare interface CDOTA_BaseNPC {
HasTalent(talentName: string): boolean;
FindTalentValue(talentName: string, key?: string): number;
GetTalentSpecialValueFor(value: string): number;
HasShard(): boolean;
} |
package com.platform.api;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.platform.annotation.IgnoreAuth;
import com.platform.annotation.LoginUser;
import com.platform.entity.*;
import com.platform.service.*;
import com.platform.util.ApiBaseAction;
import com.platform.util.ApiPageUtils;
import com.platform.utils.Base64;
import com.platform.utils.CharUtil;
import com.platform.utils.Query;
import com.platform.utils.StringUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* 作者: @author Harmon <br>
* 时间: 2017-08-11 08:32<br>
* 描述: ApiIndexController <br>
*/
@Api(tags = "评论")
@RestController
@RequestMapping("/api/comment")
public class ApiCommentController extends ApiBaseAction {
@Autowired
private ApiCommentService commentService;
@Autowired
private ApiUserService userService;
@Autowired
private ApiCommentPictureService commentPictureService;
@Autowired
private ApiCouponService apiCouponService;
@Autowired
private ApiUserCouponService apiUserCouponService;
@Autowired
private ApiOrderService orderService;
/**
* 发表评论
*/
@ApiOperation(value = "发表评论")
@PostMapping("postcomment")
public Object postComment(@LoginUser UserVo loginUser) {
JSONObject jsonParam = getJsonRequest();
String comments=jsonParam.getString("comments");
System.out.println(jsonParam.toString());
System.out.println(jsonParam.get("comments"));
List<CommentVo> componentList=JSONObject.parseArray(comments,CommentVo.class);
for(int i=0;i<componentList.size();i++) {
CommentVo commentVo=componentList.get(i);
if(commentVo.getStars()==null) {
commentVo.setType(1);
}
if(commentVo.getStars()>=4) {
commentVo.setType(1);
}
else if(commentVo.getStars()>=2) {
commentVo.setType(2);
}
else {
commentVo.setType(3);
}
commentVo.setType_id(0);
commentVo.setValue_id(commentVo.getGoods_id());
commentVo.setAdd_time(System.currentTimeMillis() / 1000);
commentVo.setUser_id(loginUser.getUserId());
if(commentVo.getContent()==null||commentVo.getContent().trim().length()==0){
commentVo.setContent("此用户没有评价");
}
commentVo.setContent(Base64.encode(commentVo.getContent()));
if(commentVo.getImages()!=null&&commentVo.getImages().length>0) {
commentVo.setImageflag(true);
}
Integer insertId = commentService.save(commentVo);
//
int inx = 0;
System.out.println("begin insert image");
if (insertId > 0 && null != commentVo.getImages() && commentVo.getImages().length > 0) {
System.out.println("begin insert image="+commentVo.getImages());
for (String imgLink : commentVo.getImages()) {
inx++;
CommentPictureVo pictureVo = new CommentPictureVo();
System.out.println("begin insert insertId="+insertId);
pictureVo.setComment_id(commentVo.getId());
pictureVo.setPic_url(imgLink);
pictureVo.setSort_order(inx);
System.out.println("save insert commentPictureService");
commentPictureService.save(pictureVo);
}
}
if (insertId > 0 && null != commentVo.getVideos() && commentVo.getVideos().length > 0) {
for (Video video : commentVo.getVideos()) {
inx++;
CommentPictureVo pictureVo = new CommentPictureVo();
System.out.println("begin insert insertId="+insertId);
pictureVo.setComment_id(commentVo.getId());
pictureVo.setPic_url(video.getUrlimg());
pictureVo.setSort_order(inx);
pictureVo.setVideo_url(video.getUrl());
System.out.println("save insert commentPictureService");
commentPictureService.save(pictureVo);
}
}
OrderVo orderVo = orderService.queryObject(commentVo.getOrder_id());
orderVo.setOrder_status(302);
orderVo.setShipping_status(2);
orderVo.setConfirm_time(new Date());
orderService.update(orderVo);
}
return toResponsObject(0, "评论添加成功",null);
}
/**
* 发表评论
*/
@ApiOperation(value = "发表评论")
@PostMapping("post")
public Object post(@LoginUser UserVo loginUser) {
Map resultObj = new HashMap();
//
JSONObject jsonParam = getJsonRequest();
Integer typeId = jsonParam.getInteger("typeId");
Integer valueId = jsonParam.getInteger("valueId");
String content = jsonParam.getString("content");
JSONArray imagesList = jsonParam.getJSONArray("imagesList");
CommentVo commentEntity = new CommentVo();
commentEntity.setType_id(typeId);
commentEntity.setValue_id(valueId);
commentEntity.setContent(content);
commentEntity.setStatus(0);
//
commentEntity.setAdd_time(System.currentTimeMillis() / 1000);
commentEntity.setUser_id(loginUser.getUserId());
commentEntity.setContent(Base64.encode(commentEntity.getContent()));
Integer insertId = commentService.save(commentEntity);
//
if (insertId > 0 && null != imagesList && imagesList.size() > 0) {
int i = 0;
for (Object imgLink : imagesList) {
i++;
CommentPictureVo pictureVo = new CommentPictureVo();
pictureVo.setComment_id(insertId);
pictureVo.setPic_url(imgLink.toString());
pictureVo.setSort_order(i);
commentPictureService.save(pictureVo);
}
}
// 是否领取优惠券
if (insertId > 0 && typeId == 0) {
// 当前评价的次数
Map param = new HashMap();
param.put("value_id", valueId);
List<CommentVo> commentVos = commentService.queryList(param);
boolean hasComment = false;
for (CommentVo commentVo : commentVos) {
if (commentVo.getUser_id().equals(loginUser.getUserId())
&& !commentVo.getId().equals(insertId)) {
hasComment = true;
}
}
if (!hasComment) {
Map couponParam = new HashMap();
couponParam.put("send_type", 6);
CouponVo newCouponConfig = apiCouponService.queryMaxUserEnableCoupon(couponParam);
if (null != newCouponConfig
&& newCouponConfig.getMin_transmit_num() >= commentVos.size()) {
UserCouponVo userCouponVo = new UserCouponVo();
userCouponVo.setAdd_time(new Date());
userCouponVo.setCoupon_id(newCouponConfig.getId());
userCouponVo.setCoupon_number(CharUtil.getRandomString(12));
userCouponVo.setUser_id(loginUser.getUserId());
apiUserCouponService.save(userCouponVo);
resultObj.put("coupon", newCouponConfig);
}
}
}
if (insertId > 0) {
return toResponsObject(0, "评论添加成功", resultObj);
} else {
return toResponsFail("评论保存失败");
}
}
/**
*/
@ApiOperation(value = "评论数量")
@GetMapping("count")
public Object count(Integer typeId, Integer valueId) {
Map<String, Object> resultObj = new HashMap();
//
Map param = new HashMap();
param.put("type_id", typeId);
param.put("value_id", valueId);
Integer allCount = commentService.queryTotal(param);
Integer hasPicCount = commentService.queryhasPicTotal(param);
//
resultObj.put("allCount", allCount);
resultObj.put("hasPicCount", hasPicCount);
return toResponsSuccess(resultObj);
}
/**
*/
@ApiOperation(value = "评论数量")
@GetMapping("groupcount")
public Object groupcount( Integer valueId) {
Map<String, Object> resultObj = new HashMap();
//
Map param = new HashMap();
param.put("goods_id", valueId);
List<GroupCountVo> groupCountVoList = commentService.queryGroupCount(param);
GroupCountVo[] groupCountVos=new GroupCountVo[5];
groupCountVos[0]=new GroupCountVo();
groupCountVos[1]=new GroupCountVo();
groupCountVos[2]=new GroupCountVo();
groupCountVos[3]=new GroupCountVo();
groupCountVos[4]=new GroupCountVo();
groupCountVos[0].setFlag(0);
groupCountVos[1].setFlag(1);
groupCountVos[2].setFlag(2);
groupCountVos[3].setFlag(3);
groupCountVos[4].setFlag(4);
for(GroupCountVo groupCountVo:groupCountVoList) {
groupCountVos[0].setNum( groupCountVos[0].getNum()+groupCountVo.getNum());
if(groupCountVo.getStars()==null) {
groupCountVos[1].setNum( groupCountVos[1].getNum()+groupCountVo.getNum());
}
if(groupCountVo.getStars()>=4) {
groupCountVos[1].setNum( groupCountVos[1].getNum()+groupCountVo.getNum());
}
if(groupCountVo.getStars()>=2&&groupCountVo.getStars()<4) {
groupCountVos[2].setNum( groupCountVos[2].getNum()+groupCountVo.getNum());
}
if(groupCountVo.getStars()<2) {
groupCountVos[3].setNum( groupCountVos[3].getNum()+groupCountVo.getNum());
}
System.out.println("isImageflag="+groupCountVo.isImageflag());
if(groupCountVo.isImageflag()) {
groupCountVos[4].setNum( groupCountVos[4].getNum()+groupCountVo.getNum());
}
}
return toResponsSuccess(groupCountVos);
}
/**
* @param typeId
* @param valueId
* @param type 选择评论的类型 0 全部, 1 好评 2 中评 3差评 4 有图
* @param page
* @param size
* @return
*/
@ApiOperation(value = "选择评论类型")
@IgnoreAuth
@GetMapping("list")
public Object listtype( Integer valueId, Integer type,
@RequestParam(value = "page", defaultValue = "1") Integer page, @RequestParam(value = "size", defaultValue = "10") Integer size,
String sort, String order) {
Map<String, Object> resultObj = new HashMap();
List<CommentVo> commentList = new ArrayList();
Map param = new HashMap();
param.put("value_id", valueId);
param.put("page", page);
param.put("limit", size);
if (StringUtils.isNullOrEmpty(sort)) {
param.put("order", "desc");
} else {
param.put("order", sort);
}
if (StringUtils.isNullOrEmpty(order)) {
param.put("sidx", "id");
} else {
param.put("sidx", order);
}
if(type!=null&&type!=0&&type!=4) {
param.put("type", type);
}
if(type!=null&&type==4) {
param.put("imageflag", 1);
}
//查询列表数据
Query query = new Query(param);
commentList = commentService.queryList(query);
int total = commentService.queryTotal(query);
ApiPageUtils pageUtil = new ApiPageUtils(commentList, total, query.getLimit(), query.getPage());
//
for (CommentVo commentItem : commentList) {
commentItem.setContent(Base64.decode(commentItem.getContent()));
commentItem.setUser_info(userService.queryObject(commentItem.getUser_id()));
Map paramPicture = new HashMap();
paramPicture.put("comment_id", commentItem.getId());
List<CommentPictureVo> commentPictureEntities = commentPictureService.queryList(paramPicture);
Integer imgnum=0;
Integer videonum=0;
for(CommentPictureVo commentPictureVo : commentPictureEntities ) {
if(commentPictureVo.getVideo_url()==null) {
imgnum++;
}else {
videonum++;
}
}
String[] images=new String[imgnum];
Video[] videos=new Video[videonum];
imgnum=0;
videonum=0;
for(CommentPictureVo commentPictureVo : commentPictureEntities ) {
if(commentPictureVo.getVideo_url()==null) {
images[imgnum]=commentPictureVo.getPic_url();
imgnum++;
}else {
videos[videonum]=new Video();
videos[videonum].setUrl(commentPictureVo.getPic_url());
videos[videonum].setUrlimg(commentPictureVo.getVideo_url());
videonum++;
}
}
commentItem.setImages(images);
commentItem.setVideos(videos);
commentItem.setPic_list(commentPictureEntities);
}
return toResponsSuccess(pageUtil);
}
/**
* @param typeId
* @param valueId
* @param showType 选择评论的类型 0 全部, 1 好评 2 中评 3差评 4 有图
* @param page
* @param size
* @return
*/
@ApiOperation(value = "选择评论类型")
@IgnoreAuth
@GetMapping("listgroup")
public Object list(Integer typeId, Integer valueId, Integer showType,
@RequestParam(value = "page", defaultValue = "1") Integer page, @RequestParam(value = "size", defaultValue = "10") Integer size,
String sort, String order) {
Map<String, Object> resultObj = new HashMap();
List<CommentVo> commentList = new ArrayList();
Map param = new HashMap();
param.put("type_id", typeId);
param.put("value_id", valueId);
param.put("page", page);
param.put("limit", size);
if (StringUtils.isNullOrEmpty(sort)) {
param.put("order", "desc");
} else {
param.put("order", sort);
}
if (StringUtils.isNullOrEmpty(order)) {
param.put("sidx", "id");
} else {
param.put("sidx", order);
}
if (null != showType && showType == 1) {
param.put("hasPic", 1);
}
//查询列表数据
Query query = new Query(param);
commentList = commentService.queryList(query);
int total = commentService.queryTotal(query);
ApiPageUtils pageUtil = new ApiPageUtils(commentList, total, query.getLimit(), query.getPage());
//
for (CommentVo commentItem : commentList) {
commentItem.setContent(Base64.decode(commentItem.getContent()));
commentItem.setUser_info(userService.queryObject(commentItem.getUser_id()));
Map paramPicture = new HashMap();
paramPicture.put("comment_id", commentItem.getId());
List<CommentPictureVo> commentPictureEntities = commentPictureService.queryList(paramPicture);
commentItem.setPic_list(commentPictureEntities);
}
return toResponsSuccess(pageUtil);
}
} |
def search_string(s1, s2):
    """Return True if ``s2`` occurs as a substring of ``s1``.

    The membership test already yields a bool, so it is returned directly
    instead of routing it through an if/else.
    """
    return s2 in s1
#include <iostream>
// Prints the running (cumulative) sum of a fixed array of integers.
int main() {
    const int arr[] = {1, 2, 3, 4, 5};
    int cumulativeSum = 0;
    // Range-based for removes the hard-coded element count '5' the original
    // indexed loop relied on — a silent out-of-sync hazard if the array grows.
    for (int value : arr) {
        cumulativeSum += value;
        std::cout << "Cumulative Sum of " << value << " is " << cumulativeSum << "\n";
    }
    return 0;
}
<reponame>hotspacode/neeza<filename>neeza-spy/src/main/java/io/github/hotspacode/neeza/spy/init/HeartbeatSenderInit.java<gh_stars>1-10
package io.github.hotspacode.neeza.spy.init;
import io.github.hotspacode.neeza.base.concurrent.NamedThreadFactory;
import io.github.hotspacode.neeza.base.log.NeezaLog;
import io.github.hotspacode.neeza.spy.transport.NettyHttpHeartbeatSender;
import io.github.hotspacode.neeza.transport.api.HeartbeatSender;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Bootstraps periodic heartbeat reporting: lazily creates a single-threaded
 * scheduler and registers a fixed-rate task driving a NettyHttpHeartbeatSender.
 */
public class HeartbeatSenderInit {
    // Created lazily by initSchedulerIfNeeded(); null until init() runs.
    private ScheduledExecutorService pool = null;
    // Builds the scheduler on first use: one daemon thread; when the queue
    // overflows, the oldest pending task is discarded rather than rejected.
    // NOTE(review): not synchronized — confirm init() is only invoked from a
    // single thread during startup.
    private void initSchedulerIfNeeded() {
        if (pool == null) {
            pool = new ScheduledThreadPoolExecutor(1,
                new NamedThreadFactory("nezza-heartbeat-send-task", true),
                new ThreadPoolExecutor.DiscardOldestPolicy());
        }
    }
    // Entry point: prepares the scheduler and starts the heartbeat loop.
    public void init() {
        initSchedulerIfNeeded();
        HeartbeatSender heartbeatSender = new NettyHttpHeartbeatSender();
        scheduleHeartbeatTask(heartbeatSender);
    }
    // Fires sender.sendHeartbeat() at the sender's own interval after an
    // initial 10s delay; exceptions are logged and never kill the task.
    private void scheduleHeartbeatTask(final HeartbeatSender sender) {
        pool.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    sender.sendHeartbeat();
                } catch (Throwable e) {
                    NeezaLog.warn("[HeartbeatSender] Send heartbeat error", e);
                }
            }
        }, 10000, sender.intervalMilliseconds(), TimeUnit.MILLISECONDS);
    }
}
|
import pandas as pd
import matplotlib.pyplot as plt
from bs4 import BeautifulSoup as BS
import geopandas as gpd
import requests
'''
I believe a good way of organizing the code will be creating classes for the different
kinds of map creation we've been using. Here I aggregate all maps I've made from scraping
tables in state websites into instances of the same class.
'''
class MapScraper:
    """Scrape a state's COVID-19 case table and plot it as a choropleth map.

    Each instance downloads an HTML table from a state website, normalizes the
    municipality names, merges them with the state's municipality shapefile
    and saves a categorized map of confirmed cases as ``casos_<name>.png``.
    """

    def __init__(self, name, url, shapefile):
        self.url = url              # page that contains the HTML case table
        self.shapefile = shapefile  # path to the state's municipality shapefile
        self.name = name            # short state tag used in the output filename

    def get(self):
        """Download the page and return its first <table> tag (None if absent)."""
        response = requests.get(self.url)
        soup = BS(response.content, "html.parser")
        html_table = soup.find('table')
        return (html_table)

    def transform(self, html_table):
        """Parse the table, clean it, and merge it with the shapefile.

        Returns a GeoDataFrame carrying a ``casos_categorizados`` column that
        bins the confirmed-case counts.
        """
        estado = pd.read_html(str(html_table))[0]
        # Different states label these columns differently; normalize them all
        # to 'Municipio' / 'Confirmados'.
        estado.rename({'Cidade': 'Municipio', 'Nome do município': 'Municipio', 'Confir-mados': 'Confirmados'}, axis=1, inplace=True)
        estado.replace('-', 0, inplace=True)
        # Strip a leading "NN. " numbering and upper-case names so they match
        # the shapefile. regex=True is required: pandas >= 2.0 treats
        # str.replace patterns literally by default, which would silently
        # leave the numbering in place.
        estado['Municipio'] = estado['Municipio'].str.replace(r'\d+\.\s', '', regex=True).str.upper()
        mapa_dos_municipios = gpd.read_file(self.shapefile)
        mapa_dos_municipios.columns = ['Municipio', 'idMunicipio', 'geometry']
        # Left-merge keeps every municipality; ones missing from the table get
        # NaN and are treated as zero confirmed cases below.
        map_conf = pd.merge(mapa_dos_municipios, estado, on='Municipio', how='left')
        map_conf["Confirmados"] = map_conf["Confirmados"].fillna(0).astype(float)
        # NOTE(review): labels read "11-50"/"51-500" while the bins are
        # (10, 50] and (50, 500] — off by one in the label text only; confirm
        # the intended presentation.
        map_conf["casos_categorizados"] = pd.cut(map_conf["Confirmados"],
                                                 bins=[-1, 1, 10, 50, 500, 1000, 10000],
                                                 labels=["0", "1-10", "11-50", "51-500", ">500", ">1000"])
        return (map_conf)

    def plot_map(self, map_conf):
        """Render the categorized map and save it as casos_<name>.png."""
        fig, ax = plt.subplots()
        ax = map_conf.plot(column='casos_categorizados',
                           categorical=True,
                           legend=True,
                           figsize=(10, 6),
                           markersize=46,
                           cmap="Reds",
                           edgecolor='k',
                           linewidth=0.1,
                           ax=ax);
        ax.axes.get_xaxis().set_visible(False)
        ax.axes.get_yaxis().set_visible(False)
        leg = ax.get_legend()
        leg.set_bbox_to_anchor((0., 0., 0.3, 0.32))
        plt.savefig(f"casos_{self.name}" + ".png", dpi=300)
        # Close the figure so repeated calls (one per state) don't accumulate
        # open matplotlib figures.
        plt.close(fig)

    def calls(self):
        """Full pipeline: download -> transform -> plot."""
        html_table = self.get()
        map_conf = self.transform(html_table)
        self.plot_map(map_conf)
# One scraper per state; each downloads its table, merges it with the state's
# municipality shapefile and writes casos_<NAME>.png.
ma = MapScraper(
    name="MA",
    url="https://www.corona.ma.gov.br/",
    shapefile="./shapefiles/21MUE250GC_SIR.shp")
rr = MapScraper(
    name="RR",
    url="https://roraimacontraocorona.rr.gov.br/winner/public/mapa.xhtml",
    shapefile="./shapefiles/14MUE250GC_SIR.shp")
#rs = MapScraper(
#    url="http://ti.saude.rs.gov.br/covid19/",
#    shapefile="../../../estados_shapes/unzipped/43MUE250GC_SIR.shp",
#    name = "RS")
for scraper in (ma, rr):
    scraper.calls()
#rs.calls()
|
class ApiClient:
    """Thin wrapper around a transport client for issuing GET requests."""

    def __init__(self, client):
        # Transport object exposing get(url, params) -> request and
        # send(request, headers, **config) -> response.
        self._client = client

    def get_data(self, url, query_parameters, header_parameters, operation_config):
        """Issue a GET and return the decoded JSON body on HTTP 200.

        Non-200 responses are reported on stdout and yield None (implicitly).
        """
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        status = response.status_code
        if status == 200:
            # Success path: decode and hand back the JSON payload.
            return response.json()
        # Error path: same status-specific messages as before; falls through
        # and implicitly returns None.
        if status == 404:
            print(f"Resource not found at {url}")
        elif status == 401:
            print("Unauthorized access")
        else:
            print(f"Unexpected status code: {status}")
// @flow
/* eslint-disable no-unused-vars */
import React from 'react'
import styled from 'styled-components'
import Row from './Row'
import Col from './Col'
import { withBreakpoints } from './BreakpointProvider'
import { divvy, breakpoint, passOn } from '../../utils'
type Props = {
children?: Array<React.Element<>>,
debug?: boolean,
xs?: boolean,
sm?: boolean,
md?: boolean,
lg?: boolean,
breakpoints?: Object,
}
function HiddenContainer(props: Props) {
const { children, debug, xs, sm, md, lg, breakpoints, ...rest } = props
const newChildren = passOn(children, [Row, Col], child => {
return {
debug:
typeof child.props.debug === 'undefined' ? debug : child.props.debug,
}
})
return <div {...rest}>{newChildren}</div>
}
// Builds the media-query CSS for one breakpoint name: when the corresponding
// boolean prop (xs/sm/md/lg) is truthy, the element is hidden at that
// breakpoint; otherwise display falls back to 'inherit'.
const compute = name =>
  breakpoint(
    name,
    (props, name) => `display:${props[name] ? 'none' : 'inherit'};`
  )
// Styled wrapper applying the hide/show rule for every supported breakpoint.
const Hidden = styled(HiddenContainer)`
  ${compute('xs')}
  ${compute('sm')}
  ${compute('md')}
  ${compute('lg')}
`
export default withBreakpoints(Hidden)
|
<filename>src/SplayLibrary/Core/Glsl.cpp
#include <SplayLibrary/SplayLibrary.hpp>
#include <SplayLibrary/Private/Private.hpp>

// This file instantiates, via token-pasting macros, the external operators
// and special functions for every GLSL-like vector type (vec/dvec/ivec/uvec/
// bvec, sizes 2-4). SPLD_VEC(vecName, vecSize, eltType) selects the operator
// set appropriate to the element type via SPLD_<vecName>_EXTERNAL_OP /
// SPLD_<vecName>_SPECIAL_FUNC dispatch.
//
// NOTE(review): the recurring pattern `vecName##vecSize##&` pastes a digit
// onto `&`, which does not form a valid preprocessing token (GCC/Clang
// reject it; MSVC accepts). The second `##` should simply be removed, as is
// already done on some lines below (e.g. SPLD_VEC_REAL_FUNC's radians/mod
// lines). Confirm against the project's supported compilers.

// Top-level entry: all external operators + special functions for one type.
#define SPLD_VEC(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC_SPECIAL_FUNC(vecName, vecSize, eltType)

// Vec external operators

// Dispatches on element family (arith/bitwise/logic) and on size (cmp/stream).
#define SPLD_VEC_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_##vecName##_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_EXTERNAL_OP_CMP(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_EXTERNAL_OP_STREAM(vecName, vecSize, eltType)

// Per-family operator selection: float/double get arithmetic, integer types
// additionally get bitwise operators, bool vectors only get logic operators.
#define SPLD_vec_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP_U_ARITH(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_EXTERNAL_OP_S_ARITH(vecName, vecSize, eltType)

#define SPLD_dvec_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP_U_ARITH(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_EXTERNAL_OP_S_ARITH(vecName, vecSize, eltType)

#define SPLD_ivec_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP_U_ARITH(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP_BITWISE(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_EXTERNAL_OP_S_ARITH(vecName, vecSize, eltType)

#define SPLD_uvec_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP_U_ARITH(vecName, vecSize, eltType) \
	SPLD_VEC_EXTERNAL_OP_BITWISE(vecName, vecSize, eltType)

#define SPLD_bvec_EXTERNAL_OP(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_EXTERNAL_OP_LOGIC(vecName, vecSize, eltType)

// Binary arithmetic (vec-vec, vec-scalar, scalar-vec), all delegating to the
// compound assignment operators of the vector type.
#define SPLD_VEC_EXTERNAL_OP_U_ARITH(vecName, vecSize, eltType) \
	vecName##vecSize operator+(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w += v; } \
	vecName##vecSize operator-(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w -= v; } \
	vecName##vecSize operator*(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w *= v; } \
	vecName##vecSize operator/(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w /= v; } \
	\
	vecName##vecSize operator+(const vecName##vecSize##& u, eltType x) { vecName##vecSize v(u); return v += x; } \
	vecName##vecSize operator-(const vecName##vecSize##& u, eltType x) { vecName##vecSize v(u); return v -= x; } \
	vecName##vecSize operator*(const vecName##vecSize##& u, eltType x) { vecName##vecSize v(u); return v *= x; } \
	vecName##vecSize operator/(const vecName##vecSize##& u, eltType x) { vecName##vecSize v(u); return v /= x; } \
	\
	vecName##vecSize operator+(eltType x, const vecName##vecSize##& u) { vecName##vecSize v(u); return v += x; } \
	vecName##vecSize operator-(eltType x, const vecName##vecSize##& u) { vecName##vecSize v(u); return v -= x; } \
	vecName##vecSize operator*(eltType x, const vecName##vecSize##& u) { vecName##vecSize v(u); return v *= x; } \
	vecName##vecSize operator/(eltType x, const vecName##vecSize##& u) { vecName##vecSize v(u); return v /= x; }

// NOTE(review): scalar-vec operator- and operator/ above compute v -= x and
// v /= x, i.e. (u - x) and (u / x) rather than (x - u) and (x / u) — these
// operators are not commutative; confirm intended semantics.

// Bitwise operators (integer vectors only), delegating to compound forms.
#define SPLD_VEC_EXTERNAL_OP_BITWISE(vecName, vecSize, eltType) \
	vecName##vecSize operator|(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w |= v; } \
	vecName##vecSize operator&(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w &= v; } \
	vecName##vecSize operator^(const vecName##vecSize##& u, const vecName##vecSize##& v) { vecName##vecSize w(u); return w ^= v; }

// Unary plus/minus, expanded per component count.
#define SPLD_VEC2_EXTERNAL_OP_S_ARITH(vecName, vecSize, eltType) \
	vecName##2 operator+(const vecName##2& u) { return u; } \
	vecName##2 operator-(const vecName##2& u) { return { -u.x, -u.y }; }

#define SPLD_VEC3_EXTERNAL_OP_S_ARITH(vecName, vecSize, eltType) \
	vecName##3 operator+(const vecName##3& u) { return u; } \
	vecName##3 operator-(const vecName##3& u) { return { -u.x, -u.y, -u.z }; }

#define SPLD_VEC4_EXTERNAL_OP_S_ARITH(vecName, vecSize, eltType) \
	vecName##4 operator+(const vecName##4& u) { return u; } \
	vecName##4 operator-(const vecName##4& u) { return { -u.x, -u.y, -u.z, -u.w }; }

// Component-wise logical operators (bool vectors), vec-vec / vec-scalar /
// scalar-vec, expanded per component count.
#define SPLD_VEC2_EXTERNAL_OP_LOGIC(vecName, vecSize, eltType) \
	vecName##2 operator||(const vecName##2& u, const vecName##2& v) { return { u.x || v.x, u.y || v.y }; } \
	vecName##2 operator&&(const vecName##2& u, const vecName##2& v) { return { u.x && v.x, u.y && v.y }; } \
	\
	vecName##2 operator||(const vecName##2& u, eltType x) { return { u.x || x, u.y || x }; } \
	vecName##2 operator&&(const vecName##2& u, eltType x) { return { u.x && x, u.y && x }; } \
	\
	vecName##2 operator||(eltType x, const vecName##2& u) { return { u.x || x, u.y || x }; } \
	vecName##2 operator&&(eltType x, const vecName##2& u) { return { u.x && x, u.y && x }; }

#define SPLD_VEC3_EXTERNAL_OP_LOGIC(vecName, vecSize, eltType) \
	vecName##3 operator||(const vecName##3& u, const vecName##3& v) { return { u.x || v.x, u.y || v.y, u.z || v.z }; } \
	vecName##3 operator&&(const vecName##3& u, const vecName##3& v) { return { u.x && v.x, u.y && v.y, u.z && v.z }; } \
	\
	vecName##3 operator||(const vecName##3& u, eltType x) { return { u.x || x, u.y || x, u.z || x }; } \
	vecName##3 operator&&(const vecName##3& u, eltType x) { return { u.x && x, u.y && x, u.z && x }; } \
	\
	vecName##3 operator||(eltType x, const vecName##3& u) { return { u.x || x, u.y || x, u.z || x }; } \
	vecName##3 operator&&(eltType x, const vecName##3& u) { return { u.x && x, u.y && x, u.z && x }; }

#define SPLD_VEC4_EXTERNAL_OP_LOGIC(vecName, vecSize, eltType) \
	vecName##4 operator||(const vecName##4& u, const vecName##4& v) { return { u.x || v.x, u.y || v.y, u.z || v.z, u.w || v.w }; } \
	vecName##4 operator&&(const vecName##4& u, const vecName##4& v) { return { u.x && v.x, u.y && v.y, u.z && v.z, u.w && v.w }; } \
	\
	vecName##4 operator||(const vecName##4& u, eltType x) { return { u.x || x, u.y || x, u.z || x, u.w || x }; } \
	vecName##4 operator&&(const vecName##4& u, eltType x) { return { u.x && x, u.y && x, u.z && x, u.w && x }; } \
	\
	vecName##4 operator||(eltType x, const vecName##4& u) { return { u.x || x, u.y || x, u.z || x, u.w || x }; } \
	vecName##4 operator&&(eltType x, const vecName##4& u) { return { u.x && x, u.y && x, u.z && x, u.w && x }; }

// Equality: all components equal; inequality: any component differs.
#define SPLD_VEC2_EXTERNAL_OP_CMP(vecName, vecSize, eltType) \
	bool operator==(const vecName##2& u, const vecName##2& v) { return u.x == v.x && u.y == v.y; } \
	bool operator!=(const vecName##2& u, const vecName##2& v) { return u.x != v.x || u.y != v.y; }

#define SPLD_VEC3_EXTERNAL_OP_CMP(vecName, vecSize, eltType) \
	bool operator==(const vecName##3& u, const vecName##3& v) { return u.x == v.x && u.y == v.y && u.z == v.z; } \
	bool operator!=(const vecName##3& u, const vecName##3& v) { return u.x != v.x || u.y != v.y || u.z != v.z; }

#define SPLD_VEC4_EXTERNAL_OP_CMP(vecName, vecSize, eltType) \
	bool operator==(const vecName##4& u, const vecName##4& v) { return u.x == v.x && u.y == v.y && u.z == v.z && u.w == v.w; } \
	bool operator!=(const vecName##4& u, const vecName##4& v) { return u.x != v.x || u.y != v.y || u.z != v.z || u.w != v.w; }

// Stream output.
// NOTE(review): the printed prefix is hard-coded as "vec2("/"vec3("/"vec4("
// for every element type (e.g. an ivec2 also prints "vec2(") — confirm
// whether the prefix should use vecName instead.
#define SPLD_VEC2_EXTERNAL_OP_STREAM(vecName, vecSize, eltType) \
	std::ostream& operator<<(std::ostream& stream, const vecName##vecSize##& u) { stream << "vec2(" << u.x << ", " << u.y << ")"; return stream; }

#define SPLD_VEC3_EXTERNAL_OP_STREAM(vecName, vecSize, eltType) \
	std::ostream& operator<<(std::ostream& stream, const vecName##vecSize##& u) { stream << "vec3(" << u.x << ", " << u.y << ", " << u.z << ")"; return stream; }

#define SPLD_VEC4_EXTERNAL_OP_STREAM(vecName, vecSize, eltType) \
	std::ostream& operator<<(std::ostream& stream, const vecName##vecSize##& u) \
	{ \
		stream << "vec4(" << u.x << ", " << u.y << ", " << u.z << ", " << u.w << ")"; \
		return stream; \
	}

// Vec special functions

// GLSL-style free functions; only real-valued (float/double) vectors get them.
#define SPLD_VEC_SPECIAL_FUNC(vecName, vecSize, eltType) SPLD_##vecName##_SPECIAL_FUNC(vecName, vecSize, eltType)

#define SPLD_vec_SPECIAL_FUNC(vecName, vecSize, eltType) \
	SPLD_VEC_REAL_FUNC(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_REAL_FUNC(vecName, vecSize, eltType)

#define SPLD_dvec_SPECIAL_FUNC(vecName, vecSize, eltType) \
	SPLD_VEC_REAL_FUNC(vecName, vecSize, eltType) \
	SPLD_VEC##vecSize##_REAL_FUNC(vecName, vecSize, eltType)

// Integer and bool vectors have no special functions.
#define SPLD_ivec_SPECIAL_FUNC(vecName, vecSize, eltType)
#define SPLD_uvec_SPECIAL_FUNC(vecName, vecSize, eltType)
#define SPLD_bvec_SPECIAL_FUNC(vecName, vecSize, eltType)

// Size-independent real-vector functions (GLSL 8.1/8.3/8.4 equivalents).
#define SPLD_VEC_REAL_FUNC(vecName, vecSize, eltType) \
	vecName##vecSize radians(const vecName##vecSize& u) { return u * scp::pi / 180.0; } \
	vecName##vecSize degrees(const vecName##vecSize& u) { return u * 180.0 / scp::pi; } \
	vecName##vecSize inversesqrt(const vecName##vecSize& u) { return 1.0 / sqrt(u); } \
	vecName##vecSize fract(const vecName##vecSize& u) { return u - floor(u); } \
	vecName##vecSize mod(const vecName##vecSize& u, const vecName##vecSize& v) { return u - v * floor(u / v); } \
	vecName##vecSize mod(const vecName##vecSize& u, eltType x) { return u - x * floor(u / x); } \
	eltType length(const vecName##vecSize##& u) { return std::sqrt(dot(u, u)); } \
	eltType distance(const vecName##vecSize##& u, const vecName##vecSize##& v) { return length(v - u); } \
	vecName##vecSize normalize(const vecName##vecSize##& u) { return u / length(u); } \
	vecName##vecSize faceforward(const vecName##vecSize##& n, const vecName##vecSize##& i, const vecName##vecSize##& nRef) { return dot(nRef, i) < 0 ? n : -n; } \
	vecName##vecSize reflect(const vecName##vecSize##& i, const vecName##vecSize##& n) { return i - 2 * dot(n, i) * n; } \
	vecName##vecSize refract(const vecName##vecSize##& i, const vecName##vecSize##& n, eltType eta) \
	{ \
		const eltType d = dot(n, i); \
		const eltType k = 1 - eta * eta * (1 - d * d); \
		return k < 0 ? 0 : eta * i - (eta * d + std::sqrt(k)) * n; \
	}

// Per-size component-wise math wrappers over <cmath>, plus dot (and, for
// size 3, cross).
#define SPLD_VEC2_REAL_FUNC(vecName, vecSize, eltType) \
	vecName##2 sin(const vecName##2& u) { return { std::sin(u.x), std::sin(u.y) }; } \
	vecName##2 cos(const vecName##2& u) { return { std::cos(u.x), std::cos(u.y) }; } \
	vecName##2 tan(const vecName##2& u) { return { std::tan(u.x), std::tan(u.y) }; } \
	vecName##2 asin(const vecName##2& u) { return { std::asin(u.x), std::asin(u.y) }; } \
	vecName##2 acos(const vecName##2& u) { return { std::acos(u.x), std::acos(u.y) }; } \
	vecName##2 atan(const vecName##2& y, const vecName##2& x) { return { std::atan2(y.x, x.x), std::atan2(y.y, x.y) }; } \
	vecName##2 atan(const vecName##2& u) { return { std::atan(u.x), std::atan(u.y) }; } \
	vecName##2 sinh(const vecName##2& u) { return { std::sinh(u.x), std::sinh(u.y) }; } \
	vecName##2 cosh(const vecName##2& u) { return { std::cosh(u.x), std::cosh(u.y) }; } \
	vecName##2 tanh(const vecName##2& u) { return { std::tanh(u.x), std::tanh(u.y) }; } \
	vecName##2 asinh(const vecName##2& u) { return { std::asinh(u.x), std::asinh(u.y) }; } \
	vecName##2 acosh(const vecName##2& u) { return { std::acosh(u.x), std::acosh(u.y) }; } \
	vecName##2 atanh(const vecName##2& u) { return { std::atanh(u.x), std::atanh(u.y) }; } \
	vecName##2 pow(const vecName##2& u, const vecName##2& p) { return { std::pow(u.x, p.x), std::pow(u.y, p.y) }; } \
	vecName##2 exp(const vecName##2& u) { return { std::exp(u.x), std::exp(u.y) }; } \
	vecName##2 log(const vecName##2& u) { return { std::log(u.x), std::log(u.y) }; } \
	vecName##2 exp2(const vecName##2& u) { return { std::exp2(u.x), std::exp2(u.y) }; } \
	vecName##2 log2(const vecName##2& u) { return { std::log2(u.x), std::log2(u.y) }; } \
	vecName##2 sqrt(const vecName##2& u) { return { std::sqrt(u.x), std::sqrt(u.y) }; } \
	vecName##2 floor(const vecName##2& u) { return { std::floor(u.x), std::floor(u.y) }; } \
	vecName##2 trunc(const vecName##2& u) { return { std::trunc(u.x), std::trunc(u.y) }; } \
	vecName##2 round(const vecName##2& u) { return { std::round(u.x), std::round(u.y) }; } \
	vecName##2 ceil(const vecName##2& u) { return { std::ceil(u.x), std::ceil(u.y) }; } \
	eltType dot(const vecName##2& u, const vecName##2& v) { return u.x * v.x + u.y * v.y; }

#define SPLD_VEC3_REAL_FUNC(vecName, vecSize, eltType) \
	vecName##3 sin(const vecName##3& u) { return { std::sin(u.x), std::sin(u.y), std::sin(u.z) }; } \
	vecName##3 cos(const vecName##3& u) { return { std::cos(u.x), std::cos(u.y), std::cos(u.z) }; } \
	vecName##3 tan(const vecName##3& u) { return { std::tan(u.x), std::tan(u.y), std::tan(u.z) }; } \
	vecName##3 asin(const vecName##3& u) { return { std::asin(u.x), std::asin(u.y), std::asin(u.z) }; } \
	vecName##3 acos(const vecName##3& u) { return { std::acos(u.x), std::acos(u.y), std::acos(u.z) }; } \
	vecName##3 atan(const vecName##3& y, const vecName##3& x) { return { std::atan2(y.x, x.x), std::atan2(y.y, x.y), std::atan2(y.z, x.z) }; } \
	vecName##3 atan(const vecName##3& u) { return { std::atan(u.x), std::atan(u.y), std::atan(u.z) }; } \
	vecName##3 sinh(const vecName##3& u) { return { std::sinh(u.x), std::sinh(u.y), std::sinh(u.z) }; } \
	vecName##3 cosh(const vecName##3& u) { return { std::cosh(u.x), std::cosh(u.y), std::cosh(u.z) }; } \
	vecName##3 tanh(const vecName##3& u) { return { std::tanh(u.x), std::tanh(u.y), std::tanh(u.z) }; } \
	vecName##3 asinh(const vecName##3& u) { return { std::asinh(u.x), std::asinh(u.y), std::asinh(u.z) }; } \
	vecName##3 acosh(const vecName##3& u) { return { std::acosh(u.x), std::acosh(u.y), std::acosh(u.z) }; } \
	vecName##3 atanh(const vecName##3& u) { return { std::atanh(u.x), std::atanh(u.y), std::atanh(u.z) }; } \
	vecName##3 pow(const vecName##3& u, const vecName##3& p) { return { std::pow(u.x, p.x), std::pow(u.y, p.y), std::pow(u.z, p.z) }; } \
	vecName##3 exp(const vecName##3& u) { return { std::exp(u.x), std::exp(u.y), std::exp(u.z) }; } \
	vecName##3 log(const vecName##3& u) { return { std::log(u.x), std::log(u.y), std::log(u.z) }; } \
	vecName##3 exp2(const vecName##3& u) { return { std::exp2(u.x), std::exp2(u.y), std::exp2(u.z) }; } \
	vecName##3 log2(const vecName##3& u) { return { std::log2(u.x), std::log2(u.y), std::log2(u.z) }; } \
	vecName##3 sqrt(const vecName##3& u) { return { std::sqrt(u.x), std::sqrt(u.y), std::sqrt(u.z) }; } \
	vecName##3 floor(const vecName##3& u) { return { std::floor(u.x), std::floor(u.y), std::floor(u.z) }; } \
	vecName##3 trunc(const vecName##3& u) { return { std::trunc(u.x), std::trunc(u.y), std::trunc(u.z) }; } \
	vecName##3 round(const vecName##3& u) { return { std::round(u.x), std::round(u.y), std::round(u.z) }; } \
	vecName##3 ceil(const vecName##3& u) { return { std::ceil(u.x), std::ceil(u.y), std::ceil(u.z) }; } \
	eltType dot(const vecName##3& u, const vecName##3& v) { return u.x * v.x + u.y * v.y + u.z * v.z; } \
	vecName##3 cross(const vecName##3& u, const vecName##3& v) { return { u[1]*v[2] - u[2]*v[1], u[2]*v[0] - u[0]*v[2], u[0]*v[1] - u[1]*v[0] }; }

#define SPLD_VEC4_REAL_FUNC(vecName, vecSize, eltType) \
	vecName##4 sin(const vecName##4& u) { return { std::sin(u.x), std::sin(u.y), std::sin(u.z), std::sin(u.w) }; } \
	vecName##4 cos(const vecName##4& u) { return { std::cos(u.x), std::cos(u.y), std::cos(u.z), std::cos(u.w) }; } \
	vecName##4 tan(const vecName##4& u) { return { std::tan(u.x), std::tan(u.y), std::tan(u.z), std::tan(u.w) }; } \
	vecName##4 asin(const vecName##4& u) { return { std::asin(u.x), std::asin(u.y), std::asin(u.z), std::asin(u.w) }; } \
	vecName##4 acos(const vecName##4& u) { return { std::acos(u.x), std::acos(u.y), std::acos(u.z), std::acos(u.w) }; } \
	vecName##4 atan(const vecName##4& y, const vecName##4& x) { return { std::atan2(y.x, x.x), std::atan2(y.y, x.y), std::atan2(y.z, x.z), std::atan2(y.w, x.w) }; }\
	vecName##4 atan(const vecName##4& u) { return { std::atan(u.x), std::atan(u.y), std::atan(u.z), std::atan(u.w) }; } \
	vecName##4 sinh(const vecName##4& u) { return { std::sinh(u.x), std::sinh(u.y), std::sinh(u.z), std::sinh(u.w) }; } \
	vecName##4 cosh(const vecName##4& u) { return { std::cosh(u.x), std::cosh(u.y), std::cosh(u.z), std::cosh(u.w) }; } \
	vecName##4 tanh(const vecName##4& u) { return { std::tanh(u.x), std::tanh(u.y), std::tanh(u.z), std::tanh(u.w) }; } \
	vecName##4 asinh(const vecName##4& u) { return { std::asinh(u.x), std::asinh(u.y), std::asinh(u.z), std::asinh(u.w) }; } \
	vecName##4 acosh(const vecName##4& u) { return { std::acosh(u.x), std::acosh(u.y), std::acosh(u.z), std::acosh(u.w) }; } \
	vecName##4 atanh(const vecName##4& u) { return { std::atanh(u.x), std::atanh(u.y), std::atanh(u.z), std::atanh(u.w) }; } \
	vecName##4 pow(const vecName##4& u, const vecName##4& p) { return { std::pow(u.x, p.x), std::pow(u.y, p.y), std::pow(u.z, p.z), std::pow(u.w, p.w) }; } \
	vecName##4 exp(const vecName##4& u) { return { std::exp(u.x), std::exp(u.y), std::exp(u.z), std::exp(u.w) }; } \
	vecName##4 log(const vecName##4& u) { return { std::log(u.x), std::log(u.y), std::log(u.z), std::log(u.w) }; } \
	vecName##4 exp2(const vecName##4& u) { return { std::exp2(u.x), std::exp2(u.y), std::exp2(u.z), std::exp2(u.w) }; } \
	vecName##4 log2(const vecName##4& u) { return { std::log2(u.x), std::log2(u.y), std::log2(u.z), std::log2(u.w) }; } \
	vecName##4 sqrt(const vecName##4& u) { return { std::sqrt(u.x), std::sqrt(u.y), std::sqrt(u.z), std::sqrt(u.w) }; } \
	vecName##4 floor(const vecName##4& u) { return { std::floor(u.x), std::floor(u.y), std::floor(u.z), std::floor(u.w) }; } \
	vecName##4 trunc(const vecName##4& u) { return { std::trunc(u.x), std::trunc(u.y), std::trunc(u.z), std::trunc(u.w) }; } \
	vecName##4 round(const vecName##4& u) { return { std::round(u.x), std::round(u.y), std::round(u.z), std::round(u.w) }; } \
	vecName##4 ceil(const vecName##4& u) { return { std::ceil(u.x), std::ceil(u.y), std::ceil(u.z), std::ceil(u.w)}; } \
	eltType dot(const vecName##4& u, const vecName##4& v) { return u.x * v.x + u.y * v.y + u.z * v.z + u.w * v.w; }

// Instantiate every vector type. bvec uses uint32_t as its storage element.
namespace spl
{
	SPLD_VEC(vec, 2, float);
	SPLD_VEC(vec, 3, float);
	SPLD_VEC(vec, 4, float);

	SPLD_VEC(dvec, 2, double);
	SPLD_VEC(dvec, 3, double);
	SPLD_VEC(dvec, 4, double);

	SPLD_VEC(ivec, 2, int32_t);
	SPLD_VEC(ivec, 3, int32_t);
	SPLD_VEC(ivec, 4, int32_t);

	SPLD_VEC(uvec, 2, uint32_t);
	SPLD_VEC(uvec, 3, uint32_t);
	SPLD_VEC(uvec, 4, uint32_t);

	SPLD_VEC(bvec, 2, uint32_t);
	SPLD_VEC(bvec, 3, uint32_t);
	SPLD_VEC(bvec, 4, uint32_t);
}
|
#!/bin/bash
# Opens (or reuses) a macOS Terminal window cd'ed into the current directory,
# clearing the screen afterwards.

# Command executed inside Terminal; fish chains with "; and" instead of "&&".
CD_CMD="cd "\\\"$(pwd)\\\"" && clear"
if echo "$SHELL" | grep -E "/fish$" &> /dev/null; then
	CD_CMD="cd "\\\"$(pwd)\\\""; and clear"
fi

# On OS X < 10.7 "do script" needs an explicit "in window 1" target.
# Compare major/minor numerically: the previous `expr $VERSION '<' 10.7.0`
# performed a *string* comparison, misclassifying e.g. 10.10.x as older
# than 10.7.
VERSION=$(sw_vers -productVersion)
IFS=. read -r OS_MAJOR OS_MINOR _ <<< "$VERSION"
if (( OS_MAJOR < 10 || (OS_MAJOR == 10 && OS_MINOR < 7) )); then
	IN_WINDOW="in window 1"
fi

osascript<<END
try
tell application "System Events"
if (count(processes whose name is "Terminal")) is 0 then
tell application "Terminal"
activate
do script "$CD_CMD" $IN_WINDOW
end tell
else
tell application "Terminal"
activate
do script "$CD_CMD"
end tell
end if
end tell
end try
END
# Downloads ENCODE Broad ChromHMM segmentations for two cell types, keeps the
# transcriptionally "active" chromatin states, and computes per-window
# coverage of those active regions at the requested resolution.
#
# Usage: <script> <resolution_in_bp>   (e.g. 25000)
set -e
RES=$1
# Resolution in kilobases, used for output-file naming only.
RES_KB=$(($RES/1000))
mkdir -p binding_data
cd binding_data
for CELL_TYPE in Gm12878 K562
do
# Fetch and unpack the ChromHMM segmentation once per cell type.
if [ ! -e wgEncodeBroadHmm$CELL_TYPE"HMM".bed ]
then
curl http://hgdownload.cse.ucsc.edu/goldenPath/hg19/encodeDCC/wgEncodeBroadHmm/wgEncodeBroadHmm$CELL_TYPE"HMM".bed.gz -o wgEncodeBroadHmm$CELL_TYPE"HMM".bed.gz
gunzip wgEncodeBroadHmm$CELL_TYPE"HMM".bed.gz
fi
# Keep only the "active" ChromHMM states (promoters, enhancers,
# transcription); drops heterochromatin/repressed/insulator states.
if [ ! -e $CELL_TYPE"_active".bed ]
then
cat wgEncodeBroadHmm$CELL_TYPE"HMM".bed | awk '$4 == "1_Active_Promoter" || $4 == "2_Weak_Promoter" || $4 == "3_Poised_Promoter" || $4 == "4_Strong_Enhancer" || $4 == "5_Strong_Enhancer" || $4 == "6_Weak_Enhancer" || $4 == "7_Weak_Enhancer" || $4 == "9_Txn_Transition" || $4 == "10_Txn_Elongation" || $4 == "11_Weak_Txn" {print $0}' > $CELL_TYPE"_active".bed
fi
# Tile hg19 into fixed-size windows (cell-type independent; guarded by -e
# so it is only built on the first iteration).
WINDOW_FILE=hg19_${RES_KB}kb_windows.bed
if [ ! -e $WINDOW_FILE ]
then
curl http://hgdownload.cse.ucsc.edu/goldenPath/hg19/bigZips/hg19.chrom.sizes -o hg19.chrom.sizes
bedtools makewindows -g hg19.chrom.sizes -w $RES > $WINDOW_FILE
fi
# Coverage of active regions over each window.
COVERAGE_FILE=$CELL_TYPE"_"${RES_KB}kb_active_coverage.bed
if [ ! -e $COVERAGE_FILE ]
then
bedtools coverage -a $WINDOW_FILE -b $CELL_TYPE"_active".bed > $COVERAGE_FILE
fi
# Split the coverage file per autosome (chr1..chr22; sex chromosomes are
# intentionally excluded — confirm if chrX/chrY are needed).
for CHROM in `seq 22`
do
if [ ! -e $CELL_TYPE"_"$CHROM"_"${RES_KB}kb_active_coverage.bed ]
then
cat $COVERAGE_FILE | awk -v chrom=chr$CHROM '$1 == chrom {print $0}' > $CELL_TYPE"_"$CHROM"_"${RES_KB}kb_active_coverage.bed
fi
done
done
cd ..
|
#!/bin/bash
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.

# Builds the project with wolfSSL enabled and runs the unit tests.
# All path expansions are quoted so the script survives checkout paths
# containing spaces (the previous version broke on them).

# Repository root: parent of the directory containing this script.
build_root=$(cd "$(dirname "$0")/.." && pwd)
cd "$build_root"

build_folder="$build_root/cmake/wolfssl"

# Default job count: Linux via /proc/cpuinfo, macOS via sysctl.
CORES=$(grep -c ^processor /proc/cpuinfo 2>/dev/null || sysctl -n hw.ncpu)

# Always start from a clean build tree.
rm -r -f "$build_folder"
mkdir -p "$build_folder"
pushd "$build_folder"
cmake "$build_root" -Drun_unittests:BOOL=ON -Duse_wolfssl:BOOL=ON
make --jobs="$CORES"
ctest -j "$CORES" --output-on-failure
popd
#!/bin/bash
# Start up the server in a way that won't block Travis.
npm start &
# Give the backgrounded server a moment to come up before the build goes on.
sleep 1
echo Hobknob started
|
#
# Dump every document in the local CouchDB "yomikatari" database.
# The URL is quoted: "?" is a shell glob character, and the
# include_docs=true query string must reach curl intact.
curl "http://localhost:5984/yomikatari/_all_docs?include_docs=true"
|
import React from "react";
import { useIntl } from "react-intl";
import { Box } from "@material-ui/core";
import { Helmet } from "react-helmet";
import { FormattedMessage } from "react-intl";
import { Typography } from "@material-ui/core";
import "./404.scss";
import AnimalImage from "../../images/404/animal.svg";
import CustomButton from "../../components/CustomButton";
import Astronaut from "../../components/Astronaut";
import { withPrefix } from "gatsby";
const NotFound = () => {
const intl = useIntl();
return (
<Box
alignItems="center"
className="notFoundPage"
display="flex"
flexDirection="column"
height={1}
justifyContent="center"
>
<Helmet>
<title>
{`${intl.formatMessage({
id: "notfound.title",
})} | ${intl.formatMessage({ id: "product" })}`}
</title>
</Helmet>
<Astronaut />
<Typography variant="h1">
<FormattedMessage id="notfound.why" />
</Typography>
<Typography className="desc" variant="body1">
<FormattedMessage id="notfound.desc" />
</Typography>
<img src={AnimalImage} />
<CustomButton className="bgPurple" href={withPrefix("dashboard")}>
<FormattedMessage id="notfound.home" />
</CustomButton>
<Typography className="code" variant="h3">
404
</Typography>
</Box>
);
};
export default NotFound;
|
#include "../comms/commands.h"
#include "../comms/telegramBot.h"
#include "../lights/lights.h"
#include "../utils/time.h"
// Supported Telegram bot commands. Index order must match cmd_jump_table
// in messageHandler().
const char* commands[] = {
	"/stats",
	"/geek_stats",
	"/toggle_lights",
	"/increase",
	"/decrease",
	"/toggle_timer",
	"/state"
};
// Number of entries in `commands`.
const uint8_t n_commands = sizeof(commands)/ sizeof(commands[0]);

// Fixed reply buffers; replies[i] is the slot a handler writes into for
// message i (up to three messages handled per poll).
char reply1[MAX_REPLY_LENGTH];
char reply2[MAX_REPLY_LENGTH];
char reply3[MAX_REPLY_LENGTH];
char* replies[] = {reply1, reply2, reply3};
// Forward declarations: dispatcher, parser, and one handler per command.
// Each handler formats its reply into replies[i].
void messageHandler(const char *message, int i);
int8_t parseCommand(const char *message);
void cmd_stats(int i);
void cmd_geekstats(int i);
void cmd_toggleLights(int i);
void cmd_increaseBrightness(int i);
void cmd_decreaseBrightness(int i);
void cmd_toggleTimer(int i);
void cmd_timeOfDay(int i);
void cmd_unknown(int i);
// Dispatches `message` to the matching command handler; the chosen handler
// writes its reply into replies[i]. Unknown input gets cmd_unknown.
void messageHandler(const char *message, int i){
	// Function-pointer table parallel to `commands` (same order).
	// NOTE(review): declared PROGMEM but indexed like a RAM array; fine on
	// ESP8266 (flash is memory-mapped), would be wrong on AVR — confirm the
	// build target.
	static void (*const cmd_jump_table[n_commands])(int) PROGMEM = {
		cmd_stats,
		cmd_geekstats,
		cmd_toggleLights,
		cmd_increaseBrightness,
		cmd_decreaseBrightness,
		cmd_toggleTimer,
		cmd_timeOfDay,
	};
	int8_t command = parseCommand(message);
	// parseCommand returns -1 for "not found"; the upper bound also guards
	// against any future table/commands size mismatch.
	if(command < 0 || command >= n_commands){
		cmd_unknown(i);
	}
	else{
		cmd_jump_table[command](i);
	}
}
// Maps a message onto its index in `commands`, or -1 when it is not an
// exactly-matching "/command" (strcmp: no arguments supported).
int8_t parseCommand(const char *message){
	if (message[0] != '/'){
		return -1;
	}
	for (uint8_t i = 0; i < n_commands; i++){
		if (strcmp(message, commands[i]) == 0){
			return (int8_t)i;
		}
	}
	return -1;
}
// /stats: report uptime as days/hours/minutes/seconds plus a reply counter.
void cmd_stats(int i){
	uint32_t time= getUptimeSeconds();
	// Successively peel days, hours and minutes off the total seconds.
	uint32_t day = time / (24 * 3600);
	time = time % (24 * 3600);
	uint8_t hour = time / 3600;
	time %= 3600;
	uint8_t min = time / 60;
	time %= 60;
	uint8_t sec = time;
	// +1 counts the reply currently being composed.
	snprintf_P(
		replies[i], MAX_REPLY_LENGTH,
		(PGM_P)F("I've been alive for: %lud %uh %um %us\nI've replied %lu times"),
		day,hour,min,sec,messages_replied+1
	);
}
// /geek_stats: report the ESP reset reason, free heap and heap fragmentation.
void cmd_geekstats(int i){
	snprintf_P(
		replies[i], MAX_REPLY_LENGTH,
		(PGM_P)F("Last reset reason: %s\nFree Heap: %luB\nFragmentation: %u%%"),
		ESP.getResetReason().c_str(),ESP.getFreeHeap(), ESP.getHeapFragmentation()
	);
}
// /toggle_lights: flip the lights and confirm the resulting state
// (toggleLights() returns the new on/off state).
void cmd_toggleLights(int i){
	if(toggleLights()){
		strcpy_P(replies[i],(PGM_P)F("It's bright!"));
	}
	else{
		strcpy_P(replies[i],(PGM_P)F("It's dark!"));
	}
}
// /increase: raise brightness one step and report the new level.
// brightness/10 yields a percentage, which suggests a 0-1000 internal
// scale — confirm against lights.h.
void cmd_increaseBrightness(int i){
	adjustBrightness(1);
	snprintf_P(
		replies[i], MAX_REPLY_LENGTH,
		(PGM_P)F("The lights are increased to %u%%"),
		brightness/10
	);
}
// /decrease: lower brightness one step (adjustBrightness(0)) and report the
// new level as a percentage (see scale note on cmd_increaseBrightness).
void cmd_decreaseBrightness(int i){
	adjustBrightness(0);
	snprintf_P(
		replies[i], MAX_REPLY_LENGTH,
		(PGM_P)F("The lights are dimmed to %u%%"),
		brightness/10
	);
}
// /toggle_timer: switch between timer-driven and manual control
// (toggleTimer() returns the new mode).
void cmd_toggleTimer(int i){
	if(toggleTimer()){
		strcpy_P(replies[i],(PGM_P)F("Timer mode active"));
	}
	else{
		strcpy_P(replies[i],(PGM_P)F("I'll follow your commands"));
	}
}
// /state: refresh NTP time and the light state, then report the timer phase
// and effective brightness. lights_on appears to act as a 0/1 multiplier so
// the reported level is 0 while the lights are off — confirm in lights.h.
void cmd_timeOfDay(int i){
	initTimeClient();
	updateLights();
	snprintf_P(
		replies[i], MAX_REPLY_LENGTH,
		(PGM_P)F("It's now %s and the lights are at %u%%"),
		timer_chars[timer_state], brightness/10*lights_on
	);
}
// Fallback for unrecognized input (parseCommand returned -1).
void cmd_unknown(int i){
	strcpy_P(replies[i], (PGM_P)F("Command not understood"));
}
|
def most_common_words(text):
    """Return the words of `text` ordered from most to least frequent.

    Splitting is whitespace-based and case-sensitive; punctuation stays
    attached to its word. Ties keep first-appearance order: Counter preserves
    insertion order and most_common's sort is stable, matching the original
    hand-rolled dict + sorted(reverse=True) behavior.

    Args:
        text: Arbitrary text; the empty string yields an empty list.

    Returns:
        list[str]: unique words, most frequent first.
    """
    from collections import Counter  # local import keeps the snippet self-contained
    frequency = Counter(text.split())
    return [word for word, _count in frequency.most_common()]
package segment_tree;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* 백준 14427번: 수열과 쿼리 15
*
* @see https://www.acmicpc.net/problem/14427/
*
*/
public class Boj14427 {
	private static final String NEW_LINE = "\n";
	// Sentinel larger than any allowed value (values fit in an int up to 1e9).
	private static final int INF = 1_000_000_001;
	// 1-indexed segment tree over the sequence; leaves start at index S.
	private static Pair[] seg;

	// Segment-tree node: the minimum value in the range and the tree index of
	// the leaf holding it.
	private static class Pair{
		int value;
		int idx;

		public Pair(int value, int idx) {
			this.value = value;
			this.idx = idx;
		}
	}

	public static void main(String[] args) throws Exception{
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		StringBuilder sb = new StringBuilder();

		int N = Integer.parseInt(br.readLine());
		// S: smallest power of two >= N, so the tree is a perfect binary tree
		// with leaves seg[S .. 2S-1].
		int S = 1;
		while(S < N) S <<= 1;

		seg = new Pair[S * 2];
		// Pad every slot with INF so unused leaves never win the minimum.
		for(int i = 0; i < seg.length; i++) {
			seg[i] = new Pair(INF, i);
		}

		StringTokenizer st = new StringTokenizer(br.readLine());
		// Load the N sequence values into the leaves.
		for(int i = S; i < S + N; i++) {
			seg[i] = new Pair(Integer.parseInt(st.nextToken()), i);
		}

		init();
		int M = Integer.parseInt(br.readLine());

		while(M-- > 0) {
			st = new StringTokenizer(br.readLine());
			int query = Integer.parseInt(st.nextToken());
			int index = 0, chVal = 0;

			if(query == 1) {
				index = Integer.parseInt(st.nextToken());
				chVal = Integer.parseInt(st.nextToken());
				change(index + S - 1, chVal); // query 1: point-update a value
			}

			else {
				// query 2: 1-based sequence index of the minimum value
				// (root seg[1] holds the global minimum; idx - S + 1 maps
				// the leaf index back to the sequence position).
				sb.append(seg[1].idx - S + 1).append(NEW_LINE);
			}
		}

		System.out.println(sb);
	}

	// Builds all internal nodes bottom-up; i and i-1 are a sibling pair, and
	// their combined Pair becomes the parent at i/2.
	private static void init() {
		for(int i = seg.length - 1; i > 0; i -= 2) {
			Pair p = getPair(i, -1);
			seg[i / 2] = new Pair(p.value, p.idx);
		}
	}

	// Point update: overwrite the leaf, then re-combine sibling pairs up to
	// the root.
	private static void change(int target, int num) {
		seg[target] = new Pair(num, target);

		while(target / 2 != 0) {
			Pair p = new Pair(-1, -1);
			// The sibling is at target+1 for an even (left) index and
			// target-1 for an odd (right) index.
			if(target % 2 == 0) p = getPair(target, 1);
			else p = getPair(target, -1);

			seg[target / 2] = new Pair(p.value, p.idx);
			target /= 2;
		}
	}

	// Combines seg[target] with its sibling seg[target + adder]: smaller
	// value wins; on equal values the smaller leaf index wins (problem
	// requires the earliest position of the minimum).
	private static Pair getPair(int target, int adder) {
		int value = Math.min(seg[target].value, seg[target + adder].value);
		int idx = seg[target].value > seg[target + adder].value ? seg[target + adder].idx : seg[target].idx;
		if(seg[target].value == seg[target + adder].value) idx = seg[target].idx < seg[target + adder].idx ? seg[target].idx : seg[target + adder].idx;
		return new Pair(value, idx);
	}
}
|
<reponame>huazai128/nest-emp2-wechat
import { PipeTransform, Injectable, ArgumentMetadata } from '@nestjs/common';
import { get } from 'lodash';
import { Request } from 'express'
/**
* session 解析
* @export
* @class SessionPipe
* @implements {PipeTransform<IRequest, IRequest>}
*/
@Injectable()
export class SessionPipe implements PipeTransform<Request, Request> {
transform(req: Request, metadata: ArgumentMetadata): Request {
const user = get(req, 'session.user') || {}
req.isLogin = !!user.userId
return req
}
} |
#!/bin/bash
# CI smoke-deploy of Kolla: writes a minimal globals.yml, creates a dummy
# neutron interface, runs kolla-ansible deploy and dumps container logs on
# failure.
#
# Usage: <script> <kolla_base_distro> <kolla_install_type>
set -o xtrace
set -o errexit
export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"

# On deploy failure: show all containers, tail the logs of every exited one,
# and fail the job.
function print_failure {
    docker ps -a
    for failed in $(docker ps -a --format "{{.Names}}" --filter status=exited); do
        docker logs --tail=all $failed
    done
    echo "FAILED"
    exit 1
}

# Populate globals.yml
cat << EOF > /etc/kolla/globals.yml
---
kolla_base_distro: "$1"
kolla_install_type: "$2"
kolla_internal_address: "169.254.169.10"
docker_pull_policy: "missing"
docker_restart_policy: "no"
network_interface: "eth0"
neutron_external_interface: "fake_interface"
EOF

# Create dummy interface for neutron
ip l a fake_interface type dummy

# Actually do the deployment
tools/kolla-ansible deploy || print_failure

# TODO(SamYaple): Actually validate that all containers are started
docker ps -a

# TODO(SamYaple): Actually do functional testing of OpenStack
|
def sort_numbers(numbers):
    """Return a new list containing the elements of ``numbers`` in ascending order.

    The input iterable is left untouched; ``sorted`` builds a fresh list.
    """
    ordered = sorted(numbers)
    return ordered
import React from 'react';
export const sandboxIco = () => (
<svg
width='12'
height='12'
viewBox='0 0 12 12'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<rect width='12' height='12' rx='2' fill='#FFC854' />
</svg>
);
export const liveIco = () => (
<svg
width='12'
height='12'
viewBox='0 0 12 12'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<rect width='12' height='12' rx='2' fill='#2FC29F' />
</svg>
);
|
#!/usr/bin/env bash
# Decrypt and import the GPG code-signing key, but only on trusted builds:
# commits to master that are not pull requests (Travis withholds encrypted
# env vars from PR builds, so this would fail there anyway).
if [ "$TRAVIS_BRANCH" = 'master' ] && [ "$TRAVIS_PULL_REQUEST" == 'false' ]; then
    # Decrypt the AES-256-CBC-encrypted key using the Travis-provided key/iv.
    openssl aes-256-cbc -K $encrypted_056348374494_key -iv $encrypted_056348374494_iv -in cd/codesigning.asc.enc -out cd/codesigning.asc -d
    gpg --fast-import cd/codesigning.asc
fi
#!/usr/bin/env bash
main() {
    # Generate keystores + extract certs for DKS and the claimant Kafka
    # consumer (plus its test harness), then cross-import certificates so
    # each side trusts the other.
    make_keystore dks-keystore.jks
    extract_public_certificate dks-keystore.jks dks.crt
    make_truststore dks.crt
    make_keystore ucfs-claimant-kafka-consumer-keystore.jks
    extract_public_certificate ucfs-claimant-kafka-consumer-keystore.jks
    make_truststore ucfs-claimant-kafka-consumer.crt
    make_keystore ucfs-claimant-kafka-consumer-tests-keystore.jks
    extract_public_certificate ucfs-claimant-kafka-consumer-tests-keystore.jks
    # DKS trusts both consumer certs; consumer trusts DKS and the RDS CA.
    import_into_truststore dks-truststore.jks ucfs-claimant-kafka-consumer.crt
    import_into_truststore dks-truststore.jks \
        ucfs-claimant-kafka-consumer-tests.crt
    import_into_truststore ucfs-claimant-kafka-consumer-truststore.jks dks.crt
    import_into_truststore ucfs-claimant-kafka-consumer-truststore.jks rds-ca-2019-eu-west-2.pem
    # PEM copies for the test containers and the DKS container image.
    extract_pems ./ucfs-claimant-kafka-consumer-tests-keystore.jks
    extract_pems ./dks-keystore.jks
    cp -v dks-crt.pem ucfs-claimant-kafka-consumer-tests-key.pem \
        ucfs-claimant-kafka-consumer-tests-crt.pem ./containers/tests
    mv -v dks-truststore.jks containers/dks
    mv -v dks-keystore.jks containers/dks
}
make_keystore() {
    # Usage: make_keystore keystore [common-name]
    # Creates a fresh JKS keystore containing one 2048-bit RSA keypair under
    # alias "cid". The CN defaults to the keystore filename without its
    # "-keystore.jks" suffix.
    local keystore=${1:?Usage: $FUNCNAME keystore [common-name]}
    local common_name=${2:-${keystore%-keystore.jks}}
    # start clean if a previous run left a keystore behind
    [[ -f $keystore ]] && rm -v $keystore
    keytool -v \
        -genkeypair \
        -keyalg RSA \
        -alias cid \
        -keystore $keystore \
        -storepass $(password) \
        -validity 365 \
        -keysize 2048 \
        -keypass $(password) \
        -dname "CN=$common_name,OU=DataWorks,O=DWP,L=Leeds,ST=West Yorkshire,C=UK"
}
make_truststore() {
    # Usage: make_truststore certificate [truststore]
    # Derives the truststore name from the certificate when not supplied,
    # removes any stale copy, then seeds it with the certificate under the
    # alias "self".
    local certificate=${1:?Usage: $FUNCNAME certificate [truststore]}
    local truststore=${2:-${certificate%.crt}-truststore.jks}
    if [[ -f $truststore ]]; then
        rm -v $truststore
    fi
    import_into_truststore $truststore $certificate self
}
extract_public_certificate() {
    # Usage: extract_public_certificate keystore [certificate]
    # Exports the public certificate for alias "cid" from the keystore.
    # The output filename defaults to the keystore name with the
    # "-keystore.jks" suffix replaced by ".crt".
    local keystore=${1:?Usage: $FUNCNAME keystore [certificate]}
    local certificate=${2:-${keystore%-keystore.jks}.crt}
    # overwrite any certificate from a previous run
    [[ -f $certificate ]] && rm -v $certificate
    keytool -v \
        -exportcert \
        -keystore $keystore \
        -storepass $(password) \
        -alias cid \
        -file $certificate
}
import_into_truststore() {
    # Usage: import_into_truststore truststore certificate [alias]
    # Imports a certificate as a trusted entry; the alias defaults to the
    # certificate filename without its ".crt" suffix.
    local truststore=${1:?Usage: $FUNCNAME truststore certificate}
    local certificate=${2:?Usage: $FUNCNAME truststore certificate}
    local alias=${3:-${certificate%.crt}}
    keytool -v -importcert -noprompt -trustcacerts \
            -keystore $truststore \
            -storepass $(password) \
            -alias $alias \
            -file $certificate
}
extract_pems() {
    # Usage: extract_pems [keystore [key-pem [crt-pem]]]
    # Converts a JKS keystore into separate PEM key and certificate files via
    # an intermediate PKCS12 store (keytool cannot export private keys from
    # JKS directly; openssl can read PKCS12).
    local keystore=${1:-keystore.jks}
    local key=${2:-${keystore%-keystore.jks}-key.pem}
    local certificate=${3:-${keystore%-keystore.jks}-crt.pem}
    local intermediate_store=${keystore/jks/p12}
    local filename=$(basename $keystore)
    local alias=cid
    # remove leftovers from previous runs
    [[ -f $intermediate_store ]] && rm -v $intermediate_store
    [[ -f $key ]] && rm -v $key
    if keytool -importkeystore \
           -srckeystore $keystore \
           -srcstorepass $(password) \
           -srckeypass $(password) \
           -srcalias $alias \
           -destalias $alias \
           -destkeystore $intermediate_store \
           -deststoretype PKCS12 \
           -deststorepass $(password) \
           -destkeypass $(password); then
        # pass the password to openssl via the environment, not argv
        local pwd=$(password)
        export pwd
        # private key only (-nocerts), unencrypted (-nodes)
        openssl pkcs12 \
            -in $intermediate_store \
            -nodes \
            -nocerts \
            -password env:pwd \
            -out $key
        # certificate only (-nokeys)
        openssl pkcs12 \
            -in $intermediate_store \
            -nokeys \
            -out $certificate \
            -password env:pwd
        unset pwd
    else
        echo Failed to generate intermediate keystore $intermediate_store >&2
    fi
}
password() {
    # Single source of truth for the throwaway store/key password.
    printf '%s\n' changeit
}
|
// Minimal demo class: logs a greeting as a side effect of construction.
class Test {
    constructor() {
        console.log('hello world');
    }
}

// Instantiated eagerly, so importing this module prints the greeting once.
export const test = new Test();
|
<reponame>rotationalio/whisper
// Barrel file: re-export each style module as a named namespace so callers
// can import any of them from this single path.
export * as createSecretStyles from "./createSecretStyles";
export * as footerStyles from "./footerStyles";
export * as createSecretFormStyles from "./createSecretFormStyles";
|
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
# Build/packaging variables; IMAGE_TYPE and OUTPUT_SUFFIX are expected to be
# provided by the calling environment (NetBeans/MPLAB build harness).
TOP=`pwd`
CND_CONF=18F2550
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/AccessB.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=AccessB.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=accessb.x/
# Functions
# Aborts the script with the last command's exit code if it was non-zero.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
# Creates a directory (including parents) and optionally chmods it.
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
# Copies a file into the packaging tree and optionally chmods the copy.
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}
# Setup: recreate a clean temporary packaging tree under the build dir.
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/accessb.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file (paths are relative to TMPDIR, hence the ../../../..)
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/accessb.x.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/accessb.x.tar *
checkReturnCode
# Cleanup: drop the temporary packaging tree
cd "${TOP}"
rm -rf ${TMPDIR}
|
# Record the aggregated ROS log topic (/rosout_agg) into a bag file.
rosbag record /rosout_agg
|
<reponame>Paul-Browne/web-build-process
module.exports = function (number) {
return number + 21;
};
|
<reponame>thuann-vn/react-native-stater
import React from 'react'
import { useDispatch, useSelector } from 'react-redux'
import { AppleHeader } from '@freakycoder/react-native-header-view'
import { Screen } from '@/Components'
import { Layout } from '@/Theme'
import { useTranslation } from 'react-i18next'
import { SafeAreaView } from 'react-native-safe-area-context'
import { useTheme } from '@/Contexts/ThemeContext'
// History tab screen: renders an Apple-style large-title header with the
// signed-in user's avatar from the firebase auth profile.
// NOTE(review): dispatch, isDark and setScheme are obtained but unused —
// confirm whether they are placeholders for upcoming work.
const HistoryContainer = () => {
  const { t } = useTranslation()
  const { colors, isDark, setScheme } = useTheme()
  // firebase auth state supplies photoURL for the header avatar
  const profile = useSelector((state) => state.firebase.auth)
  const dispatch = useDispatch()
  return (
    <Screen>
      <SafeAreaView style={Layout.fill}>
        <AppleHeader
          onChangeText={(text) => console.log(text)}
          largeTitle={t('Histories')}
          imageSource={{ uri: profile.photoURL }}
          largeTitleFontColor={colors.text}
        />
      </SafeAreaView>
    </Screen>
  )
}
export default HistoryContainer
|
# Evaluate checkpoint 13 of the 512+0+512 N-VB-ADJ-ADV model on wikitext-103
# validation; inputs are augmented (shuffle/remove all but nouns in the first
# half-quarter, per --augmentation_function) and scored on the last quarter
# of tokens (per --eval_function). batch size 1, incomplete batches dropped.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-N-VB-ADJ-ADV/13-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-N-VB-ADJ-ADV/13-512+0+512-shuffled-N-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_half_quarter --eval_function last_quarter_eval
def generate_most_active_html(all_most_active: list) -> str:
    """Render each "most active" entry as an HTML ``<div>`` fragment.

    Each entry becomes ``<div class="most-active-item">…</div>`` followed by
    a newline; fragments are concatenated in input order.
    """
    fragments = (
        f'<div class="most-active-item">{entry}</div>\n'
        for entry in all_most_active
    )
    return ''.join(fragments)
<gh_stars>0
/* ---------------------------------------------------------------- *
<NAME> <<EMAIL>>
Definition of kuu::rasperi::Controller class.
* ---------------------------------------------------------------- */
#pragma once
#include <vector>
#include <memory>
class QString;
namespace kuu
{
namespace rasperi
{
class Camera;
class CameraController;
class MainWindow;
class Model;
/* ---------------------------------------------------------------- *
* ---------------------------------------------------------------- */
/* ---------------------------------------------------------------- *
   Application controller: owns the camera, camera controller and the
   main window, and exposes rasterization, model import and image export.
 * ---------------------------------------------------------------- */
class Controller
{
public:
    Controller();

    // Accessors for the scene camera and its interactive controller.
    std::shared_ptr<Camera> camera() const;
    std::shared_ptr<CameraController> cameraController() const;
    // Reference to the application's top-level window.
    MainWindow& mainWindow() const;

    // Sets the output image dimensions (w x h, presumably pixels — confirm).
    void setImageSize(int w, int h);
    // NOTE(review): `filled` presumably toggles filled vs. wireframe — confirm.
    void rasterize(bool filled);
    void showUi();
    void viewPbrSphereScene();

    // Import a single model from file, or a batch of already-loaded models.
    // NOTE(review): moveRelatedToOrigo presumably recenters models at the
    // origin — confirm in the implementation.
    bool importModel(const QString& filepath);
    bool importModels(const std::vector<Model>& models,
                      bool moveRelatedToOrigo = true);
    bool saveImage(const QString& filepath);

private:
    // PIMPL: keeps implementation details out of this header.
    struct Impl;
    std::shared_ptr<Impl> impl;
};
} // namespace rasperi
} // namespace kuu
|
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = '<EMAIL> (<NAME>)'
from datetime import datetime, time
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from models import ConflictException
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForm
from models import ConferenceQueryForms
from models import TeeShirtSize
from models import Session
from models import SessionForm
from models import SessionForms
from models import Speaker
from models import SpeakerForm
from models import SpeakerForms
from models import SpeakerQueryForm
from models import SpeakerQueryForms
from models import SessionType
from models import SessionHighlightsForm
from models import SessionSpeakerFieldForm
from utils import getUserId
from settings import WEB_CLIENT_ID
from google.appengine.api import memcache
from models import StringMessage
# OAuth scope and client ids permitted to call this API.
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
# Memcache keys for the announcements banner and the featured speaker.
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
MEMCACHE_FEATUREDSPEAKER_KEY = "FEATUREDSPEAKER"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Defaults applied to newly created Conference entities.
DEFAULTS = {
    "city": "Default City",
    "maxAttendees": 0,
    "seatsAvailable": 0,
    "topics": [ "Default", "Topic" ],
}
# Maps query-form operator names to Datastore filter operators.
OPERATORS = {
    'EQ': '=',
    'GT': '>',
    'GTEQ': '>=',
    'LT': '<',
    'LTEQ': '<=',
    'NE': '!='
}
# Maps query-form field names to Conference property names.
FIELDS = {
    'CITY': 'city',
    'TOPIC': 'topics',
    'MONTH': 'month',
    'MAX_ATTENDEES': 'maxAttendees',
}
# Request containers: combine a message body with URL path parameters.
CONF_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
)
CONF_POST_REQUEST = endpoints.ResourceContainer(
    ConferenceForm,
    websafeConferenceKey=messages.StringField(1),
)
# Defaults applied to newly created Speaker entities.
SPEAKERDEFAULTS = {
    "company": "NOT_SPECIFIED",
    "sex": "Male",
    "field": ["NOT_SPECIFIED"],
}
SESSION_CREATE_REQUEST = endpoints.ResourceContainer(
    SessionForm,
    websafeConferenceKey=messages.StringField(1),
)
CON_SESSION_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
)
CON_SES_TYPE_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
    typeOfSession=messages.EnumField(SessionType, 2),
)
# NOTE(review): "SEPAKER" is a typo, kept because renaming would break the
# endpoint methods referencing this container below.
SES_SEPAKER_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    speaker=messages.StringField(1),
)
# Defaults applied to newly created Session entities.
SESSION_DEFAULTS = {
    "highlights": ["NOT_SPECIFIED"],
}
SES_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    sessionKey=messages.StringField(1),
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api(name='conference', version='v1',
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Conference objects - - - - - - - - - - - - - - - - -
def _copyConferenceToForm(self, conf, displayName):
    """Copy relevant fields from Conference to ConferenceForm.

    Args:
        conf: Conference ndb entity to serialize.
        displayName: organizer display name to attach, if truthy.
    Returns:
        A fully initialized ConferenceForm.
    """
    cf = ConferenceForm()
    for field in cf.all_fields():
        if hasattr(conf, field.name):
            # convert Date to date string; just copy others
            if field.name.endswith('Date'):
                setattr(cf, field.name, str(getattr(conf, field.name)))
            else:
                setattr(cf, field.name, getattr(conf, field.name))
        elif field.name == "websafeKey":
            # expose the entity key in URL-safe form for client round-trips
            setattr(cf, field.name, conf.key.urlsafe())
    if displayName:
        setattr(cf, 'organizerDisplayName', displayName)
    cf.check_initialized()
    return cf
def _createConferenceObject(self, request):
    """Create or update Conference object, returning ConferenceForm/request.

    Requires an authenticated user and a conference name; fills defaults,
    converts date strings, stores the Conference as a child of the user's
    Profile, and queues a confirmation email task.
    """
    # preload necessary data items
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    user_id = getUserId(user)
    if not request.name:
        raise endpoints.BadRequestException("Conference 'name' field required")
    # copy ConferenceForm/ProtoRPC Message into dict
    data = {field.name: getattr(request, field.name) for field in request.all_fields()}
    # drop output-only fields that are not Conference properties
    del data['websafeKey']
    del data['organizerDisplayName']
    # add default values for those missing (both data model & outbound Message)
    for df in DEFAULTS:
        if data[df] in (None, []):
            data[df] = DEFAULTS[df]
            setattr(request, df, DEFAULTS[df])
    # convert dates from strings to Date objects; set month based on start_date
    if data['startDate']:
        data['startDate'] = datetime.strptime(data['startDate'][:10], "%Y-%m-%d").date()
        data['month'] = data['startDate'].month
    else:
        data['month'] = 0
    if data['endDate']:
        data['endDate'] = datetime.strptime(data['endDate'][:10], "%Y-%m-%d").date()
    # set seatsAvailable to be same as maxAttendees on creation
    if data["maxAttendees"] > 0:
        data["seatsAvailable"] = data["maxAttendees"]
    # generate Profile Key based on user ID and Conference
    # ID based on Profile key get Conference key from ID
    p_key = ndb.Key(Profile, user_id)
    c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
    c_key = ndb.Key(Conference, c_id, parent=p_key)
    data['key'] = c_key
    data['organizerUserId'] = request.organizerUserId = user_id
    # create Conference, send email to organizer confirming
    # creation of Conference & return (modified) ConferenceForm
    Conference(**data).put()
    # queue the confirmation email so the request doesn't wait on SMTP
    taskqueue.add(params={'email': user.email(), 'conferenceInfo': repr(request)},
        url = '/tasks/send_confirmation_email')
    return request
@ndb.transactional()
def _updateConferenceObject(self, request):
    """Update an existing Conference with the fields supplied in `request`.

    Runs in a transaction so the read-modify-write of the Conference
    entity is atomic. Only the conference owner may update it.

    Returns:
        ConferenceForm describing the updated conference.
    Raises:
        endpoints.UnauthorizedException: no authenticated user.
        endpoints.NotFoundException: unknown conference key.
        endpoints.ForbiddenException: caller is not the organizer.
    """
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    user_id = getUserId(user)
    # update existing conference
    conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
    # check that conference exists
    if not conf:
        raise endpoints.NotFoundException(
            'No conference found with key: %s' % request.websafeConferenceKey)
    # check that user is owner
    if user_id != conf.organizerUserId:
        raise endpoints.ForbiddenException(
            'Only the owner can update the conference.')
    # Not getting all the fields, so don't create a new object; just copy
    # the supplied fields onto the existing Conference. (A previous version
    # built a full request dict here first; it was dead code — the loop
    # below reads each field directly from the request.)
    for field in request.all_fields():
        value = getattr(request, field.name)
        # only copy fields where we get data
        if value not in (None, []):
            # special handling for dates (convert string to Date)
            if field.name in ('startDate', 'endDate'):
                value = datetime.strptime(value, "%Y-%m-%d").date()
                if field.name == 'startDate':
                    conf.month = value.month
            # write to Conference object
            setattr(conf, field.name, value)
    conf.put()
    prof = ndb.Key(Profile, user_id).get()
    return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(ConferenceForm, ConferenceForm, path='conference',
        http_method='POST', name='createConference')
def createConference(self, request):
    """Create new conference."""
    return self._createConferenceObject(request)

@endpoints.method(CONF_POST_REQUEST, ConferenceForm,
        path='conference/{websafeConferenceKey}',
        http_method='PUT', name='updateConference')
def updateConference(self, request):
    """Update conference w/provided fields & return w/updated info."""
    return self._updateConferenceObject(request)

@endpoints.method(CONF_GET_REQUEST, ConferenceForm,
        path='conference/{websafeConferenceKey}',
        http_method='GET', name='getConference')
def getConference(self, request):
    """Return requested conference (by websafeConferenceKey)."""
    # get Conference object from request; bail if not found
    conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
    if not conf:
        raise endpoints.NotFoundException(
            'No conference found with key: %s' % request.websafeConferenceKey)
    # the conference's parent entity is the organizer's Profile
    prof = conf.key.parent().get()
    # return ConferenceForm
    return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))

@endpoints.method(message_types.VoidMessage, ConferenceForms,
        path='getConferencesCreated',
        http_method='POST', name='getConferencesCreated')
def getConferencesCreated(self, request):
    """Return conferences created by user."""
    # make sure user is authed
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    user_id = getUserId(user)
    # create ancestor query for all key matches for this user
    confs = Conference.query(ancestor=ndb.Key(Profile, user_id))
    prof = ndb.Key(Profile, user_id).get()
    # return set of ConferenceForm objects per Conference
    return ConferenceForms(
        items=[self._copyConferenceToForm(conf, getattr(prof, 'displayName')) for conf in confs]
    )
def _getQuery(self, request):
    """Return formatted query from the submitted filters."""
    q = Conference.query()
    inequality_filter, filters = self._formatFilters(request.filters)
    # If exists, sort on inequality filter first
    # (Datastore requires the inequality property to be the first sort order)
    if not inequality_filter:
        q = q.order(Conference.name)
    else:
        q = q.order(ndb.GenericProperty(inequality_filter))
        q = q.order(Conference.name)
    for filtr in filters:
        # month/maxAttendees are stored as ints; coerce the string value
        if filtr["field"] in ["month", "maxAttendees"]:
            filtr["value"] = int(filtr["value"])
        formatted_query = ndb.query.FilterNode(filtr["field"], filtr["operator"], filtr["value"])
        q = q.filter(formatted_query)
    return q

def _formatFilters(self, filters):
    """Parse, check validity and format user supplied filters.

    Returns:
        (inequality_field, formatted_filters): the single field allowed to
        carry an inequality operator (or None), plus normalized filters.
    Raises:
        endpoints.BadRequestException: unknown field/operator, or
            inequality filters on more than one field.
    """
    formatted_filters = []
    inequality_field = None
    for f in filters:
        filtr = {field.name: getattr(f, field.name) for field in f.all_fields()}
        try:
            filtr["field"] = FIELDS[filtr["field"]]
            filtr["operator"] = OPERATORS[filtr["operator"]]
        except KeyError:
            raise endpoints.BadRequestException("Filter contains invalid field or operator.")
        # Every operation except "=" is an inequality
        if filtr["operator"] != "=":
            # check if inequality operation has been used in previous filters
            # disallow the filter if inequality was performed on a different field before
            # track the field on which the inequality operation is performed
            if inequality_field and inequality_field != filtr["field"]:
                raise endpoints.BadRequestException("Inequality filter is allowed on only one field.")
            else:
                inequality_field = filtr["field"]
        formatted_filters.append(filtr)
    return (inequality_field, formatted_filters)
@endpoints.method(ConferenceQueryForms, ConferenceForms,
        path='queryConferences',
        http_method='POST',
        name='queryConferences')
def queryConferences(self, request):
    """Query for conferences."""
    conferences = self._getQuery(request)
    # need to fetch organiser displayName from profiles
    # get all keys and use get_multi for speed
    organisers = [(ndb.Key(Profile, conf.organizerUserId)) for conf in conferences]
    profiles = ndb.get_multi(organisers)
    # put display names in a dict for easier fetching
    names = {}
    for profile in profiles:
        names[profile.key.id()] = profile.displayName
    # return individual ConferenceForm object per Conference
    return ConferenceForms(
        items=[self._copyConferenceToForm(conf, names[conf.organizerUserId]) for conf in \
            conferences]
    )
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
    """Copy relevant fields from Profile to ProfileForm."""
    # copy relevant fields from Profile to ProfileForm
    pf = ProfileForm()
    for field in pf.all_fields():
        if hasattr(prof, field.name):
            # convert t-shirt string to Enum; just copy others
            if field.name == 'teeShirtSize':
                setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
            else:
                setattr(pf, field.name, getattr(prof, field.name))
    pf.check_initialized()
    return pf

def _getProfileFromUser(self):
    """Return user Profile from datastore, creating new one if non-existent."""
    # make sure user is authed
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    # get Profile from datastore (keyed by the user id)
    user_id = getUserId(user)
    p_key = ndb.Key(Profile, user_id)
    profile = p_key.get()
    # create new Profile if not there
    if not profile:
        profile = Profile(
            key = p_key,
            displayName = user.nickname(),
            mainEmail= user.email(),
            teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
        )
        profile.put()
    return profile      # return Profile

def _doProfile(self, save_request=None):
    """Get user Profile and return to user, possibly updating it first."""
    # get user Profile
    prof = self._getProfileFromUser()
    # if saveProfile(), process user-modifyable fields
    if save_request:
        for field in ('displayName', 'teeShirtSize'):
            if hasattr(save_request, field):
                val = getattr(save_request, field)
                if val:
                    setattr(prof, field, str(val))
                    #if field == 'teeShirtSize':
                    #    setattr(prof, field, str(val).upper())
                    #else:
                    #    setattr(prof, field, val)
                    prof.put()
    # return ProfileForm
    return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
        path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
    """Return user profile."""
    return self._doProfile()

@endpoints.method(ProfileMiniForm, ProfileForm,
        path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
    """Update & return user profile."""
    # passing the request makes _doProfile persist the editable fields
    return self._doProfile(request)
# - - - Registration - - - - - - - - - - - - - - - - - - - -
@ndb.transactional(xg=True)
def _conferenceRegistration(self, request, reg=True):
    """Register or unregister user for selected conference.

    Cross-group transaction: updates both the user's Profile and the
    Conference seat count atomically. Returns BooleanMessage(success).
    """
    retval = None
    prof = self._getProfileFromUser() # get user Profile
    # check if conf exists given websafeConfKey
    # get conference; check that it exists
    wsck = request.websafeConferenceKey
    conf = ndb.Key(urlsafe=wsck).get()
    if not conf:
        raise endpoints.NotFoundException(
            'No conference found with key: %s' % wsck)
    # register
    if reg:
        # check if user already registered otherwise add
        if wsck in prof.conferenceKeysToAttend:
            raise ConflictException(
                "You have already registered for this conference")
        # check if seats avail
        if conf.seatsAvailable <= 0:
            raise ConflictException(
                "There are no seats available.")
        # register user, take away one seat
        prof.conferenceKeysToAttend.append(wsck)
        conf.seatsAvailable -= 1
        retval = True
    # unregister
    else:
        # check if user already registered
        if wsck in prof.conferenceKeysToAttend:
            # unregister user, add back one seat
            prof.conferenceKeysToAttend.remove(wsck)
            conf.seatsAvailable += 1
            retval = True
        else:
            retval = False
    # write things back to the datastore & return
    prof.put()
    conf.put()
    return BooleanMessage(data=retval)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
        path='conferences/attending',
        http_method='GET', name='getConferencesToAttend')
def getConferencesToAttend(self, request):
    """Get list of conferences that user has registered for."""
    prof = self._getProfileFromUser() # get user Profile
    conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
    conferences = ndb.get_multi(conf_keys)
    # get organizers
    organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences]
    profiles = ndb.get_multi(organisers)
    # put display names in a dict for easier fetching
    names = {}
    for profile in profiles:
        names[profile.key.id()] = profile.displayName
    # return set of ConferenceForm objects per Conference
    return ConferenceForms(items=[self._copyConferenceToForm(conf, names[conf.organizerUserId])\
        for conf in conferences]
    )

@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
        path='conference/{websafeConferenceKey}',
        http_method='POST', name='registerForConference')
def registerForConference(self, request):
    """Register user for selected conference."""
    return self._conferenceRegistration(request)

@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
        path='conference/{websafeConferenceKey}',
        http_method='DELETE', name='unregisterFromConference')
def unregisterFromConference(self, request):
    """Unregister user for selected conference."""
    return self._conferenceRegistration(request, reg=False)
# - - - Announcements - - - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheAnnouncement():
    """Create Announcement & assign to memcache; used by
    memcache cron job & putAnnouncement().
    """
    # conferences with 1-5 seats left count as "nearly sold out"
    confs = Conference.query(ndb.AND(
        Conference.seatsAvailable <= 5,
        Conference.seatsAvailable > 0)
    ).fetch(projection=[Conference.name])
    if confs:
        # If there are almost sold out conferences,
        # format announcement and set it in memcache
        announcement = '%s %s' % (
            'Last chance to attend! The following conferences '
            'are nearly sold out:',
            ', '.join(conf.name for conf in confs))
        memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
    else:
        # If there are no sold out conferences,
        # delete the memcache announcements entry
        announcement = ""
        memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
    return announcement

@endpoints.method(message_types.VoidMessage, StringMessage,
        path='conference/announcement/get',
        http_method='GET', name='getAnnouncement')
def getAnnouncement(self, request):
    """Return Announcement from memcache."""
    # return the cached announcement, or an empty string on a cache miss
    announcement = memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY)
    if not announcement:
        announcement = ""
    return StringMessage(data=announcement)
# - - - TASK1: Speaker - - - - - - - - - - - - - - - - - - - -
def _createSpeakerObject(self, request):
    '''Create Speaker object, returning SpeakerForm.

    Requires an authenticated user plus name and email; the email doubles
    as the Speaker entity's key, so a repeated email overwrites.
    '''
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    if not request.name:
        raise endpoints.BadRequestException("Speaker 'name' field required.")
    if not request.email:
        raise endpoints.BadRequestException("Speaker 'email' field required.")
    data = {field.name: getattr(request, field.name) for field in request.all_fields()}
    # fill defaults for missing optional fields (mirrored onto the request)
    for df in SPEAKERDEFAULTS:
        if data[df] in (None, []):
            data[df] = SPEAKERDEFAULTS[df]
            setattr(request, df, SPEAKERDEFAULTS[df])
    s_key = ndb.Key(Speaker, data['email'])
    data['key'] = s_key
    Speaker(**data).put()
    return request

def _copySpeakerToForm(self, speaker):
    '''Copy relevant fields from Speaker to SpeakerForm'''
    speaker_form = SpeakerForm()
    for field in speaker_form.all_fields():
        if hasattr(speaker, field.name):
            setattr(speaker_form, field.name, getattr(speaker, field.name))
    speaker_form.check_initialized()
    return speaker_form

@endpoints.method(SpeakerForm, SpeakerForm, path='speaker',
        http_method='POST', name='createSpeaker')
def createSpeaker(self, request):
    '''Create new speaker and update speaker.'''
    return self._createSpeakerObject(request)
def _queryForSpeakers(self, filters):
    """
    Using the filter to query for speakers.
    Returns:
        Query results for the filters.
    Args:
        filters: the SpeakerQueryForms's filters.
    """
    formatted_filters = []
    for f in filters:
        filtr = {field.name: getattr(f, field.name) for field in f.all_fields()}
        formatted_filters.append(filtr)
    q = Speaker.query()
    q = q.order(Speaker.name)
    # all speaker filters are equality filters ("="), applied verbatim
    for filtr in formatted_filters:
        query = ndb.query.FilterNode(filtr['field'], "=", filtr['value'])
        q = q.filter(query)
    return q

@endpoints.method(SpeakerQueryForms, SpeakerForms,
        path='querySpeakers', http_method='POST',
        name='querySpeakers')
def querySpeakers(self, request):
    '''Query for speakers'''
    speakers = self._queryForSpeakers(request.filters)
    return SpeakerForms(items=[self._copySpeakerToForm(speaker) for speaker in speakers])
# - - - TASK1:Session - - - - - - - - - - - - - - - - - - - -
def _createSessionObject(self, request):
    """
    Using the request to create session
    Returns:
        SessionForm: including all the session info.
    Args:
        SESSION_CREATE_REQUEST request container
    """
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    user_id = getUserId(user)
    c_key = ndb.Key(urlsafe=request.websafeConferenceKey)
    conf = c_key.get()
    if not conf:
        raise endpoints.NotFoundException(
            'No conference found with key: %s' % request.websafeConferenceKey)
    # only the conference organizer may add sessions
    if user_id != conf.organizerUserId:
        raise endpoints.ForbiddenException(
            'Only the owner can create the session.')
    if not request.name:
        raise endpoints.BadRequestException("Session 'name' field required")
    if not request.speaker:
        raise endpoints.BadRequestException("Session 'speaker' field required")
    # the speaker must already exist (Speaker entities are keyed by email)
    speaker_key = ndb.Key(Speaker, request.speaker)
    if not speaker_key.get():
        raise endpoints.NotFoundException(
            'No speaker found with id: %s' % request.speaker)
    data = {field.name: getattr(request, field.name) for field in request.all_fields()}
    # drop fields that are not Session properties
    del data['websafeConferenceKey']
    del data['websafeKey']
    for df in SESSION_DEFAULTS:
        if data[df] in (None, []):
            data[df] = SESSION_DEFAULTS[df]
            setattr(request, df, SESSION_DEFAULTS[df])
    # session date defaults to the conference start date
    if data['date']:
        data['date'] = datetime.strptime(data['date'][:10], "%Y-%m-%d").date()
    else:
        data['date'] = conf.startDate
    if data['startTime']:
        data['startTime'] = datetime.strptime(data['startTime'][:5], "%H:%M").time()
    # the session type enum is stored as its string name
    if data['typeOfSession']:
        data['typeOfSession'] = str(getattr(request, 'typeOfSession'))
    else:
        data['typeOfSession'] = str(SessionType.NOT_SPECIFIED)
    # sessions are children of their conference
    s_id = Session.allocate_ids(size=1, parent=c_key)[0]
    s_key = ndb.Key(Session, s_id, parent=c_key)
    data['key'] = s_key
    Session(**data).put()
    # queue recalculation of the featured speaker for this conference
    taskqueue.add(params={'speaker_email': request.speaker,
        'wsck': request.websafeConferenceKey}, url = '/tasks/set_featured_speaker')
    return self._copySessionToForm(s_key.get())
def _copySessionToForm(self, session):
    '''Copy relevant fields from Session to SessionForm.'''
    s_form = SessionForm()
    for field in s_form.all_fields():
        if hasattr(session, field.name):
            # date/startTime go out as strings; typeOfSession as its enum
            if field.name in ['date', 'startTime']:
                setattr(s_form, field.name, str(getattr(session, field.name)))
            elif field.name == 'typeOfSession':
                setattr(s_form, field.name, getattr(SessionType, getattr(session, field.name)))
            else:
                setattr(s_form, field.name, getattr(session, field.name))
    # expose the entity key in URL-safe form for client round-trips
    setattr(s_form, 'websafeKey', session.key.urlsafe())
    s_form.check_initialized()
    return s_form

@endpoints.method(SESSION_CREATE_REQUEST, SessionForm,
        path='session', http_method='POST', name='createSession')
def createSession(self, request):
    '''Create new session'''
    return self._createSessionObject(request)
@endpoints.method(CON_SESSION_GET_REQUEST, SessionForms,
path='conference/sessions/{websafeConferenceKey}', http_method='GET',
name='getConferenceSessions')
def getConferenceSessions(self, request):
'''Return all sessions in a conference'''
c_key = ndb.Key(urlsafe=request.websafeConferenceKey)
conf = c_key.get()
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
sessions = Session.query(ancestor=c_key)
return SessionForms(
items=[self._copySessionToForm(session) for session in sessions]
)
@endpoints.method(CON_SES_TYPE_GET_REQUEST, SessionForms,
                  path='conference/sessions/query/type/{typeOfSession}',
                  http_method='GET', name='getConferenceSessionsByType')
def getConferenceSessionsByType(self, request):
    '''Return the conference's sessions matching the requested type.'''
    conf_key = ndb.Key(urlsafe=request.websafeConferenceKey)
    if conf_key.get() is None:
        raise endpoints.NotFoundException(
            'No conference found with key: %s' % request.websafeConferenceKey)
    # typeOfSession is stored as a string on the entity, so the enum from
    # the request is stringified before filtering.
    wanted_type = str(getattr(request, 'typeOfSession'))
    query = Session.query(ancestor=conf_key).filter(
        Session.typeOfSession == wanted_type)
    return SessionForms(
        items=[self._copySessionToForm(s) for s in query])
@endpoints.method(SES_SEPAKER_GET_REQUEST, SessionForms,
                  path='session/querybuspeaker', http_method='POST',
                  name='getSessionsBySpeaker')
def getSessionsBySpeaker(self, request):
    '''Return all sessions given by the requested speaker, across all
    conferences.'''
    # NOTE(review): SES_SEPAKER_GET_REQUEST looks like a typo for
    # SES_SPEAKER_GET_REQUEST, but the constant is declared elsewhere in
    # this file, so it cannot be renamed safely here. Likewise the path
    # segment "querybuspeaker" is public API and must stay as-is.
    # Speaker entities are keyed by email (see _cacheFeaturedSpeaker).
    speaker_key = ndb.Key(Speaker, request.speaker)
    if not speaker_key.get():
        raise endpoints.NotFoundException(
            'No speaker found with id: %s' % request.speaker)
    # Kind-wide query (no ancestor): sessions store the speaker's email.
    q = Session.query()
    q = q.filter(Session.speaker == request.speaker)
    return SessionForms(
        items=[self._copySessionToForm(session) for session in q]
    )
# - - - TASK2:Wishlist - - - - - - - - - - - - - - - - - - - -
def _wishlistHandle(self, request, add=True):
    """Add or remove request.sessionKey in the current user's wishlist.

    Args:
        request: SES_REQUEST carrying the session's websafe key.
        add: True to append to the wishlist, False to remove.
    Returns:
        BooleanMessage: whether the add/remove succeeded.
    Raises:
        endpoints.NotFoundException: the session key resolves to nothing.
        ConflictException: adding without attending the parent conference,
            or adding a session already on the wishlist.
    """
    prof = self._getProfileFromUser()
    session_key = ndb.Key(urlsafe=request.sessionKey)
    if session_key.get() is None:
        raise endpoints.NotFoundException(
            'No session found with key: %s' % request.sessionKey)
    if not add:
        # Removal succeeds only when the key is actually present; the
        # profile is written back either way (as in the original flow).
        found = request.sessionKey in prof.wishlist
        if found:
            prof.wishlist.remove(request.sessionKey)
        prof.put()
        return BooleanMessage(data=found)
    # Addition: the user must be registered for the session's parent
    # conference and must not already have the session wishlisted.
    wsck = session_key.parent().urlsafe()
    if wsck not in prof.conferenceKeysToAttend:
        raise ConflictException(
            "You must register for the conference:%s first" % wsck)
    if request.sessionKey in prof.wishlist:
        raise ConflictException(
            "You have already add this session to your wishlist")
    prof.wishlist.append(request.sessionKey)
    prof.put()
    return BooleanMessage(data=True)
@endpoints.method(SES_REQUEST, BooleanMessage,
                  path='session/add_whishlist', http_method='POST',
                  name='addSessionToWishlist')
def addSessionToWishlist(self, request):
    '''Add the session named in the request to the user's wishlist.'''
    # NOTE(review): the path segment "add_whishlist" is misspelled, but it
    # is part of the published URL surface, so it must not be changed here.
    return self._wishlistHandle(request)
@endpoints.method(SES_REQUEST, BooleanMessage,
                  path='session/delete_wishlist', http_method='DELETE',
                  name='deleteSessionInWishlist')
def deleteSessionInWishlist(self, request):
    '''Remove the session from the user's wishlist.

    Returns BooleanMessage(False) when the session was not on the list.
    '''
    return self._wishlistHandle(request, add=False)
@endpoints.method(message_types.VoidMessage, SessionForms,
                  path='wishlist', http_method='GET', name='getSessionsInWishlist')
def getSessionsInWishlist(self, request):
    '''Return every session on the current user's wishlist.'''
    profile = self._getProfileFromUser()
    # The wishlist holds websafe key strings; fetch all entities in one
    # batched datastore call.
    keys = [ndb.Key(urlsafe=wssk) for wssk in profile.wishlist]
    return SessionForms(
        items=[self._copySessionToForm(s) for s in ndb.get_multi(keys)])
# - - - TASK2: Two additional queries - - - - - - - - - - - - - - - - - - - -
@endpoints.method(SessionHighlightsForm, SessionForms,
                  path='session/highlights', http_method='GET',
                  name='getSessionsWithHighlights')
def getSessionsWithHighlights(self, request):
    '''Return sessions whose highlights match any of the requested values.'''
    # IN() matches entities having at least one highlight from the list.
    q = Session.query(Session.highlights.IN(request.highlights))
    return SessionForms(
        items=[self._copySessionToForm(session) for session in q]
    )
@endpoints.method(SessionSpeakerFieldForm, SessionForms,
                  path='session/speakerfield', http_method='GET',
                  name='getSessionsWithSpeakerField')
def getSessionsWithSpeakerField(self, request):
    '''Return sessions whose speaker works in any of the requested fields.

    Looks up Speaker entities matching request.fields, then returns all
    sessions given by any of those speakers (sessions reference speakers
    by email).
    '''
    speakers = Speaker.query(Speaker.field.IN(request.fields)).fetch()
    # Idiomatic emptiness test (was `speakers == []`).
    if not speakers:
        return SessionForms(items=[])
    speakers_email = [speaker.email for speaker in speakers]
    sessions = Session.query(Session.speaker.IN(speakers_email))
    return SessionForms(
        items=[self._copySessionToForm(session) for session in sessions]
    )
@endpoints.method(message_types.VoidMessage, SessionForms,
                  path='session/task3', http_method='POST',
                  name='task3')
def task3(self, request):
    '''Task 3 "query problem" endpoint.

    Returns:
        Sessions that are not workshops and start before 7:00 pm.

    The datastore forbids inequality filters on two different properties,
    so only startTime is filtered in the query; the typeOfSession
    restriction is applied in Python afterwards.
    '''
    aim_time = time(19)
    sessions = Session.query(Session.startTime < aim_time)
    # Fixed: dropped the redundant `result_sessions = []` initialisation
    # that was immediately overwritten, and use identity comparison
    # (`is not None`) instead of `!= None`.
    result_sessions = [session for session in sessions if
                       session.typeOfSession != 'WORKSHOP' and
                       session.startTime is not None]
    return SessionForms(
        items=[self._copySessionToForm(session) for session in result_sessions]
    )
# - - - TASK4: Add a Task - - - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheFeaturedSpeaker(speaker_email, wsck):
    '''
    If there is more than one session by this speaker at this conference,
    record the speaker as "featured" in memcache.

    The cached value is a single string of the form
    "Featured Speakers:| <name>'s sessions: a,b| <name2>'s sessions: ..."
    where '|' separates per-speaker segments; an existing segment for the
    same speaker is replaced in place, otherwise a new one is appended.

    Args:
        speaker_email: The speaker's email (also the Speaker key id)
        wsck: the aimed conference's web safe url key
    '''
    # Fetch a list of Sessions at the provided Conference
    # that the Speaker is speaking at.
    s_key = ndb.Key(Speaker, speaker_email)
    speaker = s_key.get()
    c_key = ndb.Key(urlsafe=wsck)
    q = Session.query(ancestor=c_key)
    sessions = q.filter(Session.speaker == speaker_email).fetch()
    # if sesions count <= 1 break the function.
    if len(sessions) <= 1:
        return
    # Set the featured string for the speaker.
    featuredInfo = "| %s's sessions: %s" %(speaker.name ,','.join(session.name for session in sessions))
    cacheInfo = memcache.get(MEMCACHE_FEATUREDSPEAKER_KEY)
    featuredStr = ""
    # This tag is used to check if speaker is already in the memcache
    isChanged = False
    if not cacheInfo:
        featuredStr = "Featured Speakers:" + featuredInfo
        isChanged = True
    else:
        # split('|', 1) yields ["Featured Speakers:", "<rest>"]; index 0 is
        # the header and is skipped below.
        infos = cacheInfo.split('|',1)
        for (i, info) in enumerate(infos):
            if i == 0:
                continue
            # NOTE(review): substring match on the speaker's name could
            # collide with a different speaker whose name contains it —
            # confirm whether that is acceptable here.
            if speaker.name in info:
                isChanged=True
                infos[i] = featuredInfo[1:]
        featuredStr = '|'.join(infos)
    # If the speaker's info is not in the memcache, then append this str.
    if not isChanged:
        featuredStr += featuredInfo
    # Set memcache
    memcache.set(MEMCACHE_FEATUREDSPEAKER_KEY, featuredStr)
@endpoints.method(message_types.VoidMessage, StringMessage,
                  path='featuredspeaker', name='getFeaturedSpeaker')
def getFeaturedSpeaker(self, request):
    '''Return the cached featured-speaker announcement ("" when unset).'''
    cached = memcache.get(MEMCACHE_FEATUREDSPEAKER_KEY)
    return StringMessage(data=cached or "")
api = endpoints.api_server([ConferenceApi]) # register API
|
import { makeStyles, Theme } from "@material-ui/core";
// Styles shared by this feature's components (Material-UI v4 makeStyles hook).
export const useStyles = makeStyles((theme: Theme) => ({
  // Centred single-column form capped at 400px, with themed spacing between
  // children; the nested rule stretches every TextField to the column width.
  form: {
    display: "flex",
    flexDirection: "column",
    gap: theme.spacing(2),
    maxWidth: 400,
    margin: "auto",
    "& .MuiTextField-root": {
      width: "100%"
    }
  },
  // Utility: fill the parent's full height.
  height__full: {
    height: "100%"
  },
  // Utility: remove the element from the layout entirely.
  hide: {
    display: "none"
  }
}));
|
target_burn_frontend --erase-all --verify -P 4444 --unlock --image=/home/kevin/amazon-freertos/vendors/andes/boards/corvette_f1_n25/aws_tests/aws_tests.bin --algorithm-bin=/home/kevin/amazon-freertos/vendors/andes/tools/target_bin/target_SPI_v5_32.bin
|
fn main() {
    // Start from an empty list of i32 values.
    let mut list: LinkedList<i32> = LinkedList::new();

    // Prepending 3, then 2, then 1 leaves the list as [1, 2, 3].
    for n in &[3, 2, 1] {
        list.push_front(*n);
    }

    // Appending 4 and 5 extends it to [1, 2, 3, 4, 5].
    for n in &[4, 5] {
        list.push_back(*n);
    }

    // Drain from the front, printing each value in order.
    while let Some(element) = list.pop_front() {
        println!("Popped element: {}", element);
    }

    // The list is now empty, so (as in the original) this loop prints
    // nothing — it just demonstrates iteration by reference.
    for element in &list {
        println!("Iterated element: {}", element);
    }
}
import os
import numpy as np
import argparse
import random
import sys
from tqdm import tqdm
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.optim as optim
import torch.utils.data
from datetime import datetime
from PIL import Image, ImageDraw
import seaborn as sns
from box import AutoregressiveBoxEncoder, AutoregressiveBoxDecoder
from layout import BatchCollator, LayoutDataset
def gen_colors(num_colors):
    """Produce `num_colors` RGB triples (0-255 ints) from a seaborn palette.

    :param num_colors: number of distinct colors to generate
    :return: list of [r, g, b] integer lists
    """
    palette = sns.color_palette(None, num_colors)
    # Palette entries are (r, g, b) floats in [0, 1]; scale to byte range.
    return [[int(r * 255), int(g * 255), int(b * 255)] for r, g, b in palette]
def plot_layout(real_boxes, predicted_boxes, labels, width, height, colors=None):
    """Render ground-truth vs. predicted boxes on a blank white canvas.

    Boxes are normalised [x, y, w, h] arrays. Real boxes are outlined in the
    label's palette colour (black when no palette is given); predicted boxes
    are always outlined in black. Returns the PIL image.
    """
    canvas = Image.new("RGB", (int(width), int(height)), (255, 255, 255))
    draw = ImageDraw.Draw(canvas)

    def to_corners(box):
        # normalised [x, y, w, h] -> integer pixel corner coordinates
        x1, y1 = int(box[0] * width), int(box[1] * height)
        return x1, y1, x1 + int(box[2] * width), y1 + int(box[3] * height)

    for i in range(real_boxes.shape[0]):
        rx1, ry1, rx2, ry2 = to_corners(real_boxes[i].tolist())
        px1, py1, px2, py2 = to_corners(predicted_boxes[i].tolist())
        outline = tuple(colors[int(labels[i])]) if colors is not None else (0, 0, 0)
        draw.rectangle([(rx1, ry1), (rx2, ry2)], outline=outline)
        draw.rectangle([(px1, py1), (px2, py2)], outline=(0, 0, 0))
    return canvas
def evaluate(model, loader, loss, prefix='', colors=None):
    """Run the autoregressive box model over `loader` and return the
    validation loss (mean box loss + mean KL divergence).

    Relies on module-level globals `device` and `label_encodings` set in
    the __main__ block. When `colors` is given, the first batch is also
    rendered via plot_layout and saved as "{prefix}_{i:05d}.png".

    NOTE(review): runs without torch.no_grad(), so gradients are still
    tracked during evaluation — confirm whether that is intended.
    """
    errors = []  # NOTE(review): never used below.
    model.eval()
    losses = None
    box_losses = []
    divergence_losses = []
    for batch_i, (indexes, target) in tqdm(enumerate(loader)):
        label_set = torch.stack([t.label_set for t in target], dim=0).to(device)
        counts = torch.stack([t.count for t in target], dim=0).to(device)
        boxes = [t.bbox.to(device) for t in target]
        labels = [t.label.to(device) for t in target]
        # Per-sample box counts; steps below are masked by these.
        number_boxes = np.stack([len(t) for t in target], axis=0)
        max_number_boxes = np.max(number_boxes)
        batch_size = label_set.size(0)
        predicted_boxes = torch.zeros((batch_size, max_number_boxes, 4)).to(device)
        # import ipdb; ipdb.set_trace()
        # One teacher-forced model step per box position, restricted to the
        # batch rows that still have a box at this step.
        for step in range(max_number_boxes):
            # determine who has a box.
            has_box = number_boxes > step
            # determine their history of box/labels.
            current_label_set = label_set[has_box, :]
            current_counts = counts[has_box, :]
            all_boxes = [boxes[i] for i, has in enumerate(has_box) if has]
            all_labels = [labels[i] for i, has in enumerate(has_box) if has]
            current_label = torch.stack([l[step] for l in all_labels], dim=0).to(device)
            # Labels are 1-based; index into the one-hot rows with label-1.
            current_label = label_encodings[current_label.long() - 1]
            current_box = torch.stack([b[step] for b in all_boxes], dim=0).to(device)
            # now, consider the history.
            if step == 0:
                previous_labels = torch.zeros((batch_size, 0, 7)).to(device)
                previous_boxes = torch.zeros((batch_size, 0, 4)).to(device)
            else:
                previous_labels = torch.stack([l[step - 1] for l in all_labels], dim=0).unsqueeze(1)
                previous_labels = label_encodings[previous_labels.long() - 1]
                # we need to 1-hot these. only take the previous one since
                # we'll accumulate state instead.
                previous_boxes = torch.stack([b[step - 1] for b in all_boxes], dim=0).unsqueeze(1)
            # take a step. x, label_set, current_label, count_so_far):
            # NOTE(review): h/c are first assigned after a step returns a
            # non-None state; the `step > 1` guard relies on that ordering.
            state = (h[has_box].unsqueeze(0), c[has_box].unsqueeze(0)) if step > 1 else None
            predicted_boxes_step, kl_divergence, z, state = model(current_box, current_label_set, current_label,
                                                                  previous_labels, previous_boxes, state=state)
            predicted_boxes[has_box, step] = predicted_boxes_step
            box_loss_step = loss(predicted_boxes_step, current_box)
            losses = box_loss_step if losses is None else torch.cat([losses, box_loss_step])
            box_losses.append(box_loss_step.reshape(-1))
            divergence_losses.append(kl_divergence.reshape(-1))
            if state is not None:
                # Scatter the recurrent state back into full-batch h/c
                # buffers so masked rows keep zeros.
                h, c = torch.zeros((batch_size, 128)).to(device), torch.zeros((batch_size, 128)).to(device)
                h[has_box, :] = state[0][-1]
                c[has_box, :] = state[1][-1]
        if batch_i == 0 and colors is not None:
            # try plotting the first batch.
            for i in range(batch_size):
                count = number_boxes[i]
                plotted = plot_layout(
                    boxes[i].detach().cpu().numpy(),
                    predicted_boxes[i, :count],
                    labels[i].detach().cpu().numpy()-1,
                    target[i].width,
                    target[i].height,
                    colors=colors)
                plotted.save(f"{prefix}_{i:05d}.png")
    # pdb.set_trace()
    average_loss = torch.mean(losses)
    print(f"validation: average loss: {average_loss}")
    count_losses = torch.cat(box_losses)
    divergence_losses = torch.cat(divergence_losses)
    loss_epoch = torch.mean(count_losses) + torch.mean(divergence_losses)
    return loss_epoch.item()
class GaussianLogLikelihood(nn.Module):
    """Per-box reconstruction loss.

    Despite the name, the forward pass reduces to the mean squared error
    over the last dimension; `self.var` (0.02 ** 2, from the paper) is kept
    but not used in the computation — matching the original implementation.
    """

    def __init__(self):
        super(GaussianLogLikelihood, self).__init__()
        self.var = 0.02 ** 2

    def forward(self, predicted, expected):
        # Mean over the coordinate dimension of the squared residual.
        residual = predicted - expected
        return (residual * residual).mean(dim=-1)
class AutoregressiveBoxVariationalAutoencoder(nn.Module):
    """VAE over box sequences.

    The autoregressive encoder yields (mu, log_var) plus a conditioning
    vector and recurrent state; the decoder reconstructs the current box
    from a reparameterised sample of the latent.
    """

    def __init__(self, number_labels, conditioning_size, representation_size):
        super(AutoregressiveBoxVariationalAutoencoder, self).__init__()
        self.representation_size = representation_size
        self.encoder = AutoregressiveBoxEncoder(number_labels, conditioning_size, representation_size)
        self.decoder = AutoregressiveBoxDecoder(conditioning_size, representation_size)

    def sample(self, mu, log_var):
        """Reparameterisation trick: return (z, per-sample KL divergence)."""
        noise = torch.randn(
            (mu.size(0), self.representation_size), device=mu.device)
        z = mu + noise * torch.exp(0.5 * log_var)
        kl_divergence = -0.5 * torch.sum(
            1 + log_var - (mu ** 2) - torch.exp(log_var), dim=1)
        return z, kl_divergence

    def forward(self, x, label_set, current_label, labels_so_far, boxes_so_far, state=None):
        """Encode the current box with its history, sample z, decode."""
        mu, log_var, condition, state = self.encoder(
            x, label_set, current_label, labels_so_far, boxes_so_far, state)
        z, kl_divergence = self.sample(mu, log_var)
        return self.decoder(z, condition), kl_divergence, z, state
# Entry point: train (default) or evaluate (--evaluate) the box VAE.
if __name__ == "__main__":
    parser = argparse.ArgumentParser('Box VAE')
    parser.add_argument("--exp", default="box_vae", help="postfix for experiment name")
    parser.add_argument("--log_dir", default="./logs", help="/path/to/logs/dir")
    parser.add_argument("--train_json", default="./instances_train.json", help="/path/to/train/json")
    parser.add_argument("--val_json", default="./instances_val.json", help="/path/to/val/json")
    # NOTE(review): help string says "batch size" but this is the max
    # sequence length passed to LayoutDataset.
    parser.add_argument("--max_length", type=int, default=128, help="batch size")
    parser.add_argument("--seed", type=int, default=42, help="random seed")
    parser.add_argument("--epochs", type=int, default=50, help="number of epochs")
    parser.add_argument("--batch_size", type=int, default=32, help="batch size")
    parser.add_argument("--lr", type=float, default=0.0001, help="learning rate")
    parser.add_argument("--beta_1", type=float, default=0.9, help="beta_1 for adam")
    parser.add_argument('--evaluate', action='store_true', help="evaluate only")
    # NOTE(review): --save_every is parsed but never used below (and its
    # help text is copied from --evaluate).
    parser.add_argument('--save_every', type=int, default=10, help="evaluate only")
    args = parser.parse_args()
    # Training creates a fresh timestamped log dir; evaluation reuses the
    # directory passed via --log_dir.
    if not args.evaluate:
        now = datetime.now().strftime("%m%d%y_%H%M%S")
        log_dir = os.path.join(args.log_dir, f"{now}_{args.exp}")
        samples_dir = os.path.join(log_dir, "samples")
        ckpt_dir = os.path.join(log_dir, "checkpoints")
        os.makedirs(samples_dir, exist_ok=True)
        os.makedirs(ckpt_dir, exist_ok=True)
    else:
        log_dir = args.log_dir
        samples_dir = os.path.join(log_dir, "samples")
        ckpt_dir = os.path.join(log_dir, "checkpoints")
    random.seed(args.seed)
    torch.manual_seed(args.seed)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print(f"using device: {device}")
    collator = BatchCollator()
    train_dataset = LayoutDataset(args.train_json, args.max_length)
    # NOTE(review): shuffle=False on the *training* loader is unusual —
    # confirm whether deterministic ordering is intended.
    train_loader = torch.utils.data.DataLoader(
        train_dataset,
        batch_size=args.batch_size,
        shuffle=False,
        num_workers=0,
        collate_fn=collator)
    validation_dataset = LayoutDataset(args.val_json, args.max_length)
    validation_loader = torch.utils.data.DataLoader(
        validation_dataset,
        batch_size=args.batch_size,
        shuffle=True,
        num_workers=0,
        collate_fn=collator)
    NUMBER_LABELS = train_dataset.number_labels
    colors = gen_colors(NUMBER_LABELS)
    # One-hot label rows; evaluate() reads this as a module-level global.
    label_encodings = torch.eye(NUMBER_LABELS).float().to(device)
    box_loss = GaussianLogLikelihood().to(device)
    autoencoder = AutoregressiveBoxVariationalAutoencoder(
        NUMBER_LABELS,
        conditioning_size=128,
        representation_size=32).to(device)
    # evaluate the model
    if args.evaluate:
        min_epoch = -1
        min_loss = 1e100
        # Scan every saved epoch checkpoint and keep the best validation loss.
        for epoch in range(args.epochs):
            checkpoint_path = os.path.join(log_dir, "checkpoints", 'epoch_%d.pth' % epoch)
            if not os.path.exists(checkpoint_path):
                continue
            print('Evaluating', checkpoint_path)
            checkpoint = torch.load(checkpoint_path)
            autoencoder.load_state_dict(checkpoint["model_state_dict"], strict=True)
            loss = evaluate(autoencoder, validation_loader, box_loss)
            print('End of epoch %d : %f' % (epoch, loss))
            if loss < min_loss:
                min_loss = loss
                min_epoch = epoch
        print('Best epoch: %d Best nll: %f' % (min_epoch, min_loss))
        sys.exit(0)
    opt = optim.Adam(autoencoder.parameters(), lr=args.lr, betas=(args.beta_1, 0.999))
    epoch_number = 0
    while True:
        if (epoch_number > 0) and (epoch_number == args.epochs):
            print("done!")
            break
        print(f"starting epoch {epoch_number+1}")
        autoencoder.train()
        with tqdm(enumerate(train_loader)) as tq:
            for batch_i, (indexes, target) in tq:
                autoencoder.zero_grad()
                box_loss.zero_grad()
                label_set = torch.stack([t.label_set for t in target], dim=0).to(device)
                counts = torch.stack([t.count for t in target], dim=0).to(device)
                boxes = [t.bbox.to(device) for t in target]
                labels = [t.label.to(device) for t in target]
                number_boxes = np.stack([len(t) for t in target], axis=0)
                max_number_boxes = np.max(number_boxes)
                batch_size = label_set.size(0)
                # previous_boxes = torch.zeros((batch_size, max_number_boxes, 4)).to(device)
                box_losses = []
                divergence_losses = []
                current_box_loss = torch.zeros((batch_size, max_number_boxes)).to(device)
                current_divergence_loss = torch.zeros((batch_size, max_number_boxes)).to(device)
                # Teacher-forced autoregressive pass: one model step per box
                # position, restricted to rows that still have a box.
                for step in range(max_number_boxes):
                    # determine who has a box.
                    has_box = number_boxes > step
                    # determine their history of box/labels.
                    current_label_set = label_set[has_box, :]
                    current_counts = counts[has_box, :]
                    all_boxes = [boxes[i] for i, has in enumerate(has_box) if has]
                    all_labels = [labels[i] for i, has in enumerate(has_box) if has]
                    current_label = torch.stack([l[step] for l in all_labels], dim=0).to(device)
                    # Labels are 1-based; index the one-hot rows with label-1.
                    current_label = label_encodings[current_label.long() - 1]
                    current_box = torch.stack([b[step] for b in all_boxes], dim=0).to(device)
                    # now, consider the history.
                    if step == 0:
                        previous_labels = torch.zeros((batch_size, 0, 7)).to(device)
                        previous_boxes = torch.zeros((batch_size, 0, 4)).to(device)
                    else:
                        previous_labels = torch.stack([l[step - 1] for l in all_labels], dim=0).unsqueeze(1)
                        previous_labels = label_encodings[previous_labels.long() - 1]
                        # we need to 1-hot these. only take the previous one since
                        # we'll accumulate state instead.
                        previous_boxes = torch.stack([b[step - 1] for b in all_boxes], dim=0).unsqueeze(1)
                    # take a step. x, label_set, current_label, count_so_far):
                    # NOTE(review): h/c are first assigned only after a step
                    # returns a non-None state; the step > 1 guard relies on it.
                    state = (h[has_box].unsqueeze(0), c[has_box].unsqueeze(0)) if step > 1 else None
                    predicted_boxes, kl_divergence, z, state = autoencoder(current_box, current_label_set, current_label,
                                                                           previous_labels, previous_boxes, state=state)
                    if not (state is None):
                        # Scatter recurrent state back into full-batch buffers
                        # so masked rows keep zeros.
                        h, c = torch.zeros((batch_size, 128)).to(device), torch.zeros((batch_size, 128)).to(device)
                        h[has_box, :] = state[0][-1]
                        c[has_box, :] = state[1][-1]
                    box_loss_step = box_loss(predicted_boxes, current_box)
                    current_box_loss[has_box, step] = box_loss_step
                    current_divergence_loss[has_box, step] = kl_divergence
                # Length-normalised per-sample sums, averaged over the batch;
                # the KL term is down-weighted by 1e-4.
                number_boxes = torch.from_numpy(number_boxes).to(device).float()
                box_loss_batch = torch.mean(torch.sum(current_box_loss, dim=-1) / number_boxes)
                divergence_loss_batch = torch.mean(torch.sum(current_divergence_loss, dim=-1) / number_boxes)
                loss_batch = box_loss_batch + 0.0001 * divergence_loss_batch
                loss_batch.backward()
                opt.step()
                tq.set_description(f"{epoch_number+1}/{args.epochs} box_loss: {box_loss_batch.item()}"
                                   f"kl: {divergence_loss_batch.item()}")
        # if (epoch_number + 1) % 1 == 0:
        # validation_loss, validation_accuracy = evaluate()
        # print("validation loss [{0}/{1}: {2:4f}".format(epoch_number, NUMBER_EPOCHS, validation_loss.item()))
        # # write out a checkpoint too.
        # Validate (with sample renders) and checkpoint after every epoch.
        prefix = os.path.join(samples_dir, f"epoch_{epoch_number+1:03d}")
        evaluate(autoencoder, validation_loader, box_loss, prefix=prefix, colors=colors)
        torch.save({
            "epoch": epoch_number,
            "model_state_dict": autoencoder.state_dict(),
        }, os.path.join(ckpt_dir, "epoch_{0}.pth".format(epoch_number)))
        epoch_number += 1
|
<gh_stars>100-1000
/*!
\brief Contains a automated container class for managing Gles buffers and textures for a model.
\file PVRUtils/OpenGLES/ModelGles.h
\author PowerVR by Imagination, Developer Technology Team
\copyright Copyright (c) Imagination Technologies Limited.
*/
//!\cond NO_DOXYGEN
#include "ModelGles.h"
namespace pvr {
namespace utils {
void ModelGles::destroy()
{
	// Drop the model reference, then release every GL buffer and texture
	// this object created in init().
	model = nullptr;
	for (auto& mesh : meshes)
	{
		if (!mesh.vbos.empty()) { gl::DeleteBuffers(static_cast<GLsizei>(mesh.vbos.size()), mesh.vbos.data()); }
		mesh.vbos.clear();
		if (mesh.ibo != 0)
		{
			gl::DeleteBuffers(1, &mesh.ibo);
			mesh.ibo = 0;
		}
	}
	meshes.clear();
	if (!textures.empty())
	{
		gl::DeleteTextures(static_cast<GLsizei>(textures.size()), textures.data());
		textures.clear();
	}
}
void ModelGles::init(pvr::IAssetProvider& assetProvider, pvr::assets::Model& inModel, Flags flags)
{
	// Remember the model and size the per-texture / per-mesh GL handle arrays.
	this->model = &inModel;
	textures.resize(this->model->getNumTextures());
	meshes.resize(this->model->getNumMeshes());

	const bool loadTextures = (flags & Flags::LoadTextures) == Flags::LoadTextures;
	const bool loadMeshes = (flags & Flags::LoadMeshes) == Flags::LoadMeshes;
	const bool gles2Only = (flags & Flags::GLES2Only) == Flags::GLES2Only;

	if (loadTextures)
	{
		// Upload each texture referenced by the model, by asset name.
		for (uint32_t i = 0; i < this->model->getNumTextures(); ++i)
		{
			textures[i] = pvr::utils::textureUpload(assetProvider, this->model->getTexture(i).getName().c_str(), gles2Only);
		}
	}
	if (loadMeshes)
	{
		// Create the VBOs and IBO for every mesh in the model.
		for (uint32_t i = 0; i < this->model->getNumMeshes(); ++i)
		{
			pvr::utils::createMultipleBuffersFromMesh(this->model->getMesh(i), meshes[i].vbos, meshes[i].ibo);
		}
	}
}
// Handle-based overload: keeps the shared handle alive for this object's
// lifetime, then delegates to the reference-based init().
void ModelGles::init(pvr::IAssetProvider& assetProvider, pvr::assets::ModelHandle& inModel, Flags flags)
{
	if (!inModel) { throw pvr::InvalidArgumentError("model", "Model cannot be an empty ModelHandle"); }
	modelHandle = inModel;
	init(assetProvider, *inModel, flags);
}
} // namespace utils
} // namespace pvr
//!\endcond
|
<gh_stars>1-10
-- Create the secondary indexes for the terminology tables, logging start
-- and end markers so progress can be tracked from the log table.
call log('create_index.sql','begin');
-- ###################################
-- Indexes on the `term` table: one per commonly-filtered column.
call create_index('term','CUI');
call create_index('term','Term');
call create_index('term','PreferredNameCHV');
call create_index('term','PreferredNameUMLS');
call create_index('term','PreferredByCHV');
call create_index('term','PreferredByUMLS');
call create_index('term','ScoreCombo');
call create_index('term','ScoreComboTop');
call create_index('term','StringID');
call create_index('term','ConceptID');
-- ###################################
-- Indexes on the `stop` table (presumably stop-terms — confirm schema).
call create_index('stop','CUI');
call create_index('stop','STR');
-- ###################################
-- Indexes on the `wrong` table (presumably known-bad mappings — confirm).
call create_index('wrong','CUI');
call create_index('wrong','Wrong');
-- ###################################
call log('create_index.sql','done');
|
#!/usr/bin/bash.exe
# Windows (msys bash) CI script for Envoy: builds //source/exe:envoy-static
# with Bazel, packages the binary, then runs/builds the Windows test suites.
set -e

# Always report remaining disk space, even if the build fails.
function finish {
  echo "disk space at end of build:"
  df -h
}
trap finish EXIT

echo "disk space at beginning of build:"
df -h

# shellcheck source=ci/setup_cache.sh
. "$(dirname "$0")"/setup_cache.sh

[ -z "${ENVOY_SRCDIR}" ] && export ENVOY_SRCDIR=/c/source

# Split env-provided option strings into arrays (word-split intentionally).
read -ra BAZEL_STARTUP_OPTIONS <<< "${BAZEL_STARTUP_OPTIONS:-}"
# Default to msvc-cl if not overridden
read -ra BAZEL_BUILD_EXTRA_OPTIONS <<< "${BAZEL_BUILD_EXTRA_OPTIONS:---config=msvc-cl}"
read -ra BAZEL_EXTRA_TEST_OPTIONS <<< "${BAZEL_EXTRA_TEST_OPTIONS:-}"

# Set up TMPDIR so bash and non-bash can access
# e.g. TMPDIR=/d/tmp, make a link from /d/d to /d so both bash and Windows programs resolve the
# same path
# This is due to this issue: https://github.com/bazelbuild/rules_foreign_cc/issues/334
# rules_foreign_cc does not currently use bazel output/temp directories by default, it uses mktemp
# which respects the value of the TMPDIR environment variable
drive="$(readlink -f "$TMPDIR" | cut -d '/' -f2)"
if [ ! -e "/$drive/$drive" ]; then
    /c/windows/system32/cmd.exe /c "mklink /d $drive:\\$drive $drive:\\"
fi

BUILD_DIR=${BUILD_DIR:-/c/build}
if [[ ! -d "${BUILD_DIR}" ]]
then
    echo "${BUILD_DIR} mount missing - did you forget -v <something>:${BUILD_DIR}? Creating."
    mkdir -p "${BUILD_DIR}"
fi

# Environment setup.
export TEST_TMPDIR=${BUILD_DIR}/tmp
# Only pull-request builds may reuse cached test results.
[[ "${BUILD_REASON}" != "PullRequest" ]] && BAZEL_EXTRA_TEST_OPTIONS+=(--nocache_test_results)
# ${VAR/\/c/c:} rewrites the msys-style /c/... prefix into Windows c:/... form.
BAZEL_STARTUP_OPTIONS+=("--output_base=${TEST_TMPDIR/\/c/c:}")
BAZEL_BUILD_OPTIONS=(
    -c opt
    --show_task_finish
    --verbose_failures
    "--test_output=errors"
    "--repository_cache=${BUILD_DIR/\/c/c:}/repository_cache"
    "${BAZEL_BUILD_EXTRA_OPTIONS[@]}"
    "${BAZEL_EXTRA_TEST_OPTIONS[@]}")

# Also setup some space for building Envoy standalone.
ENVOY_BUILD_DIR="${BUILD_DIR}"/envoy
mkdir -p "${ENVOY_BUILD_DIR}"

# This is where we copy build deliverables to.
ENVOY_DELIVERY_DIR="${ENVOY_BUILD_DIR}"/source/exe
mkdir -p "${ENVOY_DELIVERY_DIR}"

# Known-failing tests are tagged per toolchain (fails_on_windows/clang_cl).
FAIL_GROUP=windows
if [[ "${BAZEL_BUILD_EXTRA_OPTIONS[*]}" =~ "clang-cl" ]]; then
    FAIL_GROUP=clang_cl
fi

# Pre-Validate updates of all dependency libraries in bazel/foreign_cc and bazel/external
# and complete envoy-static build
bazel "${BAZEL_STARTUP_OPTIONS[@]}" build "${BAZEL_BUILD_OPTIONS[@]}" //bazel/... //source/exe:envoy-static --build_tag_filters=-skip_on_windows

# Copy binary to delivery directory
cp -f bazel-bin/source/exe/envoy-static.exe "${ENVOY_DELIVERY_DIR}/envoy.exe"

# Copy for azp, creating a tar archive
tar czf "${ENVOY_BUILD_DIR}"/envoy_binary.tar.gz -C "${ENVOY_DELIVERY_DIR}" envoy.exe

# Test invocations of known-working tests on Windows
bazel "${BAZEL_STARTUP_OPTIONS[@]}" test "${BAZEL_BUILD_OPTIONS[@]}" //test/... --test_tag_filters=-skip_on_windows,-fails_on_${FAIL_GROUP} --build_tests_only

echo "running flaky test reporting script"
"${ENVOY_SRCDIR}"/ci/flaky_test/run_process_xml.sh "$CI_TARGET"

# Build tests that are known flaky or failing to ensure no compilation regressions
bazel "${BAZEL_STARTUP_OPTIONS[@]}" build "${BAZEL_BUILD_OPTIONS[@]}" //test/... --test_tag_filters=-skip_on_windows,fails_on_${FAIL_GROUP} --build_tests_only

# Summarize known unbuildable or inapplicable tests (example)
# bazel "${BAZEL_STARTUP_OPTIONS[@]}" query 'kind(".*test rule", attr("tags", "skip_on_windows", //test/...))' 2>/dev/null | sort
|
import { memoFetch, setExpirationTime, setMaxMemo } from '../dist/index.mjs';
(async () => {
  // Configure the memo cache before issuing any requests.
  await setMaxMemo(10);
  await setExpirationTime(5000);
  const { data } = await memoFetch(
    'https://naveropenapi.apigw.ntruss.com/map-geocode/v2/geocode?query=' +
      encodeURIComponent('양천구'),
    {
      // Reduce the geocoder response to the first hit's coordinates.
      filter: ({ addresses }) => {
        return {
          x: addresses[0].x,
          y: addresses[0].y,
        };
      },
      headers: {
        'X-NCP-APIGW-API-KEY-ID': 'sample',
        'X-NCP-APIGW-API-KEY': 'sample',
      },
    },
  );
  // Fixed: the trailing annotation was garbled as "/d/" (a regex literal,
  // which made this line a syntax error); it is a comment showing sample output.
  console.log(data); // { x: '126.8666435', y: '37.5170100' }
})();
|
<gh_stars>1-10
package io.github.vampirestudios.obsidian.api.obsidian.world;
import net.minecraft.util.Identifier;
/**
 * Plain data holder for the three sound identifiers of an Obsidian world
 * definition (presumably deserialised from an addon's JSON and mapped onto
 * biome ambience — confirm against the loader that populates it).
 */
public class SoundInformation {
    /** Identifier of the "additions" sound. */
    public Identifier additionsSound;
    /** Identifier of the looping ambient sound. */
    public Identifier loopSound;
    /** Identifier of the mood sound. */
    public Identifier moodSound;
}
|
#!/bin/bash
# Run src/train_test_split_data.py on every JSON file under the directory
# given as $1, with the repository root on PYTHONPATH.
set -eu

# $(...) instead of legacy backticks; quote "$0" against spaces.
PYTHONPATH=$(dirname "$0")/..
export PYTHONPATH

basedir=$1

# -print0 / read -d '' keeps filenames containing whitespace intact — the
# original `for file in $(find ...)` relied on unquoted word splitting
# (shellcheck SC2044).
find "${basedir}" -mindepth 1 -name "*.json" -print0 |
while IFS= read -r -d '' file; do
    python src/train_test_split_data.py \
        --input_file="${file}"
done
|
<reponame>richardmarston/cim4j
package cim4j;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import cim4j.BaseClass;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
import cim4j.UnitSymbol;
import cim4j.UnitMultiplier;
import cim4j.Float;
/*
Capacitance per unit of length.
*/
public class CapacitancePerLength extends BaseClass
{
private BaseClass[] CapacitancePerLength_class_attributes;
private BaseClass[] CapacitancePerLength_primitive_attributes;
private java.lang.String rdfid;
public void setRdfid(java.lang.String id) {
rdfid = id;
}
private abstract interface PrimitiveBuilder {
public abstract BaseClass construct(java.lang.String value);
};
private enum CapacitancePerLength_primitive_builder implements PrimitiveBuilder {
unit(){
public BaseClass construct (java.lang.String value) {
return new UnitSymbol(value);
}
},
multiplier(){
public BaseClass construct (java.lang.String value) {
return new UnitMultiplier(value);
}
},
denominatorUnit(){
public BaseClass construct (java.lang.String value) {
return new UnitSymbol(value);
}
},
denominatorMultiplier(){
public BaseClass construct (java.lang.String value) {
return new UnitMultiplier(value);
}
},
LAST_ENUM() {
public BaseClass construct (java.lang.String value) {
return new cim4j.Integer("0");
}
};
}
private enum CapacitancePerLength_class_attributes_enum {
value,
unit,
multiplier,
denominatorUnit,
denominatorMultiplier,
LAST_ENUM;
}
public CapacitancePerLength() {
CapacitancePerLength_primitive_attributes = new BaseClass[CapacitancePerLength_primitive_builder.values().length];
CapacitancePerLength_class_attributes = new BaseClass[CapacitancePerLength_class_attributes_enum.values().length];
}
public void updateAttributeInArray(CapacitancePerLength_class_attributes_enum attrEnum, BaseClass value) {
try {
CapacitancePerLength_class_attributes[attrEnum.ordinal()] = value;
}
catch (ArrayIndexOutOfBoundsException aoobe) {
System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
}
}
public void updateAttributeInArray(CapacitancePerLength_primitive_builder attrEnum, BaseClass value) {
try {
CapacitancePerLength_primitive_attributes[attrEnum.ordinal()] = value;
}
catch (ArrayIndexOutOfBoundsException aoobe) {
System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
}
}
public void setAttribute(java.lang.String attrName, BaseClass value) {
try {
CapacitancePerLength_class_attributes_enum attrEnum = CapacitancePerLength_class_attributes_enum.valueOf(attrName);
updateAttributeInArray(attrEnum, value);
System.out.println("Updated CapacitancePerLength, setting " + attrName);
}
catch (IllegalArgumentException iae)
{
super.setAttribute(attrName, value);
}
}
/* If the attribute is a String, it is a primitive and we will make it into a BaseClass */
public void setAttribute(java.lang.String attrName, java.lang.String value) {
try {
CapacitancePerLength_primitive_builder attrEnum = CapacitancePerLength_primitive_builder.valueOf(attrName);
updateAttributeInArray(attrEnum, attrEnum.construct(value));
System.out.println("Updated CapacitancePerLength, setting " + attrName + " to: " + value);
}
catch (IllegalArgumentException iae)
{
super.setAttribute(attrName, value);
}
}
public java.lang.String toString(boolean topClass) {
java.lang.String result = "";
java.lang.String indent = "";
if (topClass) {
for (CapacitancePerLength_primitive_builder attrEnum: CapacitancePerLength_primitive_builder.values()) {
BaseClass bc = CapacitancePerLength_primitive_attributes[attrEnum.ordinal()];
if (bc != null) {
result += " CapacitancePerLength." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
}
}
for (CapacitancePerLength_class_attributes_enum attrEnum: CapacitancePerLength_class_attributes_enum.values()) {
BaseClass bc = CapacitancePerLength_class_attributes[attrEnum.ordinal()];
if (bc != null) {
result += " CapacitancePerLength." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
}
}
result += super.toString(true);
}
else {
result += "(CapacitancePerLength) RDFID: " + rdfid;
}
return result;
}
public final java.lang.String debugName = "CapacitancePerLength";
public java.lang.String debugString()
{
return debugName;
}
public void setValue(java.lang.String s) {
System.out.println(debugString() + " is not sure what to do with " + s);
}
/** Factory method: creates a fresh, empty CapacitancePerLength instance. */
public BaseClass construct() {
	return new CapacitancePerLength();
}
};
|
#!/bin/bash
#
# Copyright 2018 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script will orchestrate a sample end-to-end execution of the Hyperledger
# Fabric network.
#
# The end-to-end verification provisions a sample Fabric network consisting of
# two organizations, each maintaining two peers, and a “solo” ordering service.
#
# This verification makes use of two fundamental tools, which are necessary to
# create a functioning transactional network with digital signature validation
# and access control:
#
# * cryptogen - generates the x509 certificates used to identify and
# authenticate the various components in the network.
# * configtxgen - generates the requisite configuration artifacts for orderer
# bootstrap and channel creation.
#
# Each tool consumes a configuration yaml file, within which we specify the topology
# of our network (cryptogen) and the location of our certificates for various
# configuration operations (configtxgen). Once the tools have been successfully run,
# we are able to launch our network. More detail on the tools and the structure of
# the network will be provided later in this document. For now, let's get going...
# prepending $PWD/../bin to PATH to ensure we are picking up the correct binaries
# this may be commented out to resolve installed version of tools if desired
export PATH=${PWD}/../bin:${PWD}:$PATH
export FABRIC_CFG_PATH=${PWD}
# By default we standup a full network.
DEV_MODE=false
# Print the usage message
# (Covers all modes dispatched at the bottom of this script; exits are left
# to the caller.)
function printHelp () {
  echo "Usage: "
  echo "  trade.sh up|down|restart|generate|reset|clean|upgrade|createneworg|startneworg|stopneworg [-c <channel name>] [-f <docker-compose-file>] [-i <imagetag>] [-o <logfile>] [-dev]"
  echo "  trade.sh -h|--help (print this message)"
  echo "    <mode> - one of 'up', 'down', 'restart' or 'generate'"
  echo "      - 'up' - bring up the network with docker-compose up"
  echo "      - 'down' - clear the network with docker-compose down"
  echo "      - 'restart' - restart the network"
  echo "      - 'generate' - generate required certificates and genesis block"
  echo "      - 'reset' - delete chaincode containers while keeping network artifacts"
  echo "      - 'clean' - delete network artifacts"
  echo "      - 'upgrade' - upgrade the network from v1.0.x to v1.1"
  echo "    -c <channel name> - channel name to use (defaults to \"tradechannel\")"
  echo "    -f <docker-compose-file> - specify which docker-compose file use (defaults to docker-compose-e2e.yaml)"
  echo "    -i <imagetag> - the tag to be used to launch the network (defaults to \"latest\")"
  echo "    -d - Apply command to the network in dev mode."
  echo
  echo "Typically, one would first generate the required certificates and "
  echo "genesis block, then bring up the network. e.g.:"
  echo
  echo "    trade.sh generate -c tradechannel"
  echo "    trade.sh up -c tradechannel -o logs/network.log"
  echo "    trade.sh up -c tradechannel -i 1.1.0-alpha"
  echo "    trade.sh down -c tradechannel"
  echo "    trade.sh upgrade -c tradechannel"
  echo
  echo "Taking all defaults:"
  echo "    trade.sh generate"
  echo "    trade.sh up"
  echo "    trade.sh down"
}
# Keeps pushd silent
# (Shadows the builtin so directory-stack chatter does not pollute output.)
pushd () {
  command pushd "$@" > /dev/null
}
# Keeps popd silent
popd () {
  command popd "$@" > /dev/null
}
# Ask user for confirmation to proceed.
# Accepts y/Y/<Enter> to continue, n/N to abort (exit 1); any other answer
# re-prompts. A loop replaces the original self-recursion (which could grow
# the call stack on repeated invalid input), and read -r stops backslashes
# in the answer from being interpreted.
function askProceed () {
  while true; do
    read -r -p "Continue? [Y/n] " ans
    case "$ans" in
      y|Y|"" )
        echo "proceeding ..."
        return 0
        ;;
      n|N )
        echo "exiting..."
        exit 1
        ;;
      * )
        echo "invalid response"
        ;;
    esac
  done
}
# Obtain CONTAINER_IDS and remove them
# TODO Might want to make this optional - could clear other containers
function clearContainers () {
  # All container IDs, running or not.
  CONTAINER_IDS=$(docker ps -aq)
  # Command substitution strips trailing whitespace, so the old extra
  # comparison against a literal single space was dead code; -z suffices.
  if [ -z "$CONTAINER_IDS" ]; then
    echo "---- No containers available for deletion ----"
  else
    # Word-splitting of the ID list is intentional here.
    docker rm -f $CONTAINER_IDS
  fi
}
# Delete any images that were generated as a part of this setup
# specifically the following images are often left behind:
# TODO list generated image naming patterns
function removeUnwantedImages() {
  # Chaincode/dev images are matched by name pattern.
  DOCKER_IMAGE_IDS=$(docker images | grep "dev\|none\|test-vp\|peer[0-9]-" | awk '{print $3}')
  # Command substitution strips trailing whitespace, so -z alone is the
  # correct emptiness test (the old `== " "` branch could never fire).
  if [ -z "$DOCKER_IMAGE_IDS" ]; then
    echo "---- No images available for deletion ----"
  else
    docker rmi -f $DOCKER_IMAGE_IDS
  fi
}
# Do some basic sanity checking to make sure that the appropriate versions of fabric
# binaries/images are available. In the future, additional checking for the presence
# of go or other items could be added.
function checkPrereqs() {
  # Note, we check configtxlator externally because it does not require a config file, and peer in the
  # docker image because of FAB-8551 that makes configtxlator return 'development version' in docker
  LOCAL_VERSION=$(configtxlator version | sed -ne 's/ Version: //p')
  DOCKER_IMAGE_VERSION=$(docker run --rm hyperledger/fabric-tools:$IMAGETAG peer version | sed -ne 's/ Version: //p'|head -1)
  echo "LOCAL_VERSION=$LOCAL_VERSION"
  echo "DOCKER_IMAGE_VERSION=$DOCKER_IMAGE_VERSION"
  # Version skew is only a warning: the script proceeds either way.
  if [ "$LOCAL_VERSION" != "$DOCKER_IMAGE_VERSION" ] ; then
    echo "=================== WARNING ==================="
    echo "  Local fabric binaries and docker images are  "
    echo "  out of  sync. This may cause problems.       "
    echo "==============================================="
  fi
}
# Generate the needed certificates, the genesis block and start the network.
function networkUp () {
  checkPrereqs
  # If we are in dev mode, we move to the devmode directory
  if [ "$DEV_MODE" = true ] ; then
    pushd ./devmode
    export FABRIC_CFG_PATH=${PWD}
  fi
  # generate artifacts if they don't exist
  if [ ! -d "crypto-config" ]; then
    generateCerts
    replacePrivateKey
    generateChannelArtifacts
  fi
  # Create folder for docker network logs
  LOG_DIR=$(dirname $LOG_FILE)
  if [ ! -d $LOG_DIR ]
  then
    mkdir -p $LOG_DIR
  fi
  # Compose is launched in the background; all output goes to $LOG_FILE.
  IMAGE_TAG=$IMAGETAG docker-compose -f $COMPOSE_FILE up >$LOG_FILE 2>&1 &
  if [ "$DEV_MODE" = true ] ; then
    popd
    export FABRIC_CFG_PATH=${PWD}
  fi
  # NOTE(review): $? here reflects the preceding if/popd, not the backgrounded
  # docker-compose, so this check cannot actually detect a compose startup
  # failure — confirm and consider inspecting the compose process instead.
  if [ $? -ne 0 ]; then
    echo "ERROR !!!! Unable to start network"
    exit 1
  fi
}
# Generate the needed certificates, the configuration, and start the network components for the new org.
function newOrgNetworkUp () {
  checkPrereqs
  # generate artifacts if they don't exist
  if [ ! -d "crypto-config/peerOrganizations/exportingentityorg.trade.com" ]; then
    generateCertsForNewOrg
    replacePrivateKeyForNewOrg
    generateChannelConfigForNewOrg
  fi
  # Create folder for docker network logs
  LOG_DIR=$(dirname $LOG_FILE_NEW_ORG)
  if [ ! -d $LOG_DIR ]
  then
    mkdir -p $LOG_DIR
  fi
  # Compose is launched in the background; output goes to $LOG_FILE_NEW_ORG.
  IMAGE_TAG=$IMAGETAG docker-compose -f $COMPOSE_FILE_NEW_ORG up >$LOG_FILE_NEW_ORG 2>&1 &
  # NOTE(review): $? is the status of backgrounding the job (effectively
  # always 0), not of docker-compose itself — this check cannot detect a
  # startup failure; confirm intended behavior.
  if [ $? -ne 0 ]; then
    echo "ERROR !!!! Unable to start network"
    exit 1
  fi
}
# Upgrade the network from one version to another
# If the new image tag (now in the IMAGETAG variable) is not passed in the command line using the "-i" switch:
# this assumes that the new iamge has already been tagged with "latest".
# Stop the orderer and peers, backup the ledger from orderer and peers, cleanup chaincode containers and images
# and relaunch the orderer and peers with latest tag
function upgradeNetwork () {
  # Refuse to upgrade unless the orderer's ledger lives on a docker volume;
  # otherwise stopping the container would lose the ledger data.
  docker inspect -f '{{.Config.Volumes}}' orderer.trade.com |grep -q '/var/hyperledger/production/orderer'
  if [ $? -ne 0 ]; then
    echo "ERROR !!!! This network does not appear to be using volumes for its ledgers, did you start from fabric-samples >= v1.0.6?"
    exit 1
  fi
  LEDGERS_BACKUP=./ledgers-backup
  # create ledger-backup directory
  mkdir -p $LEDGERS_BACKUP
  export IMAGE_TAG=$IMAGETAG
  COMPOSE_FILES="-f $COMPOSE_FILE"
  echo "Upgrading orderer"
  docker-compose $COMPOSE_FILES stop orderer.trade.com
  docker cp -a orderer.trade.com:/var/hyperledger/production/orderer $LEDGERS_BACKUP/orderer.trade.com
  docker-compose $COMPOSE_FILES up --no-deps orderer.trade.com
  # Upgrade each peer in turn: stop, back up its ledger, drop its chaincode
  # containers/images (they are rebuilt on demand), then restart it.
  for PEER in peer0.exporterorg.trade.com peer0.importerorg.trade.com peer0.carrierorg.trade.com peer0.regulatororg.trade.com; do
    echo "Upgrading peer $PEER"
    # Stop the peer and backup its ledger
    docker-compose $COMPOSE_FILES stop $PEER
    docker cp -a $PEER:/var/hyperledger/production $LEDGERS_BACKUP/$PEER/
    # Remove any old containers and images for this peer
    CC_CONTAINERS=$(docker ps | grep dev-$PEER | awk '{print $1}')
    if [ -n "$CC_CONTAINERS" ] ; then
      docker rm -f $CC_CONTAINERS
    fi
    CC_IMAGES=$(docker images | grep dev-$PEER | awk '{print $1}')
    if [ -n "$CC_IMAGES" ] ; then
      docker rmi -f $CC_IMAGES
    fi
    # Start the peer again
    docker-compose $COMPOSE_FILES up --no-deps $PEER
  done
}
# Bring down running network
function networkDown () {
  # If we are in dev mode, we move to the devmode directory
  if [ "$DEV_MODE" = true ] ; then
    pushd ./devmode
  fi
  # Stop and remove the composed services together with their volumes.
  docker-compose -f $COMPOSE_FILE down --volumes
  for PEER in peer0.exporterorg.trade.com peer0.importerorg.trade.com peer0.carrierorg.trade.com peer0.regulatororg.trade.com; do
    # Remove any old containers and images for this peer
    CC_CONTAINERS=$(docker ps -a | grep dev-$PEER | awk '{print $1}')
    if [ -n "$CC_CONTAINERS" ] ; then
      docker rm -f $CC_CONTAINERS
    fi
  done
  if [ "$DEV_MODE" = true ] ; then
    popd
  fi
}
# Bring down running network components of the new org
function newOrgNetworkDown () {
  # Stop and remove the new org's services together with their volumes.
  docker-compose -f $COMPOSE_FILE_NEW_ORG down --volumes
  for PEER in peer0.exportingentityorg.trade.com; do
    # Remove any old containers and images for this peer
    CC_CONTAINERS=$(docker ps -a | grep dev-$PEER | awk '{print $1}')
    if [ -n "$CC_CONTAINERS" ] ; then
      docker rm -f $CC_CONTAINERS
    fi
  done
}
# Delete network artifacts
# Removes all containers/images left by the sample plus every generated
# artifact (certs, channel artifacts, customized compose files, client certs).
function networkClean () {
  #Cleanup the chaincode containers
  clearContainers
  #Cleanup images
  removeUnwantedImages
  # If we are in dev mode, we move to the devmode directory
  if [ "$DEV_MODE" = true ] ; then
    pushd ./devmode
  fi
  # remove orderer block and other channel configuration transactions and certs
  rm -rf channel-artifacts crypto-config add_org/crypto-config
  # remove the docker-compose yaml file that was customized to the example
  rm -f docker-compose-e2e.yaml add_org/docker-compose-exportingEntityOrg.yaml
  # remove client certs
  rm -rf client-certs
  if [ "$DEV_MODE" = true ] ; then
    popd
  fi
}
# Using docker-compose-e2e-template.yaml, replace constants with private key file names
# generated by the cryptogen tool and output a docker-compose.yaml specific to this
# configuration
function replacePrivateKey () {
  # Copy the template to the file that will be modified to add the private key
  cp docker-compose-e2e-template.yaml docker-compose-e2e.yaml
  if [ "$DEV_MODE" = true ] ; then
    replaceOrgPrivateKey devorg DEVORG_CA_PRIVATE_KEY
  else
    # Production topology: substitute the CA key for each of the four orgs.
    replaceOrgPrivateKey exporterorg EXPORTER_CA_PRIVATE_KEY
    replaceOrgPrivateKey importerorg IMPORTER_CA_PRIVATE_KEY
    replaceOrgPrivateKey carrierorg CARRIER_CA_PRIVATE_KEY
    replaceOrgPrivateKey regulatororg REGULATOR_CA_PRIVATE_KEY
  fi
}
# Helper for replacePrivateKey: substitutes $2 (the template placeholder) in
# docker-compose-e2e.yaml with the name of the CA private key file generated
# under crypto-config/peerOrganizations/$1.trade.com/ca/. Factored out of
# five near-identical copy-pasted stanzas; behavior is unchanged.
function replaceOrgPrivateKey () {
  local ORG=$1
  local PLACEHOLDER=$2
  CURRENT_DIR=$PWD
  cd crypto-config/peerOrganizations/${ORG}.trade.com/ca/
  PRIV_KEY=$(ls *_sk)
  cd "$CURRENT_DIR"
  # BSD sed (macOS) requires an explicit empty backup-suffix argument to -i.
  if [ $(uname -s) == 'Darwin' ] ; then
    sed -i '' "s/${PLACEHOLDER}/${PRIV_KEY}/g" docker-compose-e2e.yaml
  else
    sed -i "s/${PLACEHOLDER}/${PRIV_KEY}/g" docker-compose-e2e.yaml
  fi
}
# Like replacePrivateKey, but for the new exporting-entity org's compose file.
function replacePrivateKeyForNewOrg () {
  # Copy the template to the file that will be modified to add the private key
  cp add_org/docker-compose-exportingEntityOrg-template.yaml add_org/docker-compose-exportingEntityOrg.yaml
  # The next steps will replace the template's contents with the
  # actual values of the private key file names for the two CAs.
  CURRENT_DIR=$PWD
  cd crypto-config/peerOrganizations/exportingentityorg.trade.com/ca/
  PRIV_KEY=$(ls *_sk)
  cd "$CURRENT_DIR"
  # BSD sed (macOS) requires an explicit empty backup-suffix argument to -i.
  if [ $(uname -s) == 'Darwin' ] ; then
    sed -i '' "s/EXPORTINGENTITY_CA_PRIVATE_KEY/${PRIV_KEY}/g" add_org/docker-compose-exportingEntityOrg.yaml
  else
    sed -i "s/EXPORTINGENTITY_CA_PRIVATE_KEY/${PRIV_KEY}/g" add_org/docker-compose-exportingEntityOrg.yaml
  fi
}
# We will use the cryptogen tool to generate the cryptographic material (x509 certs)
# for our various network entities. The certificates are based on a standard PKI
# implementation where validation is achieved by reaching a common trust anchor.
#
# Cryptogen consumes a file - ``crypto-config.yaml`` - that contains the network
# topology and allows us to generate a library of certificates for both the
# Organizations and the components that belong to those Organizations. Each
# Organization is provisioned a unique root certificate (``ca-cert``), that binds
# specific components (peers and orderers) to that Org. Transactions and communications
# within Fabric are signed by an entity's private key (``keystore``), and then verified
# by means of a public key (``signcerts``). You will notice a "count" variable within
# this file. We use this to specify the number of peers per Organization; in our
# case it's two peers per Org. The rest of this template is extremely
# self-explanatory.
#
# After we run the tool, the certs will be parked in a folder titled ``crypto-config``.
# Generates Org certs using cryptogen tool
# Regenerates crypto-config from scratch (any existing material is deleted).
function generateCerts (){
  which cryptogen
  if [ "$?" -ne 0 ]; then
    echo "cryptogen tool not found. exiting"
    exit 1
  fi
  echo
  echo "##########################################################"
  echo "##### Generate certificates using cryptogen tool #########"
  echo "##########################################################"
  # If we are in dev mode, we move to the devmode directory
  if [ "$DEV_MODE" = true ] ; then
    # Guard against double-pushd when networkUp already moved us here.
    if [ $(basename $PWD) != "devmode" ] ; then
      pushd ./devmode
      export FABRIC_CFG_PATH=${PWD}
    fi
  fi
  if [ -d "crypto-config" ]; then
    rm -Rf crypto-config
  fi
  # set -x/-+x bracket the command so it is echoed for the user.
  set -x
  cryptogen generate --config=./crypto-config.yaml
  res=$?
  set +x
  if [ $res -ne 0 ]; then
    echo "Failed to generate certificates..."
    exit 1
  fi
  echo
}
# Generates certs for the new exporting-entity org only; the rest of
# crypto-config is left untouched.
function generateCertsForNewOrg (){
  which cryptogen
  if [ "$?" -ne 0 ]; then
    echo "cryptogen tool not found. exiting"
    exit 1
  fi
  echo
  echo "######################################################################"
  echo "##### Generate certificates for new org using cryptogen tool #########"
  echo "######################################################################"
  if [ -d "crypto-config/peerOrganizations/exportingentityorg.trade.com" ]; then
    rm -Rf crypto-config/peerOrganizations/exportingentityorg.trade.com
  fi
  set -x
  cryptogen generate --config=./add_org/crypto-config.yaml
  res=$?
  set +x
  if [ $res -ne 0 ]; then
    echo "Failed to generate certificates..."
    exit 1
  fi
  echo
}
# The `configtxgen tool is used to create four artifacts: orderer **bootstrap
# block**, fabric **channel configuration transaction**, and two **anchor
# peer transactions** - one for each Peer Org.
#
# The orderer block is the genesis block for the ordering service, and the
# channel transaction file is broadcast to the orderer at channel creation
# time. The anchor peer transactions, as the name might suggest, specify each
# Org's anchor peer on this channel.
#
# Configtxgen consumes a file - ``configtx.yaml`` - that contains the definitions
# for the sample network. There are five members - one Orderer Org (``TradeOrdererOrg``)
# and four Peer Orgs (``ExporterOrg``, ``ImporterOrg``, ``CarrierOrg`` & ``RegulatorOrg``)
# each managing and maintaining one peer node.
# This file also specifies a consortium - ``TradeConsortium`` - consisting of our
# four Peer Orgs. Pay specific attention to the "Profiles" section at the top of
# this file. You will notice that we have two unique headers. One for the orderer genesis
# block - ``FourOrgsTradeOrdererGenesis`` - and one for our channel - ``FourOrgsTradeChannel``.
# These headers are important, as we will pass them in as arguments when we create
# our artifacts. This file also contains two additional specifications that are worth
# noting. Firstly, we specify the anchor peers for each Peer Org
# (``peer0.exporterorg.trade.com`` & ``peer0.importerorg.trade.com``). Secondly, we point to
# the location of the MSP directory for each member, in turn allowing us to store the
# root certificates for each Org in the orderer genesis block. This is a critical
# concept. Now any network entity communicating with the ordering service can have
# its digital signature verified.
#
# This function will generate the crypto material and our four configuration
# artifacts, and subsequently output these files into the ``channel-artifacts``
# folder.
#
# If you receive the following warning, it can be safely ignored:
#
# [bccsp] GetDefault -> WARN 001 Before using BCCSP, please call InitFactories(). Falling back to bootBCCSP.
#
# You can ignore the logs regarding intermediate certs, we are not using them in
# this crypto implementation.
# Generate orderer genesis block, channel configuration transaction and
# anchor peer update transactions
# Fix: the Importer/Carrier/Regulator invocations previously passed
# "-channelID $CHANNEL_NAME" twice; the duplicate flag has been removed.
function generateChannelArtifacts() {
  which configtxgen
  if [ "$?" -ne 0 ]; then
    echo "configtxgen tool not found. exiting"
    exit 1
  fi
  mkdir -p channel-artifacts
  echo "###########################################################"
  echo "######### Generating Orderer Genesis block ##############"
  echo "###########################################################"
  # Dev mode uses the single-org profiles; production uses the four-org ones.
  if [ "$DEV_MODE" = true ] ; then
    PROFILE=OneOrgTradeOrdererGenesis
    CHANNEL_PROFILE=OneOrgTradeChannel
  else
    PROFILE=FourOrgsTradeOrdererGenesis
    CHANNEL_PROFILE=FourOrgsTradeChannel
  fi
  # Note: For some unknown reason (at least for now) the block file can't be
  # named orderer.genesis.block or the orderer will fail to launch!
  set -x
  configtxgen -profile $PROFILE -outputBlock ./channel-artifacts/genesis.block
  res=$?
  set +x
  if [ $res -ne 0 ]; then
    echo "Failed to generate orderer genesis block..."
    exit 1
  fi
  echo
  echo "###################################################################"
  echo "### Generating channel configuration transaction 'channel.tx' ###"
  echo "###################################################################"
  set -x
  configtxgen -profile $CHANNEL_PROFILE -outputCreateChannelTx ./channel-artifacts/channel.tx -channelID $CHANNEL_NAME
  res=$?
  set +x
  if [ $res -ne 0 ]; then
    echo "Failed to generate channel configuration transaction..."
    exit 1
  fi
  # Anchor peer updates only exist in the four-org (non-dev) topology.
  if [ "$DEV_MODE" = false ] ; then
    echo
    echo "#####################################################################"
    echo "####### Generating anchor peer update for ExporterOrgMSP ##########"
    echo "#####################################################################"
    set -x
    configtxgen -profile $CHANNEL_PROFILE -outputAnchorPeersUpdate ./channel-artifacts/ExporterOrgMSPanchors.tx -channelID $CHANNEL_NAME -asOrg ExporterOrgMSP
    res=$?
    set +x
    if [ $res -ne 0 ]; then
      echo "Failed to generate anchor peer update for ExporterOrgMSP..."
      exit 1
    fi
    echo
    echo "#####################################################################"
    echo "####### Generating anchor peer update for ImporterOrgMSP ##########"
    echo "#####################################################################"
    set -x
    configtxgen -profile $CHANNEL_PROFILE -outputAnchorPeersUpdate \
    ./channel-artifacts/ImporterOrgMSPanchors.tx -channelID $CHANNEL_NAME -asOrg ImporterOrgMSP
    res=$?
    set +x
    if [ $res -ne 0 ]; then
      echo "Failed to generate anchor peer update for ImporterOrgMSP..."
      exit 1
    fi
    echo
    echo "####################################################################"
    echo "####### Generating anchor peer update for CarrierOrgMSP ##########"
    echo "####################################################################"
    set -x
    configtxgen -profile $CHANNEL_PROFILE -outputAnchorPeersUpdate \
    ./channel-artifacts/CarrierOrgMSPanchors.tx -channelID $CHANNEL_NAME -asOrg CarrierOrgMSP
    res=$?
    set +x
    if [ $res -ne 0 ]; then
      echo "Failed to generate anchor peer update for CarrierOrgMSP..."
      exit 1
    fi
    echo
    echo "######################################################################"
    echo "####### Generating anchor peer update for RegulatorOrgMSP ##########"
    echo "######################################################################"
    set -x
    configtxgen -profile $CHANNEL_PROFILE -outputAnchorPeersUpdate \
    ./channel-artifacts/RegulatorOrgMSPanchors.tx -channelID $CHANNEL_NAME -asOrg RegulatorOrgMSP
    res=$?
    set +x
    if [ $res -ne 0 ]; then
      echo "Failed to generate anchor peer update for RegulatorOrgMSP..."
      exit 1
    fi
    echo
  fi
}
# Generate configuration (policies, certificates) for new org in JSON format
function generateChannelConfigForNewOrg() {
  which configtxgen
  if [ "$?" -ne 0 ]; then
    echo "configtxgen tool not found. exiting"
    exit 1
  fi
  mkdir -p channel-artifacts
  echo "####################################################################################"
  echo "######### Generating Channel Configuration for Exporting Entity Org ##############"
  echo "####################################################################################"
  set -x
  # Prefix assignment scopes FABRIC_CFG_PATH to this one command. The previous
  # form `FABRIC_CFG_PATH=... && configtxgen` permanently overwrote the
  # exported FABRIC_CFG_PATH (set at the top of the script), silently
  # repointing every later configtxgen/cryptogen call at add_org/.
  FABRIC_CFG_PATH=${PWD}/add_org/ configtxgen -printOrg ExportingEntityOrgMSP -channelID $CHANNEL_NAME > ./channel-artifacts/exportingEntityOrg.json
  res=$?
  set +x
  if [ $res -ne 0 ]; then
    echo "Failed to generate channel configuration for exportingentity org..."
    exit 1
  fi
  echo
}
# channel name (overrides default 'testchainid')
CHANNEL_NAME="tradechannel"
# use this as the default docker-compose yaml definition
COMPOSE_FILE=docker-compose-e2e.yaml
# compose file describing the additional exporting-entity org's components
COMPOSE_FILE_NEW_ORG=add_org/docker-compose-exportingEntityOrg.yaml
# default image tag
IMAGETAG="latest"
# default log file
LOG_FILE="logs/network.log"
# log file for the new-org network components
LOG_FILE_NEW_ORG="logs/network-neworg.log"
# Parse commandline args
MODE=$1;shift
# Determine whether starting, stopping, restarting or generating for announce
if [ "$MODE" == "up" ]; then
EXPMODE="Starting"
elif [ "$MODE" == "down" ]; then
EXPMODE="Stopping"
elif [ "$MODE" == "restart" ]; then
EXPMODE="Restarting"
elif [ "$MODE" == "clean" ]; then
EXPMODE="Cleaning"
elif [ "$MODE" == "generate" ]; then
EXPMODE="Generating certs and genesis block"
elif [ "$MODE" == "upgrade" ]; then
EXPMODE="Upgrading the network"
elif [ "$MODE" == "createneworg" ]; then
EXPMODE="Generating certs and configuration for new org"
elif [ "$MODE" == "startneworg" ]; then
EXPMODE="Starting peer and CA for new org"
elif [ "$MODE" == "stopneworg" ]; then
EXPMODE="Stopping peer and CA for new org"
else
printHelp
exit 1
fi
# Parse the remaining flags. -d is a boolean switch (see printHelp): it was
# previously declared as "d:" (requiring an argument) and stored the raw
# OPTARG into DEV_MODE, so a plain "-d" consumed the next argument and DEV
# mode was only enabled if the user happened to pass the literal word "true".
while getopts "h?m:c:f:i:o:d" opt; do
  case "$opt" in
    h|\?)
      printHelp
      exit 0
      ;;
    c) CHANNEL_NAME=$OPTARG
      ;;
    f) COMPOSE_FILE=$OPTARG
      ;;
    i) IMAGETAG=`uname -m`"-"$OPTARG
      ;;
    o) LOG_FILE=$OPTARG
      ;;
    d) DEV_MODE=true
      ;;
  esac
done
# Announce what was requested
echo "${EXPMODE} with channel '${CHANNEL_NAME}'"
# ask for confirmation to proceed
askProceed
# Dispatch on the requested mode (each branch maps to a function above).
#Create the network using docker compose
if [ "${MODE}" == "up" ]; then
  networkUp
elif [ "${MODE}" == "down" ]; then ## Clear the network
  networkDown
elif [ "${MODE}" == "generate" ]; then ## Generate Artifacts
  generateCerts
  replacePrivateKey
  generateChannelArtifacts
elif [ "${MODE}" == "restart" ]; then ## Restart the network
  networkDown
  networkUp
elif [ "${MODE}" == "reset" ]; then ## Delete chaincode containers while keeping network artifacts
  removeUnwantedImages
elif [ "${MODE}" == "clean" ]; then ## Delete network artifacts
  networkClean
elif [ "${MODE}" == "upgrade" ]; then ## Upgrade the network from v1.0.x to v1.1
  upgradeNetwork
elif [ "${MODE}" == "createneworg" ]; then ## Generate artifacts for new org
  generateCertsForNewOrg
  replacePrivateKeyForNewOrg
  generateChannelConfigForNewOrg
elif [ "${MODE}" == "startneworg" ]; then ## Start the network components for the new org
  newOrgNetworkUp
elif [ "${MODE}" == "stopneworg" ]; then ## Stop the network components for the new org
  newOrgNetworkDown
else
  printHelp
  exit 1
fi
|
#!/usr/bin/env bash
# disable transparent hugepage
# Turns THP off immediately and appends the same commands to /etc/rc.local so
# the setting is reapplied at boot; the grep guard makes the append idempotent.
if (! grep -q 'disable transparent hugepage' /etc/rc.local); then
echo 'never' >/sys/kernel/mm/transparent_hugepage/enabled
echo 'never' >/sys/kernel/mm/transparent_hugepage/defrag
cat >>/etc/rc.local <<-EOF
# disable transparent hugepage
echo 'never' > /sys/kernel/mm/transparent_hugepage/enabled
echo 'never' > /sys/kernel/mm/transparent_hugepage/defrag
EOF
# NOTE(review): settings were appended to /etc/rc.local but the chmod targets
# /etc/rc.d/rc.local — the same file via symlink on RHEL/CentOS; confirm for
# other distributions.
chmod +x /etc/rc.d/rc.local
fi
exit 0
|
const conection = require('../helper/conection');
const AppError = require('../errors/AppErrors');
const CreateUser = require('../service/CreateUserService');
const AuthUser = require('../service/AuthUserService');
module.exports = {
async auth(request,response){
const { email,senha } = request.body;
const authUserService = new AuthUser();
const auth = await authUserService.execute({email,senha});
return response.json(auth);
},
async createUser(request,response){
const { email,senha,nome,idade,sexo } = request.body;
const createUserService = new CreateUser();
const user = await createUserService.execute({ email,senha,nome,idade,sexo });
return response.json(user);
}
} |
package styled
import (
"strings"
)
// Transform transforms a Text according to a transformer. It does nothing if
// the transformer is not valid.
func Transform(t Text, transformer string) Text {
	transform := FindTransformer(transformer)
	if transform == nil {
		// Unknown transformer: return the input untouched.
		return t
	}
	// Clone first so the caller's Text is never mutated.
	t = t.Clone()
	for i := range t {
		transform(t[i])
	}
	return t
}
// FindTransformer finds the named transformer, a function that mutates a
// *Segment. If the name is not a valid transformer, it returns nil.
//
// Recognized names (matched in this order, so "default"/"bg-default" win
// over the generic color and prefix rules):
//   - "default" / "bg-default": clear the foreground / background color
//   - "bg-<color>": set the background to <color>
//   - "no-<field>" / "toggle-<field>": clear / flip a bool styling field
//   - a bare color name: set the foreground
//   - a bare bool field name (e.g. "bold"): set that field
func FindTransformer(name string) func(*Segment) {
	switch {
	// Catch special colors early
	case name == "default":
		return func(s *Segment) { s.Foreground = "" }
	case name == "bg-default":
		return func(s *Segment) { s.Background = "" }
	case strings.HasPrefix(name, "bg-"):
		// Only accept the prefix form when the suffix is a real color name.
		if color := name[len("bg-"):]; isValidColorName(color) {
			return func(s *Segment) { s.Background = color }
		}
	case strings.HasPrefix(name, "no-"):
		if f := boolFieldAccessor(name[len("no-"):]); f != nil {
			return func(s *Segment) { *f(s) = false }
		}
	case strings.HasPrefix(name, "toggle-"):
		if f := boolFieldAccessor(name[len("toggle-"):]); f != nil {
			return func(s *Segment) {
				p := f(s)
				*p = !*p
			}
		}
	default:
		// Bare names: try a foreground color first, then a bool field.
		if isValidColorName(name) {
			return func(s *Segment) { s.Foreground = name }
		}
		if f := boolFieldAccessor(name); f != nil {
			return func(s *Segment) { *f(s) = true }
		}
	}
	// Unknown or malformed transformer name.
	return nil
}
// boolFieldAccessor maps the name of a bool styling attribute to a function
// that returns a pointer to the corresponding field of a Segment. It returns
// nil when the name matches no known field.
func boolFieldAccessor(name string) func(*Segment) *bool {
	accessors := map[string]func(*Segment) *bool{
		"bold":       func(s *Segment) *bool { return &s.Bold },
		"dim":        func(s *Segment) *bool { return &s.Dim },
		"italic":     func(s *Segment) *bool { return &s.Italic },
		"underlined": func(s *Segment) *bool { return &s.Underlined },
		"blink":      func(s *Segment) *bool { return &s.Blink },
		"inverse":    func(s *Segment) *bool { return &s.Inverse },
	}
	// Missing keys yield the zero value, i.e. a nil func.
	return accessors[name]
}
|
#!/usr/bin/env bash
# Packages the built reactive-blerpc jar as a directly runnable file:
# the launcher script is concatenated with the jar and the result is
# marked executable under generated/.
mkdir -p generated
cat ci/exe-from-jar.sh reactive-blerpc/build/libs/reactive-blerpc-jdk8.jar > generated/reactive-blerpc
chmod +x generated/reactive-blerpc
|
<gh_stars>0
import { resolve, relative } from 'path';
import slug from 'slugify';
import { removeExtension } from './path';
// Derives a stable icon id from a file path: the path relative to `root`,
// extension removed, with runs of path separators and dots collapsed to a
// single dash, then slugified. A leading three-digit ordering prefix such as
// "001-" is stripped.
export const getIconId = (filepath: string, root: string) => {
  let iconId = removeExtension(relative(resolve(root), resolve(filepath))).replace(/(\/|\\|\.)+/g, '-');
  // Drop a numeric sort prefix like "123-" ("-" needs no escaping here).
  if (/^\d{3}-/.test(iconId)) {
    // slice() replaces the deprecated String.prototype.substr().
    iconId = iconId.slice(4);
  }
  return slug(iconId, { replacement: '-' });
}
|
import hashlib
def get_long_token(short_token):
    """Derive a deterministic long-lived token from a short-lived one.

    The short token is UTF-8 encoded and digested with SHA-256; the
    64-character hexadecimal digest serves as the long-lived token.
    """
    digest = hashlib.sha256(short_token.encode('utf-8'))
    return digest.hexdigest()
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.
package frc.robot;
import edu.wpi.first.math.controller.PIDController;
import edu.wpi.first.math.controller.RamseteController;
import edu.wpi.first.math.controller.SimpleMotorFeedforward;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Translation2d;
import edu.wpi.first.math.trajectory.Trajectory;
import edu.wpi.first.math.trajectory.TrajectoryConfig;
import edu.wpi.first.math.trajectory.constraint.DifferentialDriveVoltageConstraint;
import edu.wpi.first.wpilibj.GenericHID;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.XboxController;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import frc.robot.commands.ExampleCommand;
import frc.robot.commands.FiveBallAuton;
import frc.robot.subsystems.Drivetrain;
import frc.robot.subsystems.ExampleSubsystem;
import frc.util.TrajectoryCache;
import edu.wpi.first.wpilibj2.command.Command;
import edu.wpi.first.wpilibj2.command.RamseteCommand;
import edu.wpi.first.wpilibj2.command.RunCommand;
import static frc.robot.Constants.*;
import java.nio.file.Paths;
/**
* This class is where the bulk of the robot should be declared. Since Command-based is a
* "declarative" paradigm, very little robot logic should actually be handled in the {@link Robot}
* periodic methods (other than the scheduler calls). Instead, the structure of the robot (including
* subsystems, commands, and button mappings) should be declared here.
*/
public class RobotContainer {
// The robot's subsystems and commands are defined here...
private final ExampleSubsystem m_exampleSubsystem = new ExampleSubsystem();
public final static Drivetrain drivetrain = Drivetrain.getInstance();
private final ExampleCommand m_autoCommand = new ExampleCommand(m_exampleSubsystem);
public final Joystick driverJoystick = new Joystick(0);
public final XboxController driverXbox = new XboxController(2);
public SendableChooser<String> pathSelector;
/** The container for the robot. Contains subsystems, OI devices, and commands. */
public RobotContainer() {
	// Configure the button bindings
	configureButtonBindings();
	// Publish the autonomous path chooser to the dashboard, then fill it.
	pathSelector = new SendableChooser<>();
	SmartDashboard.putData(pathSelector);
	loadTrajectoryPaths();
	// Default teleop command: arcade drive from the Xbox controller
	// (axis 1 negated for forward/back, axis 4 for rotation).
	drivetrain.setDefaultCommand(new RunCommand(
		() -> drivetrain.arcadeDrive(-driverXbox.getRawAxis(1), driverXbox.getRawAxis(4), true)
		, drivetrain));
}
/**
 * Use this method to define your button->command mappings. Buttons can be created by
 * instantiating a {@link GenericHID} or one of its subclasses ({@link
 * edu.wpi.first.wpilibj.Joystick} or {@link XboxController}), and then passing it to a {@link
 * edu.wpi.first.wpilibj2.command.button.JoystickButton}.
 */
private void configureButtonBindings() {} // no bindings configured yet
/**
 * Clears the trajectory cache, registers the selectable autonomous paths on
 * the dashboard chooser, and preloads their trajectories into the cache.
 * "5 Ball Auton" is intentionally not cached: getAutonomousCommand builds it
 * as a FiveBallAuton command group instead of a stored trajectory.
 */
public void loadTrajectoryPaths() {
	TrajectoryCache.clear();
	// sendCacheTrajectory("Slalom", "output/SlalomPath");
	// pathSelector.setDefaultOption("ThreeBallAuton", "ThreeBallAuton");
	// pathSelector.addOption("SixBallBackAuton", "SixBallBackAuton");
	// NOTE(review): no default option is set, so SendableChooser.getSelected()
	// may return null before the driver picks a path — confirm callers handle it.
	pathSelector.addOption("Straight", "Straight");
	pathSelector.addOption("3 Ball Test", "3 Ball Test");
	pathSelector.addOption("HalfArc", "HalfArc");
	pathSelector.addOption("BackHalfArc", "BackHalfArc");
	pathSelector.addOption("TerminalLoad", "TerminalLoad");
	pathSelector.addOption("5 Ball Auton", "5 Ball Auton");
	TrajectoryCache.add("3 Ball Test", "3 Ball Test");
	TrajectoryCache.add("Straight", "Straight");
	TrajectoryCache.add("HalfArc", "HalfArc");
	TrajectoryCache.add("BackHalfArc", "BackHalfArc");
	TrajectoryCache.add("TerminalLoad", "TerminalLoad");
}
/**
* Use this to pass the autonomous command to the main {@link Robot} class.
*
* @return the command to run in autonomous
*/
public Command getAutonomousCommand() {
// An ExampleCommand will run in autonomous
var autoVoltageConstraint = new DifferentialDriveVoltageConstraint(
new SimpleMotorFeedforward(DrivetrainConstants.kA, DrivetrainConstants.kV, DrivetrainConstants.kA),
DrivetrainConstants.KINEMATICS,
10
);
var TrajectoryConfig = new TrajectoryConfig(
DrivetrainConstants.MAX_VELOCITY, DrivetrainConstants.MAX_ACCEL);
if (pathSelector.getSelected().equals("5 Ball Auton")) {
return new FiveBallAuton(drivetrain).andThen(() -> drivetrain.setVoltages(0, 0));
}
Trajectory pathTrajectory = TrajectoryCache.get(pathSelector.getSelected());
RamseteCommand ramseteCommand = createRamseteCommand(pathTrajectory);
// Reset odometry to the starting pose of the trajectory.
drivetrain.resetOdometry(pathTrajectory.getInitialPose());
// drivetrain.gyro.setAngleAdjustment(pathTrajectory.getInitialPose().getRotation().getRadians() - drivetrain.gyro.getYaw());
// drivetrain.gyro.zeroYaw();
// Run path following command, then stop at the end.
return ramseteCommand.andThen(() -> drivetrain.setVoltages(0, 0));
}
public RamseteCommand createRamseteCommand(Trajectory pathTrajectory) {
return new RamseteCommand(
pathTrajectory,
drivetrain::getPose,
new RamseteController(DrivetrainConstants.B, DrivetrainConstants.ZETA),
new SimpleMotorFeedforward(DrivetrainConstants.kS,
DrivetrainConstants.kV,
DrivetrainConstants.kA),
DrivetrainConstants.KINEMATICS,
drivetrain::getWheelSpeeds,
new PIDController(0, 0, 0),
new PIDController(0, 0, 0),
// RamseteCommand passes volts to the callback
drivetrain::tankDriveVolts,
drivetrain
);
}
}
|
#!/usr/bin/env bash
#
# Builds bin/IO.Trakerr.dll with Mono's mcs after restoring NuGet packages.
#
# Generated by: https://github.com/swagger-api/swagger-codegen.git
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

frameworkVersion=net45
# Strip the leading "net" to get the SDK level mcs expects (net45 -> 45).
netfx=${frameworkVersion#net}

echo "[INFO] Target framework: ${frameworkVersion}"

echo "[INFO] Download nuget and packages"
# -nc: skip the download if nuget.exe already exists locally.
wget -nc https://nuget.org/nuget.exe
mozroots --import --sync
mono nuget.exe install src/IO.Trakerr/packages.config -o packages

echo "[INFO] Copy DLLs to the 'bin' folder"
mkdir -p bin
cp packages/Newtonsoft.Json.8.0.3/lib/net45/Newtonsoft.Json.dll bin/Newtonsoft.Json.dll
cp packages/RestSharp.105.1.0/lib/net45/RestSharp.dll bin/RestSharp.dll

echo "[INFO] Run 'mcs' to build bin/IO.Trakerr.dll"
mcs -sdk:${netfx} -r:bin/Newtonsoft.Json.dll,\
bin/RestSharp.dll,\
System.Runtime.Serialization.dll \
-target:library \
-out:bin/IO.Trakerr.dll \
-recurse:'src/IO.Trakerr/*.cs' \
-doc:bin/IO.Trakerr.xml \
-platform:anycpu

# Capture mcs's exit status immediately: `$?` is clobbered by the `[ ... ]` test itself,
# so the previous version always reported "exit code 0" inside the error branch.
rc=$?
if [ $rc -ne 0 ]
then
    echo "[ERROR] Compilation failed with exit code $rc"
    exit 1
else
    echo "[INFO] bin/IO.Trakerr.dll was created successfully"
fi
|
import requests
from lxml import html

url = "http://example.com"

# Fetch the page; fail fast on HTTP errors (4xx/5xx) instead of parsing an error page,
# and bound the request so a dead server cannot hang the script forever.
r = requests.get(url, timeout=10)
r.raise_for_status()

tree = html.fromstring(r.content)

# Scrape table with results. xpath() returns a list, so guard against a missing table
# rather than crashing with an opaque IndexError.
tables = tree.xpath('//table[@id="results-table"]')
if not tables:
    raise ValueError(f'no <table id="results-table"> found at {url}')
table = tables[0]

# Print the text of every cell, row by row.
for row in table.xpath('.//tr'):
    for cell in row.xpath('.//td'):
        print(cell.text_content())
Yes, the sequence [1, 2, 3, 4, 5] is a monotonically increasing sequence: each element is greater than the one before it. |
module Bocu
module Timelines
# Her-backed model for the "explore" timeline endpoint. Records are fetched from
# TIMELINE_ENDPOINT/explore/:category_id and unwrapped from a "coubs" JSON root.
class Explore
include Her::Model
include CommonScopes
# Explore categories accepted by the endpoint; each one becomes a scope below.
CATEGORIES = %i[coub_of_the_day newest random].freeze
# :category_id is interpolated into the path from the scope's where() clause.
collection_path "#{Bocu::TIMELINE_ENDPOINT}/explore/:category_id"
# The API nests results under a "coubs" root key (active_model_serializers format).
parse_root_in_json :coubs, format: :active_model_serializers
# Define one scope per category, e.g. Explore.newest == Explore.where(category_id: :newest).
CATEGORIES.each { |category| scope category, -> { where(category_id: category) } }
end
end
end
|
<filename>src/main/java/org/ringingmaster/util/javafx/propertyeditor/PropertyValueListener.java
package org.ringingmaster.util.javafx.propertyeditor;
/**
 * Listener for rendering-related changes to a {@link PropertyValue}.
 *
 * @author <NAME>
 */
public interface PropertyValueListener {
/**
 * Called when the given property value has changed in a way that affects its rendering.
 * NOTE(review): threading and invocation order are not visible here — confirm against
 * the {@link PropertyValue} implementations that fire this callback.
 *
 * @param propertyValue the value whose rendering changed
 */
void propertyValue_renderingChanged(PropertyValue propertyValue);
}
|
def modify_string(name: str) -> str:
    """Return a transformed copy of ``name``.

    Rules, checked in order (first match wins):
      * starts with ``'s'``: drop both the first and the last character
      * ends with ``'y'``:   replace the trailing ``'y'`` with ``'ies'``
      * otherwise:           append ``'ing'``
    """
    if name.startswith('s'):
        return name[1:-1]
    if name.endswith('y'):
        return f"{name[:-1]}ies"
    return f"{name}ing"
// Doxygen "annotated classes" navigation index.
// Each entry is [display name, HTML page (optionally with #anchor), doc reference id].
// NOTE(review): this file appears machine-generated by Doxygen — prefer regenerating
// the docs over editing these entries by hand.
var annotated_dup =
[
  [ "QActive", "struct_q_active.html", "struct_q_active" ],
  [ "QActiveDummy", "qs_8h.html#struct_q_active_dummy", "qs_8h_struct_q_active_dummy" ],
  [ "QActiveVtable", "struct_q_active_vtable.html", "struct_q_active_vtable" ],
  [ "QEQueue", "qequeue_8h.html#struct_q_e_queue", "qequeue_8h_struct_q_e_queue" ],
  [ "QEvt", "struct_q_evt.html", "struct_q_evt" ],
  [ "QFreeBlock", "qf__pkg_8h.html#struct_q_free_block", "qf__pkg_8h_struct_q_free_block" ],
  [ "QHsm", "struct_q_hsm.html", "struct_q_hsm" ],
  [ "QHsmAttr", "qep_8h.html#union_q_hsm_attr", "qep_8h_union_q_hsm_attr" ],
  [ "QHsmVtable", "struct_q_hsm_vtable.html", "struct_q_hsm_vtable" ],
  [ "QK_PrivAttr", "qk_8h.html#struct_q_k___priv_attr", "qk_8h_struct_q_k___priv_attr" ],
  [ "QMActive", "struct_q_m_active.html", "struct_q_m_active" ],
  [ "QMPool", "qmpool_8h.html#struct_q_m_pool", "qmpool_8h_struct_q_m_pool" ],
  [ "QMsm", "struct_q_msm.html", "struct_q_msm" ],
  [ "QMState", "qep_8h.html#struct_q_m_state", "qep_8h_struct_q_m_state" ],
  [ "QMTranActTable", "qep_8h.html#struct_q_m_tran_act_table", "qep_8h_struct_q_m_tran_act_table" ],
  [ "QPSet", "qpset_8h.html#struct_q_p_set", "qpset_8h_struct_q_p_set" ],
  [ "QSPrivAttr", "qs_8h.html#struct_q_s_priv_attr", "qs_8h_struct_q_s_priv_attr" ],
  [ "QSrxPrivAttr", "qs__pkg_8h.html#struct_q_srx_priv_attr", "qs__pkg_8h_struct_q_srx_priv_attr" ],
  [ "QTicker", "struct_q_ticker.html", "struct_q_ticker" ],
  [ "QTimeEvt", "struct_q_time_evt.html", "struct_q_time_evt" ],
  [ "QXK_PrivAttr", "qxk_8h.html#struct_q_x_k___priv_attr", "qxk_8h_struct_q_x_k___priv_attr" ],
  [ "QXMutex", "qxthread_8h.html#struct_q_x_mutex", "qxthread_8h_struct_q_x_mutex" ],
  [ "QXSemaphore", "qxthread_8h.html#struct_q_x_semaphore", "qxthread_8h_struct_q_x_semaphore" ],
  [ "QXThread", "struct_q_x_thread.html", "struct_q_x_thread" ]
];
<reponame>jrfaller/maracas
package com.github.maracas.rest.data;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
/**
 * REST response payload for a pull-request analysis: a human-readable message plus the
 * (possibly absent) Maracas report.
 */
public record PullRequestResponse(
    String message,
    MaracasReport report
) {
    // Shared mapper: ObjectMapper construction is expensive and the instance is
    // thread-safe for serialization, so one static instance serves all calls.
    private static final ObjectMapper JSON = new ObjectMapper();

    /** Convenience constructor for a message-only response (no report). */
    public PullRequestResponse(String message) {
        this(message, null);
    }

    /**
     * Serializes this response to its JSON representation.
     *
     * @return the JSON string
     * @throws IOException if serialization fails
     */
    public String toJson() throws IOException {
        return JSON.writeValueAsString(this);
    }
}
|
<reponame>mbrodt/portfolio
import React from 'react'
import Arrow from './arrow'
import SectionHeading from '../components/UI/SectionHeading'
const Projects = ({ projects }) => {
return (
<section
data-aos="fade-up"
data-aos-delay="300"
id="projects"
className="container text-center my-20"
>
<SectionHeading>Projects</SectionHeading>
<div className="flex flex-wrap justify-center sm:m-8 md:m-0 xs:px-4 sm:px-0">
{projects.map(project => (
<Project key={project.node.title} project={project.node} />
))}
</div>
</section>
)
}
const Project = ({ project }) => {
return (
<div
className="bg-white rounded-lg flex p-2 sm:p-4 shadow-2xl m-4 md:max-w-xs "
key={project}
>
<div className="mx-auto flex flex-col justify-between text-left m-4 px-2 md:px-8">
<div className="flex justify-between items-baseline">
<h3 className="mb-4 mr-2 text-lg sm:text-xl font-bold">
{project.title}
</h3>
<Label labelText={project.label} />
</div>
<p className="text-gray-800 leading-normal mb-4">
{project.description}
</p>
<a
target="_blank"
rel="noopener noreferrer"
href={`${project.link}`}
className="no-underline bg-indigo-500 text-white flex h-10 w-48 items-center justify-around hover:bg-indigo-600 w-full"
>
{' '}
<p className="mb-0">{project.footerText}</p>
<Arrow direction="right" />
</a>
</div>
</div>
)
}
const Label = ({ labelText }) => {
const classes = [
'text-xs',
'px-3',
'py-1',
'rounded-full',
'text-center',
'font-bold',
]
switch (labelText) {
case 'Client':
classes.push('bg-green-200 text-green-900')
break
case 'Personal':
classes.push('bg-blue-200 text-blue-900')
break
default:
break
}
return <p className={classes.join(' ')}>{labelText}</p>
}
export default Projects
|
<reponame>OSWeDev/oswedev
// Minimal contract for a piece of rendered data: it only carries the date index it
// belongs to.
export default interface IRenderedData {
// Date encoded as a single number (presumably something like YYYYMMDD — confirm with
// the producers of this interface).
data_dateindex: number;
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.