text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Install MongoDB Community 3.2 from the official mongodb.org apt repository
# (Ubuntu 16.04 "xenial" packages), then start mongod and enable it at boot.
# NOTE(review): apt-key is deprecated on current Ubuntu releases; the
# signed-by keyring mechanism is its replacement -- confirm the target
# distribution before reusing this script.
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 0xd68fa50fea312927
sudo bash -c 'echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.2.list'
sudo apt update
sudo apt install -y mongodb-org
sudo systemctl start mongod
sudo systemctl enable mongod
|
<filename>cmd/chart-to-order/main.go
/*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"encoding/json"
"io/ioutil"
"os"
"time"
"kubepack.dev/kubepack/apis/kubepack/v1alpha1"
"github.com/google/uuid"
flag "github.com/spf13/pflag"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/klog/v2"
"sigs.k8s.io/yaml"
)
// Defaults for the command-line flags registered in main; each can be
// overridden with --url / --name / --version.
var (
	url     = "https://bundles.byte.builders/ui/"
	name    = "mongodb-editor-options"
	version = "v0.1.0"
)
// main builds a kubepack Order for a single chart (identified by the
// url/name/version flags) and writes it to artifacts/<name>/order.yaml
// and artifacts/<name>/order.json.
func main() {
	flag.StringVar(&url, "url", url, "Chart repo url")
	flag.StringVar(&name, "name", name, "Name of bundle")
	flag.StringVar(&version, "version", version, "Version of bundle")
	flag.Parse()
	// Single-package Order; optional fields are deliberately left nil/empty.
	order := v1alpha1.Order{
		TypeMeta: metav1.TypeMeta{
			APIVersion: v1alpha1.SchemeGroupVersion.String(),
			Kind:       v1alpha1.ResourceKindOrder,
		}, ObjectMeta: metav1.ObjectMeta{
			Name: name,
			// Fresh random UID and current timestamp on every run.
			UID:               types.UID(uuid.New().String()),
			CreationTimestamp: metav1.NewTime(time.Now()),
		},
		Spec: v1alpha1.OrderSpec{
			Packages: []v1alpha1.PackageSelection{
				{
					Chart: &v1alpha1.ChartSelection{
						ChartRef: v1alpha1.ChartRef{
							URL:  url,
							Name: name,
						},
						Version:     version,
						ReleaseName: name,
						Namespace:   metav1.NamespaceDefault,
						Bundle:      nil,
						ValuesFile:  "values.yaml",
						ValuesPatch: nil,
						Resources:   nil,
						WaitFors:    nil,
					},
				},
			},
			KubeVersion: "",
		},
	}
	err := os.MkdirAll("artifacts/"+name, 0o755)
	if err != nil {
		klog.Fatal(err)
	}
	// YAML rendering of the order.
	{
		data, err := yaml.Marshal(order)
		if err != nil {
			klog.Fatal(err)
		}
		err = ioutil.WriteFile("artifacts/"+name+"/order.yaml", data, 0o644)
		if err != nil {
			klog.Fatal(err)
		}
	}
	// JSON rendering of the same order.
	{
		data, err := json.MarshalIndent(order, "", " ")
		if err != nil {
			klog.Fatal(err)
		}
		err = ioutil.WriteFile("artifacts/"+name+"/order.json", data, 0o644)
		if err != nil {
			klog.Fatal(err)
		}
	}
}
|
import { uuidv4 } from "@andrewcaires/utils.js";
import { Response, Request, NextFunction } from "express";
import { existsSync } from "fs";
import mime from "mime-types";
import multer from "multer";
import { join } from "path";
import { Responses } from "../helpers/Responses";
import { API_UPLOAD_COUNT, API_UPLOAD_FIELD, API_UPLOAD_SIZE, API_UPLOAD_TEMP } from "../config";
// Per-call overrides for the upload middleware; every field falls back to
// its API_UPLOAD_* configuration value when omitted.
interface UploadOptions {
  count?: number; // maximum number of files accepted in one request
  field?: string; // multipart form field name
  path?: string; // destination directory for stored files
  size?: number; // maximum file size in MB (multiplied by 1,000,000 below)
  types?: string[]; // allowed mime types; undefined = allow everything
}
// Generate a unique random file name (uuid + extension) inside `path`,
// retrying with a new uuid until the name does not collide with an
// existing file.
const getFileName = (path: string, ext: string): string => {
  let candidate: string;
  do {
    candidate = uuidv4() + "." + ext;
  } while (existsSync(join(path, candidate)));
  return candidate;
};
// Build an Express middleware that accepts a multipart upload of up to
// `count` files on form field `field`, stores them under `path` with
// random uuid names, and rejects disallowed mime types or oversized files
// with a Responses.error payload instead of throwing.
export const upload = (options?: UploadOptions) => {
  const destinationPath = options?.path || API_UPLOAD_TEMP;

  const diskStorage = multer.diskStorage({
    destination: (req, file, cb) => cb(null, destinationPath),
    filename: (req, file, cb) =>
      cb(null, getFileName(destinationPath, mime.extension(file.mimetype) || "bin")),
  });

  // `size` option is expressed in MB; multer wants bytes.
  const maxBytes = (options?.size || API_UPLOAD_SIZE) * 1000000;

  const handler = multer({
    storage: diskStorage,
    fileFilter: (req, file, cb) => {
      // Only filter when a whitelist was provided.
      const allowedIndex = options?.types?.indexOf(file.mimetype);
      if (allowedIndex !== undefined && allowedIndex < 0) {
        return cb(new Error("File type not allowed"));
      }
      return cb(null, true);
    },
    limits: { fileSize: maxBytes },
  });

  const maxCount = options?.count || API_UPLOAD_COUNT;
  const fieldName = options?.field || API_UPLOAD_FIELD;
  const middleware = handler.array(fieldName, maxCount);

  return (req: Request, res: Response, next: NextFunction) => {
    middleware(req, res, (error) => {
      if (error) {
        return Responses.error(res, error.message);
      }
      return next();
    });
  };
};
|
#!/usr/bin/env bash
# Update an installed Jena-Fuseki/Tomcat deployment: regenerate the endpoint
# TTL config, refresh the qonsole config and the Lucene analyzer jars, then
# restart the fuseki and marple services.
systemctl stop fuseki
systemctl stop marple
#general vars
echo ">>>> Updating Fuseki"
export TC_USER=fuseki
export TC_GROUP=fuseki
# set erb vars
# endpoint name for fuseki
export EP_NAME=core
export SVC=fuseki
export SVC_DESC="Jena-Fuseki Tomcat container"
export MARPLE_SVC=marple
export MARPLE_SVC_DESC="Marple service for fuseki Lucene indexes"
# Derive JAVA_HOME from whichever javac is on PATH, resolving symlinks.
export JAVA_HOME=`type -p javac|xargs readlink -f|xargs dirname|xargs dirname`
# Analyzer jar versions and their GitHub release URLs.
export LUCENE_BO_VER=1.5.0
export LUCENE_BO_JAR="lucene-bo-${LUCENE_BO_VER}.jar"
export LUCENE_BO_REL="https://github.com/buda-base/lucene-bo/releases/download/v${LUCENE_BO_VER}/${LUCENE_BO_JAR}"
export LUCENE_ZH_VER=0.4.1
export LUCENE_ZH_JAR="lucene-zh-${LUCENE_ZH_VER}.jar"
export LUCENE_ZH_REL="https://github.com/buda-base/lucene-zh/releases/download/v${LUCENE_ZH_VER}/${LUCENE_ZH_JAR}"
export LUCENE_SA_VER=1.1.0
export LUCENE_SA_JAR="lucene-sa-${LUCENE_SA_VER}.jar"
export LUCENE_SA_REL="https://github.com/buda-base/lucene-sa/releases/download/v${LUCENE_SA_VER}/${LUCENE_SA_JAR}"
export MARPLE_REL="https://github.com/flaxsearch/marple/releases/download/v1.0/marple-1.0.jar"
# Prefer the data disk when mounted, otherwise fall back to /usr/local.
if [ -d /mnt/data ] ; then
export DATA_DIR=/mnt/data ;
else
export DATA_DIR=/usr/local ;
fi
echo ">>>> DATA_DIR: " $DATA_DIR
export DOWNLOADS=$DATA_DIR/downloads
export THE_HOME=$DATA_DIR/$SVC
export THE_BASE=$THE_HOME/base
export CAT_HOME=$THE_HOME/tomcat
echo ">>>>>>>> updating {$EP_NAME}.ttl to {$THE_BASE}/configuration/"
erb /vagrant/conf/fuseki/ttl.erb > $THE_BASE/configuration/$EP_NAME.ttl
echo ">>>>>>>> updating qonsole-config.js to {$CAT_HOME}/webapps/fuseki/js/app/"
cp /vagrant/conf/fuseki/qonsole-config.js $CAT_HOME/webapps/fuseki/js/app/
echo ">>>>>>>> updating analyzers to {$CAT_HOME}/webapps/fuseki/WEB-INF/lib/"
# the lucene-bo jar has to be added to fuseki/WEB-INF/lib/ otherwise
# tomcat class loading cannot find rest of Lucene classes
rm -f $CAT_HOME/webapps/fuseki/WEB-INF/lib/lucene-bo-*.jar
rm -f $CAT_HOME/webapps/fuseki/WEB-INF/lib/lucene-sa-*.jar
rm -f $CAT_HOME/webapps/fuseki/WEB-INF/lib/lucene-zh-*.jar
pushd $DOWNLOADS;
# wget -q -c $LUCENE_BO_REL
# NOTE(review): the -bo jar is force re-downloaded (-O) while -zh/-sa use
# resume (-c); confirm the asymmetry is intentional.
wget -q $LUCENE_BO_REL -O $LUCENE_BO_JAR
cp $LUCENE_BO_JAR $CAT_HOME/webapps/fuseki/WEB-INF/lib/
wget -q -c $LUCENE_ZH_REL
cp $LUCENE_ZH_JAR $CAT_HOME/webapps/fuseki/WEB-INF/lib/
wget -q -c $LUCENE_SA_REL
cp $LUCENE_SA_JAR $CAT_HOME/webapps/fuseki/WEB-INF/lib/
popd
echo ">>>> restarting ${SVC}"
systemctl start fuseki
systemctl start marple
# NOTE(review): MAIN_PORT is never assigned in this script -- it must be
# exported by the caller or this prints an empty port; confirm.
echo ">>>> ${SVC} service listening on ${MAIN_PORT}"
echo ">>>> Fuseki updating complete"
|
<gh_stars>0
import CardCluster3 from '../../components/CardCluster3';
function Design () {
return (
<CardCluster3 />
);
};
export default Design; |
# Bash programmable completion for the `pdu` disk-usage tool.
_pdu() {
    # FIX: the variable used throughout this function is `cmd`, but it was
    # previously declared as `cmds`, so `cmd` leaked into the caller's
    # environment. Declare the name that is actually used.
    local i cur prev opts cmd
    COMPREPLY=()
    cur="${COMP_WORDS[COMP_CWORD]}"
    prev="${COMP_WORDS[COMP_CWORD-1]}"
    cmd=""
    opts=""

    # Locate the command word anywhere on the current line.
    for i in ${COMP_WORDS[@]}
    do
        case "${i}" in
            pdu)
                cmd="pdu"
                ;;
            *)
                ;;
        esac
    done

    case "${cmd}" in
        pdu)
            opts=" -h -V --top-down --no-sort --silent-errors --progress --help --version --bytes-format --quantity --max-depth --total-width --column-width --min-ratio <files>... "
            # Offer the option list when completing an option or the first word.
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            # Complete the value of options that take an argument.
            case "${prev}" in
                --bytes-format)
                    COMPREPLY=($(compgen -W "plain metric binary" -- "${cur}"))
                    return 0
                    ;;
                --quantity)
                    COMPREPLY=($(compgen -W "len blksize blocks" -- "${cur}"))
                    return 0
                    ;;
                # These four numeric options share the same (filename)
                # fallback completion, so handle them in one branch.
                --max-depth|--total-width|--column-width|--min-ratio)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
    esac
}
complete -F _pdu -o bashdefault -o default pdu
|
# Demonstrate a cross-network asset exchange with fabric-cli:
# 1) show alice's and bob's wallets/assets on both networks,
# 2) atomically swap bob's bond01:a04 for 100 of alice's token1
#    (HTLC secret "secrettext", 100s timeout),
# 3) show the same wallets/assets again to verify the transfer.
./bin/fabric-cli chaincode invoke --local-network=network2 --user=alice mychannel simpleasset GetMyWallet '[]'
./bin/fabric-cli chaincode invoke --local-network=network1 --user=alice mychannel simpleasset GetMyAssets '[]'
./bin/fabric-cli chaincode invoke --local-network=network2 --user=bob mychannel simpleasset GetMyWallet '[]'
./bin/fabric-cli chaincode invoke --local-network=network1 --user=bob mychannel simpleasset GetMyAssets '[]'
./bin/fabric-cli asset exchange-all --network1=network1 --network2=network2 --secret=secrettext --timeout-duration=100 bob:bond01:a04:alice:token1:100
./bin/fabric-cli chaincode invoke --local-network=network2 --user=alice mychannel simpleasset GetMyWallet '[]'
./bin/fabric-cli chaincode invoke --local-network=network1 --user=alice mychannel simpleasset GetMyAssets '[]'
./bin/fabric-cli chaincode invoke --local-network=network2 --user=bob mychannel simpleasset GetMyWallet '[]'
./bin/fabric-cli chaincode invoke --local-network=network1 --user=bob mychannel simpleasset GetMyAssets '[]'
|
#!/bin/bash
# conda build script: generate, build and install the Python "orekit"
# wrapper with JCC against orekit 10.0, hipparchus 1.5 and rugged 2.1.
# The listed java.* classes are wrapped explicitly; --reserved avoids
# collisions with C/Python macros and builtins.
$PYTHON -m jcc \
--use_full_names \
--python orekit \
--version ${PKG_VERSION} \
--jar $SRC_DIR/orekit-10.0.jar \
--jar $SRC_DIR/hipparchus-core-1.5.jar \
--jar $SRC_DIR/hipparchus-filtering-1.5.jar \
--jar $SRC_DIR/hipparchus-fitting-1.5.jar \
--jar $SRC_DIR/hipparchus-geometry-1.5.jar \
--jar $SRC_DIR/hipparchus-ode-1.5.jar \
--jar $SRC_DIR/hipparchus-optim-1.5.jar \
--jar $SRC_DIR/hipparchus-stat-1.5.jar \
--jar $SRC_DIR/rugged-2.1.jar \
--package java.io \
--package java.util \
--package java.text \
--package org.orekit \
--package org.orekit.rugged \
java.io.BufferedReader \
java.io.FileInputStream \
java.io.FileOutputStream \
java.io.InputStream \
java.io.InputStreamReader \
java.io.ObjectInputStream \
java.io.ObjectOutputStream \
java.io.PrintStream \
java.io.StringReader \
java.io.StringWriter \
java.lang.System \
java.text.DecimalFormat \
java.text.DecimalFormatSymbols \
java.util.ArrayDeque \
java.util.ArrayList \
java.util.Arrays \
java.util.Collection \
java.util.Collections \
java.util.Date \
java.util.HashMap \
java.util.HashSet \
java.util.List \
java.util.Locale \
java.util.Map \
java.util.Set \
java.util.TreeSet \
java.util.stream.Collectors \
java.util.stream.Stream \
java.util.stream.DoubleStream \
--module $SRC_DIR/pyhelpers.py \
--reserved INFINITE \
--reserved ERROR \
--reserved OVERFLOW \
--reserved NO_DATA \
--reserved NAN \
--reserved min \
--reserved max \
--reserved mean \
--reserved SNAN \
--files 81 \
--build \
--install
# ensure that JCC_JDK is set correctly by invoking an activate script
ACTIVATE_DIR=$PREFIX/etc/conda/activate.d
DEACTIVATE_DIR=$PREFIX/etc/conda/deactivate.d
mkdir -p $ACTIVATE_DIR
mkdir -p $DEACTIVATE_DIR
cp $RECIPE_DIR/scripts/activate.sh $ACTIVATE_DIR/orekit-activate.sh
cp $RECIPE_DIR/scripts/deactivate.sh $DEACTIVATE_DIR/orekit-deactivate.sh
package cmd
import (
"bufio"
"bytes"
"gitee.com/ddkwork/libraryGo/stream"
"golang.org/x/text/encoding/simplifiedchinese"
"golang.org/x/text/transform"
"io"
"io/ioutil"
"os/exec"
"strings"
)
// charset names a supported text encoding.
type charset string

const (
	UTF8    = charset("UTF-8")
	GB18030 = charset("GB18030")
)

type (
	// Interface is the public surface of this package's command/encoding helpers.
	Interface interface {
		UTF82GBK(src string) stream.Interface              // convert UTF-8 to GBK
		GBK2UTF8(src []byte) string                        // convert GBK to UTF-8
		CmdBuf2ChineseString(arg interface{}) (str string) // convert command output buffer to a Chinese (UTF-8) string
		CmdRunWithCheck(arg string)                        // run a cmd command and log its output
	}
	// object is the unexported implementation of Interface.
	object struct {
	}
)
// New returns this package's Interface implementation.
func New() Interface { return &object{} }
// convertByte2String decodes the given bytes according to charset:
// GB18030 input is decoded to UTF-8; UTF8 (and any unknown charset)
// is returned as-is. On decode failure a fixed error string is returned.
// NOTE(review): the parameter name `byte` shadows the builtin type.
func (o *object) convertByte2String(byte []byte, charset charset) string {
	var str string
	switch charset {
	case GB18030:
		decodeBytes, err := simplifiedchinese.GB18030.NewDecoder().Bytes(byte)
		// NOTE(review): W is declared elsewhere in the package; the idiom
		// "!W.ErrorCheck.Error(err)" appears to mean "err was fatal" --
		// confirm against W's implementation.
		if !W.ErrorCheck.Error(err) {
			return "err GB18030.NewDecoder()"
		}
		str = string(decodeBytes)
	case UTF8:
		fallthrough
	default:
		str = string(byte)
	}
	return str
}
// checkCmdResult starts cmd with stdout and stderr merged, logs every
// output line (decoded from GB18030, Windows line endings stripped)
// tagged with the original command string, then waits for the process.
func (o *object) checkCmdResult(arg string, cmd *exec.Cmd) {
	outReader, err := cmd.StdoutPipe()
	if !W.ErrorCheck.Error(err) {
		return
	}
	errReader, err := cmd.StderrPipe()
	if !W.ErrorCheck.Error(err) {
		return
	}
	// Merge both streams so lines are scanned in arrival order.
	cmdReader := io.MultiReader(outReader, errReader)
	if !W.ErrorCheck.Error(cmd.Start()) {
		return
	}
	Stdin := bufio.NewScanner(cmdReader)
	for Stdin.Scan() {
		cmdRe := o.convertByte2String(Stdin.Bytes(), GB18030)
		cmdRe = strings.Replace(cmdRe, "\r\n", "", -1) // drop CRLF remnants
		W.Log.Info("cmd命令 "+arg+" 返回:", cmdRe)
	}
	if !W.ErrorCheck.Error(cmd.Wait()) {
		return
	}
}
// CmdRunWithCheck runs arg through cmd.exe /C and logs its output.
// NOTE(review): the interpreter path is hard-coded to the 32-bit
// C:\Windows\SysWOW64\cmd.exe -- Windows-only; confirm the 32-bit shell
// is intentional.
func (o *object) CmdRunWithCheck(arg string) {
	o.checkCmdResult(arg, exec.Command("C:\\Windows\\SysWOW64\\cmd.exe", "/C", arg))
}
// CmdBuf2ChineseString converts a command output buffer (string or []byte,
// treated as GB18030-encoded) into a UTF-8 string. Any other dynamic type
// yields the empty string, as before.
func (o *object) CmdBuf2ChineseString(arg interface{}) (str string) {
	// Idiomatic type switch: bind the concrete value once instead of
	// re-asserting (arg.(string) / arg.([]byte)) inside each case.
	switch v := arg.(type) {
	case string:
		str = o.convertByte2String([]byte(v), GB18030)
	case []byte:
		str = o.convertByte2String(v, GB18030)
	}
	return
}
// UTF82GBK : transform UTF8 rune into GBK byte array
// The encoded bytes are stored in the package-level stream object and the
// shared stream.Default is returned (also returned unchanged on error).
func (o *object) UTF82GBK(src string) stream.Interface {
	// NOTE(review): relies on simplifiedchinese.All[0] being GB18030 --
	// an index into a package slice; confirm the ordering is stable.
	GB18030 := simplifiedchinese.All[0]
	all, err := ioutil.ReadAll(transform.NewReader(bytes.NewReader([]byte(src)), GB18030.NewEncoder()))
	if !W.ErrorCheck.Error(err) {
		return stream.Default
	}
	// NOTE(review): "DefaultTue" looks like a typo for "DefaultTrue" or
	// "Default"; it is declared in the stream package -- verify which
	// instance is intended before renaming.
	stream.DefaultTue.SetBytes(all)
	return stream.Default
}
// GBK2UTF8 : transform GBK byte array into UTF8 string
// Returns the empty string when decoding fails.
func (o *object) GBK2UTF8(src []byte) string {
	// NOTE(review): assumes simplifiedchinese.All[0] is GB18030 -- an
	// index into a package slice; confirm the ordering is stable.
	GB18030 := simplifiedchinese.All[0]
	all, err := ioutil.ReadAll(transform.NewReader(bytes.NewReader(src), GB18030.NewDecoder()))
	if !W.ErrorCheck.Error(err) {
		return ""
	}
	return string(all)
}
|
class Employee:
    """Simple record of an employee's personal and job details."""

    def __init__(self, name, age, salary, job_title):
        # Bind all four attributes with a single tuple assignment.
        self.name, self.age, self.salary, self.job_title = (
            name,
            age,
            salary,
            job_title,
        )


employee1 = Employee("John Smith", 25, 45000, "Software Engineer")
<reponame>NekoOfTheAbyss/lala-discord<gh_stars>0
import Command from "../../structures/Command.js";
// Slash-command replying with general information about the lala bot.
export default class LalaCommand extends Command {
	constructor(client) {
		super(client, {
			name: "lala",
			description: "General info about lala",
			group: "misc",
		});
	}

	// Build and send an embed describing the bot, its feature set and credits.
	async run(message, command) {
		const response = `This is **La Lala**, created by [Neko Of The Abyss](https://nekooftheabyss.xyz/). Basically a bot created on the [lala project](https://github.com/NekoOfTheAbyss/lala). I call it **A collection of useless functions compiled into a Discord bot**. The current set of features include a story generator (\`/story\`), chain mail generator (\`/chain\`), random name and string generators (\`/name\`, \`/mob\` and \`/string\`), random email generator (\`/email\`), and an owoifier (\`/owoify\`).\nJoin our support server through [this link](https://discord.gg/G5jpusmDqw).`
		const embed = new this.client.util.Embed()
			.setColor("#ff00c3")
			.setDescription(`${response}`)
			// NOTE(review): "chain mail" in the author line looks copied from
			// the /chain command even though this is the info command -- confirm.
			.setAuthor(
				`${command.author.username}'s chain mail:`,
				command.author.iconURL
			)
			.addField("Guilds serving", `${this.client.guilds.size}`, true)
			.addField("Credits", `NeTT#1172 (Neko Of The Abyss)`, true)
		return message.createMessage({ embeds: [embed.json()] });
	}
};
|
#!/bin/bash
# Parameters for building libjansson 2.5 as a static archive via the
# shared build-lib.sh driver.
export LIB_FILE_PREFIX="jansson"
export LIB_NAME="lib${LIB_FILE_PREFIX}"
export LIB_OUTPUT="${LIB_NAME}.a"
export LIB_VERSION="2.5"
export LIB_URL="http://www.digip.org/jansson/releases/${LIB_FILE_PREFIX}-${LIB_VERSION}.tar.gz"
./build-lib.sh
|
<reponame>TheDadi/polyfill-library<gh_stars>0
// Feature detect: evaluates to true when Reflect.setPrototypeOf exists.
'setPrototypeOf' in Reflect
#!/bin/bash
# Install Visual Studio Code from Microsoft's repositories on either
# Debian/Ubuntu (apt) or RHEL/Fedora/CentOS (dnf) systems.
aptcmd()
{
echo "Debian like, apt-get commands"
#https://code.visualstudio.com/docs/setup/linux#_debian-and-ubuntu-based-distributions
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.gpg
mv microsoft.gpg /etc/apt/trusted.gpg.d/microsoft.gpg
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/vscode stable main" > /etc/apt/sources.list.d/vscode.list'
apt-get update
apt-get install code -y # or code-insiders
}
dnfcmd()
{
echo "Red Hat like, dnf commands"
#https://code.visualstudio.com/docs/setup/linux#_rhel-fedora-and-centos-based-distributions
rpm --import https://packages.microsoft.com/keys/microsoft.asc
sh -c 'echo -e "[code]\nname=Visual Studio Code\nbaseurl=https://packages.microsoft.com/yumrepos/vscode\nenabled=1\ngpgcheck=1\ngpgkey=https://packages.microsoft.com/keys/microsoft.asc" > /etc/yum.repos.d/vscode.repo'
dnf check-update
dnf install code -y
}
# Detect available package managers by probing their --help exit status.
apt-get --help > /dev/null 2>&1
has_apt=$?
dnf --help > /dev/null 2>&1
has_dnf=$?
if [ "$has_apt" -eq "0" ]; then
aptcmd
fi
if [ "$has_dnf" -eq "0" ]; then
dnfcmd
fi
|
#!/bin/bash
# Copyright (c) 2018-2022, NVIDIA CORPORATION.
#
# Adopted from https://github.com/tmcdonell/travis-scripts/blob/dfaac280ac2082cd6bcaba3217428347899f2975/update-accelerate-buildbot.sh
# Upload built libcugraph / pylibcugraph / cugraph conda packages to
# anaconda.org, but only for "branch" CI builds with an upload key.
# NOTE(review): the top-level `return 0` statements are only valid when
# this script is *sourced* by the CI driver; running it directly would
# error on `return` -- confirm how CI invokes it.
set -e
# Setup 'gpuci_retry' for upload retries (results in 4 total attempts)
export GPUCI_RETRY_MAX=3
export GPUCI_RETRY_SLEEP=30
# Set default label options if they are not defined elsewhere
export LABEL_OPTION=${LABEL_OPTION:-"--label main"}
# Skip uploads unless BUILD_MODE == "branch"
if [ ${BUILD_MODE} != "branch" ]; then
echo "Skipping upload"
return 0
fi
# Skip uploads if there is no upload key
if [ -z "$MY_UPLOAD_KEY" ]; then
echo "No upload key"
return 0
fi
################################################################################
# UPLOAD - Conda packages
################################################################################
gpuci_logger "Starting conda uploads"
if [[ "$BUILD_LIBCUGRAPH" == "1" && "$UPLOAD_LIBCUGRAPH" == "1" ]]; then
LIBCUGRAPH_FILES=$(conda build --no-build-id --croot ${CONDA_BLD_DIR} conda/recipes/libcugraph --output)
echo "Upload libcugraph"
gpuci_retry anaconda -t ${MY_UPLOAD_KEY} upload -u ${CONDA_USERNAME:-rapidsai} ${LABEL_OPTION} --skip-existing --no-progress ${LIBCUGRAPH_FILES}
fi
if [[ "$BUILD_CUGRAPH" == "1" ]]; then
PYLIBCUGRAPH_FILE=$(conda build --croot ${CONDA_BLD_DIR} conda/recipes/pylibcugraph --python=$PYTHON --output)
test -e ${PYLIBCUGRAPH_FILE}
echo "Upload pylibcugraph file: ${PYLIBCUGRAPH_FILE}"
gpuci_retry anaconda -t ${MY_UPLOAD_KEY} upload -u ${CONDA_USERNAME:-rapidsai} ${LABEL_OPTION} --skip-existing ${PYLIBCUGRAPH_FILE} --no-progress
CUGRAPH_FILE=$(conda build --croot ${CONDA_BLD_DIR} conda/recipes/cugraph --python=$PYTHON --output)
test -e ${CUGRAPH_FILE}
echo "Upload cugraph file: ${CUGRAPH_FILE}"
gpuci_retry anaconda -t ${MY_UPLOAD_KEY} upload -u ${CONDA_USERNAME:-rapidsai} ${LABEL_OPTION} --skip-existing ${CUGRAPH_FILE} --no-progress
fi
// Volume of a right circular cylinder: V = pi * r^2 * h.
function cylinderVolume(radius, height) {
  return Math.PI * radius ** 2 * height;
}

let radius = 5;
let height = 10;
let volume = cylinderVolume(radius, height);
console.log('The volume of the cylinder is', volume);
import { ComponentClass, connect, Dispatch, MapDispatchToProps, MapStateToProps } from 'react-redux';
import { State } from '../state';
import { screenModified } from '../actions';
import MapEditor from '../map-editor';
import { Action } from 'redux-actions';
// Expose the slices of global state the map editor renders from.
const mapStateToProps: MapStateToProps<any, any, State> = (state: State): any => ({
  game: state.game,
  selectedTileIndex: state.selectedTileIndex,
});
// Translate editor callbacks into dispatched redux actions.
const mapDispatchToProps: MapDispatchToProps<any, any> = (dispatch: Dispatch<Action<any>>): any => ({
  onChange: (row: number, col: number) => {
    dispatch(screenModified(row, col));
  },
});
// Container component: MapEditor wired to the redux store.
const VisibleMapEditor: ComponentClass<any> = connect(mapStateToProps, mapDispatchToProps)(MapEditor);
export default VisibleMapEditor;
|
<gh_stars>0
package com.amaljoyc.patterns.structural.facade;
/**
* Created by amaljoyc on 19.07.18.
*/
/**
 * Facade pattern demo: the client talks only to {@link BankingService},
 * which hides the account-creation subsystem behind two simple calls.
 */
public class Main {
    public static void main(String[] args) {
        final BankingService bankingService = new BankingService();
        bankingService.createAccount(AccountType.CURRENT);
        bankingService.createAccount(AccountType.SAVINGS);
    }
}
|
#include <gtest/gtest.h>
#include <bcli/bcli.hpp>
using namespace bc;
// Parse a mixed command line: two valued params (-p/--param, -t), one
// flag (-f) and three positionals, then verify each is retrievable.
TEST(Parser, simple)
{
    // NOTE(review): binding string literals to char* is ill-formed in
    // ISO C++11+ (most compilers only warn); consider const char* if the
    // bcli API permits it.
    char* argv[] = {"cmd", "-p", "10", "pos1", "-t", "strvalue", "-f", "pos2", "pos3"};
    int argc = sizeof(argv)/sizeof(char*);
    {
        Parser cli("test", "test", "test", "test");
        cli.add_param("-p/--param", "help");
        cli.add_param("-t", "help");
        cli.add_param("-f", "help")->as_flag();
        BCLI_PARSE(cli, argc, argv)
        // Positionals keep their command-line order regardless of where
        // options appeared between them.
        EXPECT_EQ(cli.get_positionals()[0], "pos1");
        EXPECT_EQ(cli.get_positionals()[1], "pos2");
        EXPECT_EQ(cli.get_positionals()[2], "pos3");
        int p = cli.getp("p")->as<int>();
        EXPECT_EQ(p, 10);
        EXPECT_EQ(cli.getp("t")->as<std::string>(), "strvalue");
        EXPECT_TRUE(cli.getp("f")->is_set());
    }
}
// Sub-command parsing: positional-count bounds are enforced, and a single
// parser instance can be re-used for different command lines.
TEST(Parser, multiple)
{
    char* argv[] = {"cmd", "cmd1", "-p", "10", "pos1", "pos2", "pos3"};
    char* argv1[] = {"cmd", "cmd1", "-p", "10", "pos1", "pos2", "pos3", "pos4", "pos5", "pos6"};
    char* argv2[] = {"cmd", "cmd2", "-p", "42", "pos1", "pos2", "pos3"};
    int argc = sizeof(argv)/sizeof(char*);
    int argc1 = sizeof(argv1)/sizeof(char*);
    int argc2 = sizeof(argv2)/sizeof(char*);
    {
        // Six positionals exceed the [1, 5] bound set on cmd1 -> throws.
        Parser<1> cli("test", "test", "test", "test");
        cmd_t cmd1 = cli.add_command("cmd1", "cmd1 desc");
        cmd1->set_positional_bounds(1, 5, "Usage pos", "Help pos");
        cmd1->add_param("-p/--param", "help");
        cmd_t cmd2 = cli.add_command("cmd2", "cmd2 desc");
        EXPECT_THROW(cli.parse(argc1, argv1), ex::PositionalsError);
    }
    {
        // Same parser parses cmd2 then cmd1; -p reflects the latest parse.
        Parser<1> cli("test", "test", "test", "test");
        cmd_t cmd1 = cli.add_command("cmd1", "cmd1 desc");
        cmd1->add_param("-p/--param", "help");
        cmd_t cmd2 = cli.add_command("cmd2", "cmd2 desc");
        cmd2->add_param("-p/--param", "help");
        cli.parse(argc2, argv2);
        EXPECT_EQ(cli.getp("p")->as<int>(), 42);
        cli.parse(argc, argv);
        EXPECT_EQ(cli.getp("p")->as<int>(), 10);
    }
}
|
class CustomError:
    """Registry of error codes mapped to human-readable descriptions."""

    def __init__(self):
        self.errorcode = {}

    def add_error_code(self, code, description):
        """Register (or overwrite) the description for ``code``."""
        self.errorcode[code] = description

    def raise_custom_error(self, code):
        """Raise Exception with the registered description for ``code``,
        or a generic message when the code is unknown."""
        if code not in self.errorcode:
            raise Exception("Unknown error code")
        raise Exception(self.errorcode[code])
# Demonstration of usage
custom_error = CustomError()
custom_error.add_error_code("EEXIST", "File already exists")
custom_error.add_error_code("ENOENT", "No such file or directory")

# Trigger each code in turn -- two registered, one unknown -- and print
# the message carried by the raised exception.
for code in ("EEXIST", "ENOENT", "EINVAL"):
    try:
        custom_error.raise_custom_error(code)
    except Exception as e:
        print(f"Custom Error: {e}")
#!/bin/bash
# Copyright 2020 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
_XTRACE_ZOOKEEPER=$(set +o | grep xtrace)
set +o xtrace
# Returns 0 when the monasca-zookeeper devstack service is enabled.
function is_zookeeper_enabled {
    is_service_enabled monasca-zookeeper && return 0
    return 1
}
# Remove every trace of the Zookeeper installation: systemd unit, install
# tree, data and log directories.
function clean_zookeeper {
    if is_zookeeper_enabled; then
        echo_summary "Cleaning Monasca Zookeeper"
        sudo systemctl disable zookeeper
        sudo systemctl stop zookeeper
        sudo rm -rf /var/log/zookeeper
        sudo rm -rf /var/lib/zookeeper
        sudo rm -rf /opt/zookeeper-${ZOOKEEPER_VERSION}
        sudo rm -rf /opt/zookeeper
        sudo rm -rf /etc/systemd/system/zookeeper.service
        sudo systemctl daemon-reload
    fi
}
# Download, unpack and configure Apache Zookeeper under /opt, create its
# service user, data/log dirs and systemd unit, then start it.
function install_zookeeper {
    if is_zookeeper_enabled; then
        echo_summary "Install Monasca Zookeeper"
        local zookeeper_tarball=zookeeper-${ZOOKEEPER_VERSION}.tar.gz
        local zookeeper_tarball_url=${APACHE_ARCHIVES}zookeeper/zookeeper-${ZOOKEEPER_VERSION}/${zookeeper_tarball}
        local zookeeper_tarball_dest
        zookeeper_tarball_dest=`get_extra_file ${zookeeper_tarball_url}`
        sudo groupadd --system zookeeper || true
        sudo useradd --system -g zookeeper zookeeper || true
        sudo tar -xzf ${zookeeper_tarball_dest} -C /opt
        sudo ln -sf /opt/zookeeper-${ZOOKEEPER_VERSION} /opt/zookeeper
        sudo cp $PLUGIN_FILES/zookeeper/* /opt/zookeeper/conf
        sudo chown -R zookeeper:zookeeper /opt/zookeeper/
        # -p makes re-installation idempotent (plain mkdir failed on re-run).
        sudo mkdir -p /var/log/zookeeper
        sudo chown -R zookeeper:zookeeper /var/log/zookeeper
        sudo mkdir -p /var/lib/zookeeper
        sudo chown -R zookeeper:zookeeper /var/lib/zookeeper
        sudo cp -f "${MONASCA_API_DIR}"/devstack/files/zookeeper/zookeeper.service /etc/systemd/system/zookeeper.service
        # FIX: previously chown'ed /etc/systemd/system/kafka.service (a
        # copy/paste leftover from the kafka plugin) -- the unit installed
        # above is zookeeper.service.
        sudo chown root:root /etc/systemd/system/zookeeper.service
        sudo chmod 644 /etc/systemd/system/zookeeper.service
        sudo systemctl daemon-reload
        sudo systemctl enable zookeeper
        sudo systemctl start zookeeper || sudo systemctl restart zookeeper
    fi
}
$_XTRACE_ZOOKEEPER
|
<gh_stars>0
import { NbMenuItem } from '@nebular/theme';
// Sidebar menu shown to administrators. Mirrors the regular-user menu
// except Settlement Advice is visible here (hidden: false).
export const MENU_ITEMSADM: NbMenuItem[] = [
  {
    title: 'Dashboard',
    icon: 'nb-home',
    link: '/pages/dashboard',
    home: true,
  },
  {
    title: 'Features',
    group: true,
  },
  {
    title: 'View Centre',
    icon: 'nb-tables',
    link: '/pages/manage'
  },
  {
    title: 'Manage User',
    icon: 'nb-tables',
    link: '/pages/user'
  },
  {
    title: 'Add Booking',
    icon: 'nb-compose',
    children: [
      {
        title: 'Pickup and Dropoff',
        link: '/pages/add-booking/Servicing',
      },
      {
        title: 'Chauffeur',
        link: '/pages/add-booking/Chauffeur',
      },
      {
        title: 'RSA',
        link: '/pages/add-booking/RSA',
      }
    ],
  },
  {
    title: 'Active',
    icon: 'nb-skip-forward',
    children: [
      {
        title: 'Pickup',
        link: '/pages/Active/pickup',
      },
      {
        title: 'At Centre',
        link: '/pages/Active/at-centre',
      },
      {
        title: 'Drop-off',
        link: '/pages/Active/drop-off',
      },
      {
        title: 'RSA',
        link: '/pages/Active/rsa',
      },
      {
        title: 'Chauffeur',
        link: '/pages/Active/chauffeur',
      }
    ],
  },
  {
    title: 'Upcoming Booking',
    icon: 'nb-tables',
    link: '/pages/upcoming',
  },
  {
    title: 'Upcoming RSA',
    icon: 'nb-tables',
    link: '/pages/upcomingrsa',
  },
  {
    title: 'Paused',
    icon: 'nb-pause',
    link: '/pages/paused',
  },
  {
    title: 'Cancelled',
    icon: 'nb-close',
    link: '/pages/cancelled',
  },
  {
    title: 'Completed',
    icon: 'nb-locked',
    link: '/pages/completed',
  },
  {
    title: 'Mishaps',
    icon: 'nb-alert',
    link: '/pages/mishaps'
  },
  {
    title: 'Reports',
    icon: 'nb-tables',
    link: '/pages/reports'
  },
  {
    // Admin-only entry: visible here, hidden in the user menu.
    title: 'Settlement Advice',
    icon: 'nb-tables',
    link: '/pages/payment',
    hidden: false
  },
  {
    title: 'Payment-Gateway',
    icon: 'nb-tables',
    children: [
      {
        title: 'Add Booking',
        link: '/pages/payment-gateway/Add Booking',
      },
      {
        title: 'Transactions',
        link: '/pages/payment-gateway/Transactions',
      }
    ],
  },
  {
    title: 'Wallet',
    icon: 'nb-tables',
    children: [
      {
        title: 'Add Credit',
        link: '/pages/wallet/add-credit',
      },
      {
        title: 'Account Statement',
        link: '/pages/wallet/account-statement',
      },
      {
        title: 'Pending Approval',
        link: '/pages/wallet/pending-approval',
      }
    ],
  },
  // {
  //   title: 'Unconfirmed',
  //   icon: 'nb-tables',
  //   link: '/pages/unconfirmed',
  // },
  {
    title: 'CRE Reports',
    icon: 'nb-tables',
    link: '/pages/cre-reports',
  },
  {
    title: 'Escalation Matrix',
    icon: 'nb-tables',
    link: '/pages/matrix',
  }
];
// Sidebar menu shown to regular users. Mirrors the admin menu except
// Settlement Advice is hidden here (hidden: true).
export const MENU_ITEMSUSR: NbMenuItem[] = [
  {
    title: 'Dashboard',
    icon: 'nb-home',
    link: '/pages/dashboard',
    home: true,
  },
  {
    title: 'Features',
    group: true,
  },
  {
    title: 'View Centre',
    icon: 'nb-tables',
    link: '/pages/manage'
  },
  {
    title: 'Manage User',
    icon: 'nb-tables',
    link: '/pages/user'
  },
  {
    title: 'Add Booking',
    icon: 'nb-compose',
    children: [
      {
        title: 'Pickup and Dropoff',
        link: '/pages/add-booking/Servicing',
      },
      {
        title: 'Chauffeur',
        link: '/pages/add-booking/Chauffeur',
      },
      {
        title: 'RSA',
        link: '/pages/add-booking/RSA',
      }
    ],
  },
  {
    title: 'Active',
    icon: 'nb-skip-forward',
    children: [
      {
        title: 'Pickup',
        link: '/pages/Active/pickup',
      },
      {
        title: 'At Centre',
        link: '/pages/Active/at-centre',
      },
      {
        title: 'Drop-off',
        link: '/pages/Active/drop-off',
      },
      {
        title: 'RSA',
        link: '/pages/Active/rsa',
      },
      {
        title: 'Chauffeur',
        link: '/pages/Active/chauffeur',
      }
    ],
  },
  {
    title: 'Upcoming Booking',
    icon: 'nb-tables',
    link: '/pages/upcoming',
  },
  {
    title: 'Upcoming RSA',
    icon: 'nb-tables',
    link: '/pages/upcomingrsa',
  },
  {
    title: 'Paused',
    icon: 'nb-pause',
    link: '/pages/paused',
  },
  {
    title: 'Cancelled',
    icon: 'nb-close',
    link: '/pages/cancelled',
  },
  {
    title: 'Completed',
    icon: 'nb-locked',
    link: '/pages/completed',
  },
  {
    title: 'Mishaps',
    icon: 'nb-alert',
    link: '/pages/mishaps'
  },
  {
    title: 'Reports',
    icon: 'nb-tables',
    link: '/pages/reports'
  },
  {
    // Hidden for regular users; visible in the admin menu.
    title: 'Settlement Advice',
    icon: 'nb-tables',
    link: '/pages/payment',
    hidden: true
  },
  {
    title: 'Payment-Gateway',
    icon: 'nb-tables',
    children: [
      {
        title: 'Add Booking',
        link: '/pages/payment-gateway/Add Booking',
      },
      {
        title: 'Transactions',
        link: '/pages/payment-gateway/Transactions',
      }
    ],
  },
  {
    title: 'Wallet',
    icon: 'nb-tables',
    children: [
      {
        title: 'Add Credit',
        link: '/pages/wallet/add-credit',
      },
      {
        title: 'Account Statement',
        link: '/pages/wallet/account-statement',
      },
      {
        title: 'Pending Approval',
        link: '/pages/wallet/pending-approval',
      }
    ],
  },
  // {
  //   title: 'Unconfirmed',
  //   icon: 'nb-tables',
  //   link: '/pages/unconfirmed',
  // },
  {
    // FIX: was 'Cre Reports' -- made consistent with the admin menu's
    // 'CRE Reports' label for the same /pages/cre-reports route.
    title: 'CRE Reports',
    icon: 'nb-tables',
    link: '/pages/cre-reports',
  },
  {
    title: 'Escalation Matrix',
    icon: 'nb-tables',
    link: '/pages/matrix',
  }
];
// Sidebar menu for insurance users: no View Centre, Mishaps, payments,
// wallet or CRE Reports entries compared with the regular menus.
export const MENU_INSURANCESUSR: NbMenuItem[] = [
  {
    title: 'Dashboard',
    icon: 'nb-home',
    link: '/pages/dashboard',
    home: true,
  },
  {
    title: 'Features',
    group: true,
  },
  {
    title: 'Manage User',
    icon: 'nb-tables',
    link: '/pages/user'
  },
  {
    title: 'Add Booking',
    icon: 'nb-compose',
    children: [
      {
        title: 'Pickup and Dropoff',
        link: '/pages/add-booking/Servicing',
      },
      {
        title: 'Chauffeur',
        link: '/pages/add-booking/Chauffeur',
      },
      {
        title: 'RSA',
        link: '/pages/add-booking/RSA',
      },
    ],
  },
  {
    title: 'Active',
    icon: 'nb-skip-forward',
    children: [
      {
        title: 'Pickup',
        link: '/pages/Active/pickup',
      },
      {
        title: 'At Centre',
        link: '/pages/Active/at-centre',
      },
      {
        title: 'Drop-off',
        link: '/pages/Active/drop-off',
      },
      {
        title: 'Chauffeur',
        link: '/pages/Active/chauffeur',
      },
      {
        title: 'RSA',
        link: '/pages/Active/rsa',
      }
    ],
  },
  {
    title: 'Upcoming Booking',
    icon: 'nb-tables',
    link: '/pages/upcoming',
  },
  {
    title: 'Upcoming RSA',
    icon: 'nb-tables',
    link: '/pages/upcomingrsa',
  },
  {
    title: 'Paused',
    icon: 'nb-pause',
    link: '/pages/paused',
  },
  {
    title: 'Cancelled',
    icon: 'nb-close',
    link: '/pages/cancelled',
  },
  {
    title: 'Completed',
    icon: 'nb-locked',
    link: '/pages/completed',
  },
  // {
  //   title: 'Payment-Gateway',
  //   icon: 'nb-tables',
  //   children: [
  //     {
  //       title: 'Add Booking',
  //       link: '/pages/payment-gateway/Add Booking',
  //     },
  //     {
  //       title: 'Transactions',
  //       link: '/pages/payment-gateway/Transactions',
  //     }
  //   ],
  // },
  // {
  //   title: 'Wallet',
  //   icon: 'nb-tables',
  //   children: [
  //     {
  //       title: 'Add Credit',
  //       link: '/pages/wallet/add-credit',
  //     },
  //     {
  //       title: 'Account Statement',
  //       link: '/pages/wallet/account-statement',
  //     },
  //     {
  //       title: 'Pending Approval',
  //       link: '/pages/wallet/pending-approval',
  //     }
  //   ],
  // },
  {
    title: 'Reports',
    icon: 'nb-tables',
    link: '/pages/reports'
  },
  // {
  //   title: 'Unconfirmed',
  //   icon: 'nb-tables',
  //   link: '/pages/unconfirmed',
  // },
  {
    title: 'Escalation Matrix',
    icon: 'nb-tables',
    link: '/pages/matrix',
  }
];
|
#!/usr/bin/env bash
# Copyright 2013-2018 Fabian Groffen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Test-harness configuration for carbon-c-relay.
EXEC=../relay
SMEXEC=../sendmetric
EFLAGS="-Htest.hostname -t"
DIFF="diff -Nu"
POST="cat"
# sed program that strips run-specific noise (config echo, version banner,
# timestamps, pointer-valued stub ids) so outputs diff deterministically.
CNFCLN=( sed -e '/^configuration:/,/^parsed configuration follows:/d'
         -e '/starting carbon-c-relay v/d'
         -e 's/^\[[0-9][0-9\-]* [0-9][0-9:]*\] //'
         -e 's/_stub_[0-9a-fx][0-9a-fx]*__/_stub_0xc0d3__/')
# Preload libfaketime so timestamps are reproducible (macOS + Linux variants).
export DYLD_FORCE_FLAT_NAMESPACE=1
export DYLD_INSERT_LIBRARIES=../.libs/libfaketime.dylib
export LD_PRELOAD=../.libs/libfaketime.so
# run_configtest <relay-flags> <input-file>
# Feed <input-file> (a .tst/.dbg file) to the relay with the given flags,
# normalise the output through CNFCLN, and diff it against the recorded
# ${test}.out expectation.  Prints PASS/FAIL; returns 0 on PASS, 1 on FAIL.
run_configtest() {
	local eflags="$1"
	local test=${2%.*}
	local conf="${test}.conf"
	local tdiff
	# fall back to the shared issues directory when no local config exists
	[[ -e ${conf} ]] || conf="../issues/${conf}"
	echo -n "${test}: "
	# NOTE(review): 'trigger_bash_segv_print=$?' appears to exist only to make
	# bash report a relay crash status inside the subshell -- confirm intent.
	# The overall exit status is taken from the diff stage (PIPESTATUS[3]).
	tdiff=$(cat ${2} \
		| ( ${EXEC} ${eflags} -f "${conf}" ; trigger_bash_segv_print=$?) 2>&1 \
		| "${CNFCLN[@]}" \
		| ${DIFF} "${test}.out" - --label "${test}.out" --label "${test}.out" \
		| ${POST} \
		; exit ${PIPESTATUS[3]})
	if [[ $? == 0 ]] ; then
		echo "PASS"
		return 0
	else
		echo "FAIL"
		echo "${tdiff}"
		return 1
	fi
}
# run_servertest <conf-file> <payload-file>
# End-to-end test: start one relay (or two when a ${test}-2.* config exists,
# "DUAL" mode), send <payload-file> through sendmetric over a unix socket,
# collect what arrived in the file cluster plus any (ERR) log lines, and diff
# against ${payload}out.  Returns 0 on PASS, 1 on FAIL.
run_servertest() {
	local mode=SINGLE
	local tmpdir=$(mktemp -d)
	local output=
	local pidfile=
	local unixsock=
	local port=
	local output2=
	local pidfile2=
	local port2=
	local dataout="${tmpdir}"/data.out
	local confarg=$1
	local payload=$2
	local payloadexpect="${payload}out"
	local test=${confarg%.*}
	local confarg2=${test}-2.${confarg##*.}
	# a second config file means a two-relay (forwarding) topology
	[[ -e ${confarg2} ]] && mode=DUAL
	local start_server_result
	local start_server_lastport=3020 # TODO
	# start_server <id> <remoteport> <conf-template>
	# Writes a concrete config (templated @port@/@remoteport@/@cert@), starts a
	# relay on a unix socket, and reports port/sock/pidfile/logfile through the
	# start_server_result array.  Returns 1 when the relay fails to start.
	start_server() {
		local id=$1
		local remoteport=$2
		local confarg=$3
		# determine a free port to use
		local port=${start_server_lastport} # TODO
		: $((start_server_lastport++)) # TODO
		local unixsock="${tmpdir}/sock.${port}"
		local cert="${test}.cert"
		local ca="${test}.cert"
		local conf="${tmpdir}"/conf
		local output="${tmpdir}"/relay-${id}.out
		local pidfile="${tmpdir}"/pidfile-${id}
		local relayargs=
		# per-test and per-relay extra arguments (the -<id> file wins)
		[[ -e ${test}.args ]] && relayargs=$(< ${test}.args)
		[[ -e ${test}-${id}.args ]] && relayargs=$(< ${test}-${id}.args)
		[[ -e ${ca} ]] && relayargs+=" -C ${ca}"
		# write config file with known listener
		{
			echo "# relay ${id}, mode ${mode}"
			echo "listen type linemode"
			echo "    ${unixsock} proto unix"
			echo "    ;"
			echo
			echo "cluster default"
			echo "    file ${dataout}"
			echo "    ;"
			echo
			if [[ -n ${relayargs} ]] ; then
				echo "# extra arguments given to ${EXEC}:"
				echo "# ${relayargs}"
				echo
			fi
			echo "# contents from ${confarg} below this line"
			sed \
				-e "s/@port@/${port}/g" \
				-e "s/@remoteport@/${remoteport}/g" \
				-e "s/@cert@/${cert}/g" \
				"${confarg}"
		} > "${conf}"
		${EXEC} -d -w 1 -f "${conf}" -Htest.hostname -s -D \
			-l "${output}" -P "${pidfile}" ${relayargs}
		if [[ $? != 0 ]] ; then
			# hmmm
			echo "failed to start relay ${id} in ${PWD}:"
			echo ${EXEC} -d -f "${conf}" -Htest.hostname -s -D -l \
				"${output}" -P "${pidfile}" ${relayargs}
			echo "=== ${conf} ==="
			cat "${conf}"
			echo "=== ${output} ==="
			cat "${output}"
			return 1
		fi
		echo -n "relay ${id} "
		start_server_result=( ${port} ${unixsock} ${pidfile} ${output} )
	}
	echo -n "${test}: "
	start_server 1 "" ${confarg} || return 1
	port=${start_server_result[0]}
	unixsock=${start_server_result[1]}
	pidfile=${start_server_result[2]}
	output=${start_server_result[3]}
	if [[ ${mode} == DUAL ]] ; then
		# relay 2 forwards to relay 1 (its port is passed as @remoteport@);
		# the payload is then injected into relay 2's socket below
		if ! start_server 2 ${port} ${confarg2} ; then
			kill -KILL $(< ${pidfile})
			return 1
		fi
		port2=${start_server_result[0]}
		unixsock=${start_server_result[1]}
		pidfile2=${start_server_result[2]}
		output2=${start_server_result[3]}
	fi
	${SMEXEC} "${unixsock}" < "${payload}"
	if [[ $? != 0 ]] ; then
		# hmmm
		echo "failed to send payload"
		return 1
	fi
	# allow everything to be processed
	sleep 2
	# kill and wait for relay to come down
	local pids=$(< "${pidfile}")
	[[ ${mode} == DUAL ]] && pids+=" $(< "${pidfile2}")"
	kill ${pids}
	local i=10
	while [[ ${i} -gt 0 ]] ; do
		ps -p ${pids} >& /dev/null || break
		echo -n "."
		sleep 1
		: $((i--))
	done
	# if it didn't yet die, make it so
	[[ ${i} == 0 ]] && kill -KILL ${pids}
	# add errors to the mix
	sed -n 's/^.*(ERR)/relay 1:/p' ${output} >> "${dataout}"
	[[ -n ${output2} ]] && \
		sed -n 's/^.*(ERR)/relay 2:/p' ${output2} >> "${dataout}"
	# compare some notes
	# NOTE(review): tdiff is not declared local here (unlike run_configtest),
	# so it leaks into the global scope -- confirm whether intentional.
	local ret
	tdiff=$(${DIFF} "${payloadexpect}" "${dataout}" \
		--label "${payloadexpect}" --label "${payloadexpect}" \
		| ${POST} \
		; exit ${PIPESTATUS[0]})
	if [[ $? == 0 ]] ; then
		echo "PASS"
		ret=0
	else
		echo "FAIL"
		echo "${tdiff}"
		ret=1
	fi
	# debugging aid: keep the temp dir and drop into a shell when requested
	if [[ -n ${RUN_TEST_DROP_IN_SHELL} ]] ; then
		echo "dropping shell in ${tmpdir}"
		( unset DYLD_FORCE_FLAT_NAMESPACE DYLD_INSERT_LIBRARIES LD_PRELOAD;
		cd ${tmpdir} && ${SHELL} )
	fi
	# cleanup
	rm -Rf "${tmpdir}"
	return ${ret}
}
# Parse leading command-line options; everything after them (or after "--")
# is treated as the list of test names.
while [[ -n $1 ]] ; do
	case "$1" in
		--approve|-a)
			# pipe diffs through patch(1) so expected outputs get updated in place
			POST="patch"
			;;
		--)
			shift
			break
			;;
		*)
			break
			;;
	esac
	shift
done
# Walk the requested tests: <name>.tst -> plain config test, <name>.dbg ->
# config test in debug mode, <name>.stst -> full client/server test.
# Counts runs and failures; the exit status is the number of failures.
tstcnt=0
tstfail=0
# BUGFIX: use "$@" instead of $* so test names containing whitespace are not
# word-split into bogus names.
for t in "$@" ; do
	if [[ -e ${t}.tst ]] ; then
		: $((tstcnt++))
		run_configtest "${EFLAGS}" "${t}.tst" || : $((tstfail++))
	elif [[ -e ${t}.dbg ]] ; then
		: $((tstcnt++))
		run_configtest "${EFLAGS} -d" "${t}.dbg" || : $((tstfail++))
	elif [[ -e ${t}.stst ]] ; then
		: $((tstcnt++))
		run_servertest "${t}.stst" "${t}.payload" || : $((tstfail++))
	fi
done
echo "Ran ${tstcnt} tests with ${tstfail} failing"
# non-zero exit == at least one failing test (usable from CI/make)
exit ${tstfail}
|
# Deploy the freshly built test binary and shared library to the ARM target
# board over SSH (device address and destination path are hard-coded).
cd build/linux-arm-debug/bin
scp goahead-test libgo.so root@192.168.3.100:/run/media/mmcblk0p1/goahead
|
<filename>packages/web/src/components/Post/PostForm/PostFormPage.tsx<gh_stars>0
import React, { FC, useContext, useState, useEffect } from 'react';
import { RouteComponentProps } from 'react-router-dom';
import axios from 'axios';
import PostFormValues from 'types/PostFormValues';
import PopulatedPost from '@rddt/common/types/PopulatedPost';
import RootStoreContext from 'stores/RootStore/RootStore';
import PostForm from './PostForm';
/** Route parameters this page may be mounted with. */
interface MatchProps {
  /** Community the new post belongs to (used to build the POST URL). */
  communityId?: string;
  /** Id of a post being edited. */
  postId?: string;
}
/**
 * Page wrapper around PostForm.  Chooses between "Add" and "Edit" mode based
 * on router state (an existing post passed via history.location.state) and
 * wires up the create/edit request handlers.
 */
const PostFormPage: FC<RouteComponentProps<MatchProps>> = ({
  match,
  history,
}) => {
  const { token } = useContext(RootStoreContext).authStore.authState;
  // Route param used by the create handlers; hoisted above the closures that
  // capture it (it was previously destructured at the bottom of the body,
  // which only worked because the handlers run after mount).
  const { communityId } = match.params;
  const [formType, setFormType] = useState<'Add' | 'Edit'>('Add');
  const [editPost, setEditPost] = useState<PopulatedPost>();
  // Switch to "Edit" mode when an existing post was handed over via router
  // state; runs once on mount by design.
  useEffect(() => {
    if (history.location.state && history.location.state.post) {
      setEditPost(history.location.state.post);
      setFormType('Edit');
    }
  }, []);
  /** POSTs a multipart form (post with image) to the community endpoint. */
  const addImagePostHandler = async (formData: any) => {
    try {
      const request = await axios.post(
        `http://localhost:8080/communities/${communityId}/posts`,
        formData,
        {
          headers: {
            Authorization: 'bearer ' + token,
            'Content-Type': 'multipart/form-data',
          },
        },
      );
      const { post } = request.data.data;
      console.log(post);
    } catch (err) {
      console.log(err);
    }
  };
  /** POSTs a plain (text) post to the community endpoint. */
  const addPostHandler = async (values: PostFormValues) => {
    try {
      const request = await axios.post(
        `http://localhost:8080/communities/${communityId}/posts`,
        values,
        {
          headers: { Authorization: 'bearer ' + token },
        },
      );
      const { post } = request.data.data;
      console.log(post);
    } catch (err) {
      console.log(err);
    }
  };
  /** PATCHes the currently loaded post with the edited values. */
  const editPostHandler = async (values: PostFormValues) => {
    try {
      if (!editPost) {
        // Throw a real Error (never a bare string) so the catch logs context.
        throw new Error('editPostHandler called while no post is loaded');
      }
      await axios.patch(
        `http://localhost:8080/posts/${editPost._id}`,
        values,
        {
          headers: { Authorization: 'bearer ' + token },
        },
      );
    } catch (err) {
      console.log(err);
    }
  };
  const cancelHandler = () => {};
  return (
    <div
      style={{
        width: '100vw',
        height: '90vh',
        display: 'flex',
        justifyContent: 'center',
        alignItems: 'center',
      }}
    >
      <PostForm
        cancelHandler={cancelHandler}
        type={formType}
        addPostHandler={addPostHandler}
        editPostHandler={editPostHandler}
        editPost={editPost}
        addImagePostHandler={addImagePostHandler}
      />
    </div>
  );
};
export default PostFormPage;
|
<gh_stars>100-1000
"""
Code illustration: 1.01
Your first GUI application - the top level window
Tkinter GUI Application Development Blueprints
"""
import tkinter as tk
# Create the top-level (root) application window.
root = tk.Tk()
# Enter the Tk event loop; blocks until the window is closed.
root.mainloop()
|
<reponame>khepherer/java_lleida_01_06_2017
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package handlers;
import java.util.Collections;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.namespace.QName;
import javax.xml.soap.SOAPBody;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPMessage;
import javax.xml.ws.handler.MessageContext;
import javax.xml.ws.handler.soap.SOAPHandler;
import javax.xml.ws.handler.soap.SOAPMessageContext;
import org.w3c.dom.Node;
/**
*
* @author usuario
*/
public class EjemploMessageHandler implements SOAPHandler<SOAPMessageContext> {
    private static final Logger LOG = Logger.getLogger(EjemploMessageHandler.class.getName());
    /**
     * Logs the date carried in a {@code fechaResponse} SOAP body, when
     * present.  Always lets the message continue (returns {@code true})
     * unless reading the SOAP body itself fails.
     */
    @Override
    public boolean handleMessage(SOAPMessageContext contexto) {
        try {
            final SOAPMessage mensaje = contexto.getMessage();
            final SOAPBody cuerpo = mensaje.getSOAPBody();
            final Node primero = cuerpo.getFirstChild();
            // Guard against an empty body: getFirstChild() may return null,
            // which the previous code would dereference.
            if (primero != null && "fechaResponse".equals(primero.getLocalName())) {
                final Node nodoReturn = primero.getFirstChild();
                final Node nodoFecha = nodoReturn == null ? null : nodoReturn.getFirstChild();
                LOG.log(Level.INFO, "Nombre local: {0}. Fecha: {1}",
                        new Object[]{primero.getLocalName(),
                                     nodoFecha == null ? null : nodoFecha.getNodeValue()});
            }
            return true;
        } catch (SOAPException ex) {
            LOG.log(Level.SEVERE, null, ex);
            return false;
        }
    }
    /** This handler processes no headers. */
    @Override
    public Set<QName> getHeaders() {
        // emptySet() is the type-safe replacement for the raw EMPTY_SET field.
        return Collections.emptySet();
    }
    @Override
    public boolean handleFault(SOAPMessageContext messageContext) {
        return true;
    }
    @Override
    public void close(MessageContext context) {
        // Nothing to clean up.
    }
}
|
<gh_stars>0
// Backend API endpoint used by the client (host without scheme, plus port).
// NOTE(review): hard-coded address -- presumably meant to vary per
// environment; confirm before deploying.
export const apiServer = '172.16.31.10'
export const apiPort = 8000
|
#! /usr/bin/env bash
# Copyright (c) 2018 Herbert Shen <ishbguy@hotmail.com> All Rights Reserved.
# Released under the terms of the MIT License.
# Absolute path of this file and its directory, for locating sibling libs.
export BACON_UTILS_ABS_SRC="$(readlink -f "${BASH_SOURCE[0]}")"
export BACON_UTILS_ABS_DIR="$(dirname "$BACON_UTILS_ABS_SRC")"
# global variable interfaces
declare -g BACON_NO_ENSURE=""   # non-empty: bacon_ensure becomes a no-op
declare -g BACON_DEBUG=""       # non-empty: bacon_debug prints to stderr
declare -ga BACON_LIB_DIR=()    # search path used by bacon_load
declare -gA BACON_COLOR=()      # escape-code map filled by bacon_color_init
# bacon_has_map <map-name> <key>
# True when <key> is non-empty and maps to a non-empty value in the named
# associative array (accessed through a nameref).
bacon_has_map() {
    local -n map="$1"; shift
    [[ -n $1 && -n ${map[$1]} ]]
}
# bacon_color_init [ --setaf | --setab | --misc ] var
# Assigns the selected set of escape mappings to the given associative array names.
# Options may be repeated; each consumes the following array name.
# NOTE(review): the bare "[39m"/"[49m"/"[0m" strings are expected to carry a
# literal ESC byte before '[' -- confirm the file was not mangled in transit.
bacon_color_init() {
    local -a fg_clrs bg_clrs msc
    local x
    fg_clrs=(black red green yellow blue magenta cyan grey darkgrey ltred ltgreen ltyellow ltblue ltmagenta ltcyan white)
    # derive the bg_* names from the foreground list via brace expansion
    bg_clrs=($(IFS=,; eval "echo bg_{${fg_clrs[*]}}"))
    msc=(sgr0 bold dim smul blink rev invis)
    # loop while a target array name ($2) is still present
    while ! ${2:+false}; do
        case ${1#--} in
            setaf)
                for x in "${!fg_clrs[@]}"; do
                    # map[color-name] = $(tput setaf <index>)
                    eval "$2"'[${fg_clrs[x]}]=$(tput "${1#--}" "$x")'
                done
                eval "$2[default]=[39m"
                ;;
            setab)
                for x in "${!bg_clrs[@]}"; do
                    eval "$2"'[${bg_clrs[x]}]=$(tput "${1#--}" "$x")'
                done
                eval "$2[bg_default]=[49m"
                ;;
            misc)
                for x in "${msc[@]}"; do
                    eval "$2"'[$x]=$(tput "$x")'
                done
                eval "$2[reset]=[0m"
                eval "$2[none]=[0m"
                ;;
            *)
                return 1
        esac
        shift 2
    done
}
# Populate the shared BACON_COLOR map with foreground, background and misc codes.
bacon_color_init --setaf BACON_COLOR --setab BACON_COLOR --misc BACON_COLOR
# bacon_set_color <name>... : print the concatenated escape codes for all
# recognised color/attribute names; unknown names are silently skipped.
bacon_set_color() {
    local color c
    for c in "$@"; do
        bacon_has_map BACON_COLOR "$c" && color+="${BACON_COLOR[$c]}"
    done
    printf '%s' "$color"
}
# bacon_putc [color...] <words...> : print the words colorised; leading
# arguments naming colors/attributes are consumed as the style.
bacon_putc() {
    local color
    while bacon_has_map BACON_COLOR "$1"; do
        color+="${BACON_COLOR[$1]}"; shift
    done
    # no recognised color given: fall back to the default foreground
    color="${color:-${BACON_COLOR[default]}}"
    local IFS=' '
    printf "${color}%s${BACON_COLOR[reset]}\n" "$*"
}
# bacon_puts: echo-like helper joining all args with single spaces.
bacon_puts() {
    local IFS=' '
    printf "%s\n" "$*"
}
# bacon_debug: print to stderr only when BACON_DEBUG is set.
bacon_debug() {
    [[ -z $BACON_DEBUG ]] || bacon_puts "[DEBUG]" "$@" >&2
}
# bacon_info: informational message on stderr (yellow).
bacon_info() {
    bacon_putc yellow "[INFO]" "$@" >&2
}
# bacon_warn: warning on stderr (red); returns 1 so it can short-circuit.
bacon_warn() {
    bacon_putc red "[WARN]" "$@" >&2
    return 1
}
# bacon_die: fatal error on stderr (red); exits the whole process.
bacon_die() {
    bacon_putc red "[ERROR]" "$@" >&2
    exit 1
}
# bacon_defined <var-name> : true when the variable is declared.
bacon_defined() {
    local usage="Usage: ${FUNCNAME[0]} <var-name>"
    bacon_ensure "[[ $# == 1 && -n $1 ]]" "$usage"
    declare -p "$1" &>/dev/null
}
# bacon_definedf <func-name> : true when the function is defined.
bacon_definedf() {
    local usage="Usage: ${FUNCNAME[0]} <func-name>"
    bacon_ensure "[[ $# == 1 && -n $1 ]]" "$usage"
    declare -f "$1" &>/dev/null
}
# bacon_typeof <name> : print the kind of the named thing -- one of the
# BACON_TYPE names for variables, "function" for functions, or the output of
# `type -t` for aliases/keywords/builtins/commands.  Returns 1 when unknown.
bacon_typeof() {
    local usage="Usage: ${FUNCNAME[0]} <string>"
    bacon_ensure "[[ $# == 1 && -n $1 ]]" "$usage"
    # shellcheck disable=SC2034
    if ! bacon_defined BACON_TYPE; then
        # lazily build the declare-flag -> type-name lookup table once
        declare -gA BACON_TYPE=()
        BACON_TYPE[-]="normal"
        BACON_TYPE[a]="array"
        BACON_TYPE[A]="map"
        BACON_TYPE[i]="integer"
        BACON_TYPE[l]="lower"
        BACON_TYPE[u]="upper"
        BACON_TYPE[n]="reference"
        BACON_TYPE[x]="export"
        BACON_TYPE[f]="function"
        # BACON_TYPE[r]="readonly"
        # BACON_TYPE[g]="global"
    fi
    # [[ $# == 1 && -n $1 ]] || return 1
    if declare -p "$1" &>/dev/null; then
        local IFS=' '
        # shellcheck disable=SC2207
        local -a out=($(declare -p "$1"))
        # second word of `declare -p` output holds the flags, e.g. "-aA"
        local type="${out[1]}"
        [[ $type =~ -([-aAilunx]) ]]
        echo "${BACON_TYPE[${BASH_REMATCH[1]}]}"
    elif declare -F "$1" &>/dev/null; then
        echo "function"
    # check for alias, keyword, builtin and file|cmd
    elif type -t "$1" &>/dev/null; then
        type -t "$1"
    else
        return 1
    fi
    return 0
}
# bacon_tmpfd: print a currently unused file-descriptor number (the basename
# of a throwaway process-substitution path like /dev/fd/63).
bacon_tmpfd() {
    basename <(:)
}
# bacon_is_running <pid> : true when a process with that pid exists.
bacon_is_running() {
    ps -p "$1" &>/dev/null
}
# bacon_is_sourced: true when the *caller* was sourced rather than executed.
bacon_is_sourced() {
    [[ -n ${FUNCNAME[1]} && ${FUNCNAME[1]} != "main" ]]
}
# bacon_is_array <name> : true when the variable is an indexed array.
bacon_is_array() {
    local -a def=($(declare -p "$1" 2>/dev/null))
    [[ ${def[1]} =~ a ]]
}
# bacon_is_map <name> : true when the variable is an associative array.
bacon_is_map() {
    local -a def=($(declare -p "$1" 2>/dev/null))
    [[ ${def[1]} =~ A ]]
}
# bacon_has_cmd <cmd> : true when the command is resolvable in PATH.
bacon_has_cmd() {
    command -v "$1" &>/dev/null
}
# bacon_is_exist <path> : true when the path exists.
bacon_is_exist() {
    [[ -e $1 ]] &>/dev/null
}
# bacon_ensure <condition> [msg...] : evaluate <condition>; on failure die
# with the caller's file:line:function plus the message.  Disabled entirely
# when BACON_NO_ENSURE is non-empty.
bacon_ensure() {
    # shellcheck disable=SC2015
    [[ -z $BACON_NO_ENSURE ]] || return 0
    local cmd="$1"; shift
    local IFS=' '
    # shellcheck disable=SC2207
    local -a info=($(caller 0))
    # caller 0 yields "line func file"; reorder to file:line:func
    local info_str="${info[2]}:${info[0]}:${info[1]}"
    if ! (eval "$cmd" &>/dev/null); then
        bacon_die "$info_str: ${FUNCNAME[0]} '$cmd' failed." "$@"
    fi
}
# bacon_datecmp <d1> <d2> : print the difference d1-d2 in whole seconds.
bacon_datecmp() {
    echo "$(($(date -d "$1" +%s) - $(date -d "$2" +%s)))"
}
# bacon_encode [args...] : replace every non-alphanumeric character with '_'.
# With no arguments, acts as a stdin->stdout filter instead.
bacon_encode() {
    if [[ $# == 0 ]]; then
        sed -r 's/[^[:alnum:]]/_/g'
    else
        local IFS=' '
        echo "${*//[^[:alnum:]]/_}"
    fi
}
# bacon_tolower [args...] : lowercase args (or stdin when none given).
bacon_tolower() {
    if [[ $# == 0 ]]; then
        tr '[:upper:]' '[:lower:]'
    else
        local IFS=' '
        echo "${*,,}"
    fi
}
# bacon_toupper [args...] : uppercase args (or stdin when none given).
bacon_toupper() {
    if [[ $# == 0 ]]; then
        tr '[:lower:]' '[:upper:]'
    else
        local IFS=' '
        echo "${*^^}"
    fi
}
# bacon_pargs <opt-map> <arg-map> <optstring> [args...]
# Thin wrapper around getopts: records seen flags in <opt-map> and their
# OPTARG values in <arg-map>.  On a parse error prints $HELP (which the
# caller must define) and returns 1.
bacon_pargs() {
    local usage="Usage: ${FUNCNAME[0]} <opt-map> <arg-map> <optstring> [args...]"
    bacon_ensure "[[ $# -ge 3 ]]" "$usage"
    bacon_ensure "[[ -n $1 && -n $2 && -n $3 ]]" "$usage"
    bacon_ensure "bacon_is_map $1 && bacon_is_map $2" "$usage"
    local -n __opt="$1"
    local -n __arg="$2"
    local optstr="$3"
    shift 3
    # reset so repeated calls parse from the first argument again
    OPTIND=1
    while getopts "$optstr" opt; do
        if [[ $opt == ":" || $opt == "?" ]]; then
            # the HELP must be initialized by caller
            bacon_warn "$HELP" || return 1
        fi
        # shellcheck disable=SC2034
        __opt[$opt]=1
        # shellcheck disable=SC2034
        __arg[$opt]="$OPTARG"
    done
}
# bacon_popts <opt-map> <arg-map> <remain-args-array> <optstr-map> [args...]
# getopt(1)-based parser supporting short and long options.  Keys of
# <optstr-map> describe the options: "v" (short), "verbose" (long),
# "v|verbose" (both); a leading ":" marks an option that takes an argument.
# Seen options land in <opt-map>, their values in <arg-map>, and leftover
# positional arguments in <remain-args-array>.
bacon_popts() {
    local usage="Usage: ${FUNCNAME[0]} <opt-map> <arg-map> <remain-args-array> <optstr-map> [args...]"
    bacon_ensure "[[ $# -ge 4 ]]" "$usage"
    bacon_ensure "bacon_is_map $1 && bacon_is_map $2 && bacon_is_map $4" "$usage"
    bacon_ensure "bacon_is_array $3" "$usage"
    local -n __opts="$1"
    local -n __args="$2"
    local -n __rargs="$3"
    local -n __optstr="$4"
    shift 4
    # soa/loa: short/long optstring pieces; som/lom: lookup maps where the
    # value "x:" marks an option that expects an argument
    local -a soa=() loa=()
    local -A som=() lom=()
    local sos los tmp o m n
    # construct optstrings
    for o in "${!__optstr[@]}"; do
        if [[ $o =~ ^(:)?([a-zA-Z0-9]([^|]*)?)$ ]]; then
            n="${BASH_REMATCH[1]}"; m="${BASH_REMATCH[2]}"
            if [[ $m =~ ^[a-zA-Z0-9]$ ]]; then
                soa+=("$m$n"); som["$m"]="x$n"
            else
                loa+=("$m$n"); lom["$m"]="x$n"
            fi
        elif [[ $o =~ ^(:)?([a-zA-Z0-9])\|([a-zA-Z0-9].*)$ ]]; then
            soa+=("${BASH_REMATCH[2]}${BASH_REMATCH[1]}")
            som["${BASH_REMATCH[2]}"]="x${BASH_REMATCH[1]}"
            loa+=("${BASH_REMATCH[3]}${BASH_REMATCH[1]}")
            lom["${BASH_REMATCH[3]}"]="x${BASH_REMATCH[1]}"
        fi
    done
    sos="$(IFS= ; echo "${soa[*]}")"
    los="$(IFS=,; echo "${loa[*]}")"
    # parse options
    tmp="$(getopt -o "$sos" --long "$los" -- "$@")"
    [[ $? == 0 ]] || bacon_warn "getopt error..." || return 1
    # quote tmp for prevent eating empty string
    eval set -- "$tmp"
    while true; do
        if [[ $1 =~ ^--?([a-zA-Z].*)$ ]]; then
            if [[ -n "${som[${BASH_REMATCH[1]}]}" || -n "${lom[${BASH_REMATCH[1]}]}" ]]; then
                # value != "x" means the option declared an argument
                if [[ "${som[${BASH_REMATCH[1]}]}" != "x" && "${lom[${BASH_REMATCH[1]}]}" != "x" ]]; then
                    __opts[${BASH_REMATCH[1]}]=1
                    __args[${BASH_REMATCH[1]}]="$2"
                    shift 2; continue
                else
                    __opts[${BASH_REMATCH[1]}]=1
                    shift; continue
                fi
            fi
        elif [[ $1 == '--' ]]; then
            shift; break
        else
            bacon_warn "Internal error..." || return 1
        fi
    done
    __rargs=("$@")
}
# bacon_popts_help <optstr-map> [header...]
# Render a help text for an option map in bacon_popts format: one aligned
# line per option ("-s | --long  <arg>  description"), preceded by the
# optional header lines.
bacon_popts_help() {
    local usage="Usage: ${FUNCNAME[0]} <optstr-map> [args...]"
    bacon_ensure "[[ $# -ge 1 ]] && bacon_is_map $1" "$usage"
    local -n __optstr="$1"; shift
    # om: option label -> description; am: option label -> "<arg>" marker
    local -A om=() am=()
    local o m a len
    for o in "${!__optstr[@]}"; do
        if [[ $o =~ ^(:)?([a-zA-Z0-9]([^|]*)?)$ ]]; then
            m="${BASH_REMATCH[2]}"
            [[ -n ${BASH_REMATCH[1]} ]] && a="<arg>" || a=""
            if [[ $m =~ ^[a-zA-Z0-9]$ ]]; then
                om["-$m"]="${__optstr[$o]}"
                am["-$m"]="$a"
            else
                om[" --$m"]="${__optstr[$o]}"
                am[" --$m"]="$a"
            fi
        elif [[ $o =~ ^(:)?([a-zA-Z0-9])\|([a-zA-Z0-9].*)$ ]]; then
            [[ -n ${BASH_REMATCH[1]} ]] && a="<arg>" || a=""
            om["-${BASH_REMATCH[2]} | --${BASH_REMATCH[3]}"]="${__optstr[$o]}"
            am["-${BASH_REMATCH[2]} | --${BASH_REMATCH[3]}"]="$a"
        fi
    done
    # width of the widest option label, for column alignment below
    len=0
    for o in "${!om[@]}"; do
        (( ${#o} > len )) && len="${#o}"
    done
    local help=$(cat <<EOF
$(if [[ $# -gt 0 ]]; then bacon_puts "$@"; fi)
Options:
$(for o in "${!om[@]}"; do printf "\t%-${len}s%6s\t%s\n" "$o" "${am[$o]}" "${om[$o]}"; done)
EOF
)
    echo "$help"
}
# bacon_require_base <predicate> <msg> [objs...]
# Run <predicate> on every object; die with <msg> listing the ones that failed.
bacon_require_base() {
    local usage="Usage: ${FUNCNAME[0]} <func> <msg> [args...]"
    bacon_ensure "[[ $# -gt 2 ]]" "$usage"
    bacon_ensure "bacon_definedf $1" "$usage"
    local -a miss
    local cmd="$1"
    local msg="$2"
    shift 2
    local obj
    for obj in "$@"; do
        "$cmd" "$obj" || miss+=("$obj")
    done
    [[ ${#miss[@]} -eq 0 ]] || bacon_die "$msg: ${miss[*]}."
}
# Convenience wrappers over bacon_require_base:
# require variables to be defined
bacon_require_var() {
    bacon_require_base bacon_defined "You need to define vars" "$@"
}
# require functions to be defined
bacon_require_func() {
    bacon_require_base bacon_definedf "You need to define funcs" "$@"
}
# require external commands to be installed
bacon_require_cmd() {
    bacon_require_base bacon_has_cmd "You need to install cmds" "$@"
}
# require paths to exist
bacon_require() {
    bacon_require_base bacon_is_exist "No such files or dirs" "$@"
}
# bacon_abspath <path> : canonical absolute path.
bacon_abspath() {
    readlink -f "$1"
}
# bacon_self: absolute path of the file that called this function.
bacon_self() {
    bacon_abspath "${BASH_SOURCE[1]}"
}
# bacon_lib: absolute path of the directory containing this library file.
bacon_lib() {
    bacon_abspath "${BASH_SOURCE[0]%/*}"
}
# bacon_load <filename> : source <filename> from the first BACON_LIB_DIR
# entry that contains it, at most once per absolute path.  Returns 1 when
# the file is not found (or was already loaded).
bacon_load() {
    local usage="Usage: ${FUNCNAME[0]} <filename>"
    bacon_ensure "[[ $# == 1 && -n $1 ]]" "$usage"
    # shellcheck disable=SC2155
    bacon_defined __BACON_LOADED_FILE || declare -gA __BACON_LOADED_FILE=()
    local dir
    for dir in "${BACON_LIB_DIR[@]}"; do
        # shellcheck disable=SC1090,SC2034
        if [[ -f $dir/$1 ]]; then
            if ! bacon_has_map __BACON_LOADED_FILE "$dir/$1"; then
                source "$dir/$1" && __BACON_LOADED_FILE["$dir/$1"]="$dir/$1"
                return 0
            fi
        fi
    done
    return 1
}
# bacon_push <array> [args...] : append args to the named array.
bacon_push() {
    local usage="Usage: ${FUNCNAME[0]} <array> [args..]"
    bacon_ensure "(($# >= 2))" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$1") == array ]]" "$usage"
    local -n __array=$1; shift
    __array+=("$@")
}
# bacon_pop <array> : remove and print the last element; returns 1 when empty.
bacon_pop() {
    local usage="Usage: ${FUNCNAME[0]} <array>"
    bacon_ensure "(($# == 1))" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$1") == array ]]" "$usage"
    local -n __array=$1; shift
    local last=""
    [[ ${#__array[@]} == 0 ]] && return 1
    last="${__array[$((${#__array[@]}-1))]}"
    __array=("${__array[@]:0:$((${#__array[@]}-1))}")
    echo "$last"
}
# bacon_unshift <array> [args...] : prepend args to the named array.
bacon_unshift() {
    local usage="Usage: ${FUNCNAME[0]} <array> [args..]"
    bacon_ensure "(($# >= 2))" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$1") == array ]]" "$usage"
    local -n __array=$1; shift
    __array=("$@" "${__array[@]}")
}
# bacon_shift <array> : remove and print the first element; returns 1 when empty.
bacon_shift() {
    local usage="Usage: ${FUNCNAME[0]} <array>"
    bacon_ensure "(($# == 1))" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$1") == array ]]" "$usage"
    local -n __array=$1; shift
    local last=""
    [[ ${#__array[@]} == 0 ]] && return 1
    last="${__array[0]}"
    __array=("${__array[@]:1}")
    echo "$last"
}
# bacon_filter <out-array> <pattern> <args...>
# Append to <out-array> every arg matching the extended regex <pattern>.
bacon_filter() {
    local usage="Usage: ${FUNCNAME[0]} <out-array> <pattern> <args...>"
    bacon_ensure "(($# >= 2))" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$1") == array ]]" "$usage"
    local -n __array="$1"
    local p="$2"
    shift 2
    local arg
    for arg in "$@"; do
        # trailing `|| true` keeps the loop's exit status 0 on a non-match
        [[ $arg =~ $p ]] && __array+=("$arg") || true
    done
}
# bacon_map <out-array> <func> [args...]
# Append args to <out-array>, then invoke <func> once per element with a
# reference expression "__array[<index>]" so the callback can rewrite the
# element in place (e.g. via `local -n ref=$1`).
bacon_map() {
    local usage="Usage: ${FUNCNAME[0]} <out> <func> <args...>"
    bacon_ensure "(($# >= 2))" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$1") == array ]]" "$usage"
    bacon_ensure "[[ $(bacon_typeof "$2") == function ]]" "$usage"
    local -n __array="$1"
    local func="$2"
    shift 2
    local i
    __array+=("$@")
    # BUGFIX: iterate over the array *indices* ("${!__array[@]}"), not its
    # values.  The old loop used each element's value as the index, which only
    # worked when values happened to be valid indices.
    for i in "${!__array[@]}"; do
        "$func" "__array[$i]"
    done
}
# alias @export='bacon_export || return 0'
# bacon_export [namespace]
# Export BACON_SOURCE_<NS>_ABS_SRC and _ABS_DIR for the calling file; NS
# defaults to the caller's basename (uppercased, non-alnum chars encoded).
bacon_export() {
    local src="$(bacon_abspath "${BASH_SOURCE[1]}")"
    local dir="$(dirname "$src")"
    local -u ns="${1:-$(basename "$src" .bash | bacon_encode)}"
    # source export guard
    # eval "[[ -z \$BACON_SOURCE_${ns}_ABS_SRC ]]" || return 1
    eval "export BACON_SOURCE_${ns}_ABS_SRC=$src"
    eval "export BACON_SOURCE_${ns}_ABS_DIR=$dir"
}
# bacon_addprefix <prefix> [args...]
# Print all args each prefixed with <prefix>, space-separated (implemented
# with brace expansion; shell-special prefix characters are escaped first).
bacon_addprefix() {
    local usage="Usage: ${FUNCNAME[0]} <prefix> [args..]"
    bacon_ensure "(($# >=1 ))" "$usage"
    local p=$1; shift
    [[ $# == 0 ]] && return 0
    # escape characters that would break the eval'd brace expansion
    case $p in
        [\(\)]) p="\\$p" ;;
        [\<\>]) p="\\$p" ;;
        \") p='\"' ;;
        \') p="\\'" ;;
    esac
    local IFS=,
    if [[ $# == 1 ]]; then
        echo "${p}$1"
    else
        eval echo "${p}{$*}"
    fi
}
# bacon_addsuffix <suffix> [args...] : same as bacon_addprefix, appending.
bacon_addsuffix() {
    local usage="Usage: ${FUNCNAME[0]} <suffix> [args..]"
    bacon_ensure "(($# >=1 ))" "$usage"
    local s=$1; shift
    [[ $# == 0 ]] && return 0
    case $s in
        [\(\)]) s="\\$s" ;;
        [\<\>]) s="\\$s" ;;
        \") s='\"' ;;
        \') s="\\'" ;;
    esac
    local IFS=,
    if [[ $# == 1 ]]; then
        echo "$1${s}"
    else
        eval echo "{$*}${s}"
    fi
}
# bacon_wrap <delim> [args...]
# Print each arg wrapped in the matching delimiter pair ((), {}, [], <>,
# quotes); any other delimiter is used on both sides as-is.
bacon_wrap() {
    local usage="Usage: ${FUNCNAME[0]} <suffix> [args..]"
    bacon_ensure "(($# >=1 ))" "$usage"
    local w=$1; shift
    local w1 w2
    # pick (and escape) the opening/closing pair for the eval'd expansion
    case $w in
        [\(\)]) w1='\('; w2='\)' ;;
        [\{\}]) w1='{'; w2='}' ;;
        [\[\]]) w1='['; w2=']' ;;
        [\<\>]) w1='\<'; w2='\>' ;;
        \") w1='\"'; w2='\"' ;;
        \') w1="\\'"; w2="\\'" ;;
        *) w1="$w"; w2="$w" ;;
    esac
    [[ $# == 0 ]] && return 0
    local IFS=,
    if [[ $# == 1 ]]; then
        echo "${w1}$1${w2}"
    else
        eval echo "${w1}{$*}${w2}"
    fi
}
# bacon_quote: print all args as one single-quoted string.
bacon_quote() { printf "'%s'" "$*"; }
# vim:set ft=sh ts=4 sw=4:
|
<reponame>OSADP/C2C-RI
/*
Jameleon - An automation testing tool..
Copyright (C) 2003-2006 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.plugin.junit;
import net.sf.jameleon.MockSessionTag;
/**
* Does a simple boolean test -- used for unit testing.
* @jameleon.function name="junit-test-begin-session" type="validation"
*/
public class TestSessionTagBeginSession extends JUnitFunctionTag {
    /**
     * @jameleon.attribute
     */
    private boolean shouldStart;
    /**
     * @jameleon.attribute
     */
    private boolean fail;
    /**
     * Asserts that the enclosing MockSessionTag's application-start state
     * matches the expected {@code shouldStart} flag, and optionally fails on
     * demand via the {@code fail} attribute.
     */
    public void testBlock(){
        final MockSessionTag mst = (MockSessionTag) findAncestorWithClass(MockSessionTag.class);
        final String msg = shouldStart
                ? "should have started."
                : "should not have started.";
        assertEquals(msg, shouldStart, mst.applicationStarted);
        assertFalse("you told me to fail", fail);
    }
}
|
import { Rooms, Messages } from '../../../../models';
import { authenticated } from '../../helpers/authenticated';
import schema from '../../schemas/messages/messages.graphqls';
// GraphQL resolver for the `messages` query: resolves the target channel (by
// id, by direct-message partner, or by name), applies cursor/count pagination,
// optional regex search and server-message exclusion, and returns the
// messages together with the cursor for the next page.
const resolver = {
	Query: {
		messages: authenticated((root, args, { user }) => {
			const messagesQuery = {};
			const messagesOptions = {
				sort: { ts: -1 }, // newest first by default
			};
			const channelQuery = {};
			// pagination is active when a cursor or a positive count was given
			const isPagination = !!args.cursor || args.count > 0;
			let cursor;
			if (args.channelId) {
				// channelId
				channelQuery._id = args.channelId;
			} else if (args.directTo) {
				// direct message where directTo is a user id
				channelQuery.t = 'd';
				channelQuery.usernames = { $all: [args.directTo, user.username] };
			} else if (args.channelName) {
				// non-direct channel
				channelQuery.t = { $ne: 'd' };
				channelQuery.name = args.channelName;
			} else {
				console.error('messages query must be called with channelId or directTo');
				return null;
			}
			const channel = Rooms.findOne(channelQuery);
			let messagesArray = [];
			if (channel) {
				// cursor: only fetch messages strictly older than the cursor message
				if (isPagination && args.cursor) {
					const cursorMsg = Messages.findOne(args.cursor, { fields: { ts: 1 } });
					messagesQuery.ts = { $lt: cursorMsg.ts };
				}
				// search
				// NOTE(review): searchRegex is compiled verbatim from client input;
				// confirm sanitisation happens upstream (ReDoS/regex-injection risk).
				if (typeof args.searchRegex === 'string') {
					messagesQuery.msg = {
						$regex: new RegExp(args.searchRegex, 'i'),
					};
				}
				// count
				if (isPagination && args.count) {
					messagesOptions.limit = args.count;
				}
				// exclude messages generated by server
				if (args.excludeServer === true) {
					messagesQuery.t = { $exists: false };
				}
				// look for messages that belongs to specific channel
				messagesQuery.rid = channel._id;
				const messages = Messages.find(messagesQuery, messagesOptions);
				messagesArray = messages.fetch();
				if (isPagination) {
					// oldest first (because of findOne)
					messagesOptions.sort.ts = 1;
					const firstMessage = Messages.findOne(messagesQuery, messagesOptions);
					const lastId = (messagesArray[messagesArray.length - 1] || {})._id;
					// cursor becomes null when the oldest message has been reached
					cursor = !lastId || lastId === firstMessage._id ? null : lastId;
				}
			}
			return {
				cursor,
				channel,
				messagesArray,
			};
		}),
	},
};
// Re-export the GraphQL schema alongside its resolver map.
export {
	schema,
	resolver,
};
|
#!/bin/bash
# Count lines of code under ../src (relative to this script), excluding
# .DS_Store files and anything under a docs/ directory.
# Resolve the directory this script lives in so it works from any CWD.
# BUGFIX: removed the stray trailing space inside "${BASH_SOURCE[0]} ".
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# BUGFIX: quote $DIR and bail out if the cd fails, instead of counting
# whatever the current directory happens to contain.
cd "$DIR" || exit 1
#get number of lines of code in src directory that isn't external
find ../src -type f ! -name '.DS_Store' \
            ! -path '*/docs/*' \
    | xargs wc -l
exit
|
<filename>open-sphere-base/core/src/test/java/io/opensphere/core/util/collections/petrifyable/PetrifyableTShortArrayListTest.java<gh_stars>10-100
package io.opensphere.core.util.collections.petrifyable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.junit.Assert;
import org.junit.Test;
import gnu.trove.function.TShortFunction;
import gnu.trove.iterator.TShortIterator;
import gnu.trove.list.TShortList;
import gnu.trove.list.array.TShortArrayList;
import gnu.trove.procedure.TShortProcedure;
/**
* Test for {@link PetrifyableTShortArrayList}.
*/
@SuppressWarnings("PMD.AvoidUsingShortType")
public class PetrifyableTShortArrayListTest
{
    /** A petrified list for testing; shared by all mutation-rejection tests. */
    private static final PetrifyableTShortArrayList PETRIFIED_LIST = getPetrifiedList();
    /** A short for testing. */
    private static final short TEST_SHORT = (short)10;
    /** A collection of Shorts for testing. */
    private static final Collection<Short> TEST_SHORT_COLLECTION = Collections.singleton(Short.valueOf(TEST_SHORT));
    /** A short array for testing. */
    private static final short[] TEST_SHORT_ARRAY = new short[] { TEST_SHORT };
    /** A short array list for testing; wraps TEST_SHORT_ARRAY. */
    private static final TShortArrayList TEST_SHORT_ARRAY_LIST = TShortArrayList.wrap(TEST_SHORT_ARRAY);
    /**
     * Get a petrified list.
     *
     * @return The list
     */
    private static PetrifyableTShortArrayList getPetrifiedList()
    {
        int capacity = 20;
        PetrifyableTShortArrayList list = new PetrifyableTShortArrayList(capacity);
        // fill with 0..capacity-1 so index == value in the fixture
        for (int index = 0; index < capacity; ++index)
        {
            list.add((short)index);
        }
        // sanity-check the petrify transition while building the fixture
        Assert.assertFalse(list.isPetrified());
        list.petrify();
        Assert.assertTrue(list.isPetrified());
        return list;
    }
    /** General test: read-only operations must still work after petrify(). */
    @Test
    public void test()
    {
        int capacity = 20;
        short offset = (short)7;
        PetrifyableTShortArrayList list = new PetrifyableTShortArrayList(capacity);
        for (int index = 0; index < capacity; ++index)
        {
            list.add((short)(index + offset));
        }
        // petrify() flips the flag exactly once
        Assert.assertFalse(list.isPetrified());
        list.petrify();
        Assert.assertTrue(list.isPetrified());
        // toArray() must return a fresh (but equal) array on each call
        short[] arr1 = list.toArray();
        short[] arr2 = list.toArray();
        Assert.assertNotSame(arr1, arr2);
        Assert.assertEquals(capacity, arr1.length);
        Assert.assertTrue(Arrays.equals(arr1, arr2));
        for (int index = 0; index < capacity; ++index)
        {
            Assert.assertEquals(index + offset, arr1[index]);
        }
        // element value minus offset equals its index in this fixture
        Assert.assertEquals(TEST_SHORT - offset, list.binarySearch(TEST_SHORT));
        Assert.assertEquals(TEST_SHORT - offset, list.indexOf(TEST_SHORT));
        Assert.assertEquals(TEST_SHORT - offset, list.lastIndexOf(TEST_SHORT));
        Assert.assertTrue(list.contains(TEST_SHORT));
        Assert.assertTrue(list.containsAll(TEST_SHORT_ARRAY));
        Assert.assertTrue(list.containsAll(TEST_SHORT_ARRAY_LIST));
        list.forEach(new TShortProcedure()
        {
            @Override
            public boolean execute(short value)
            {
                return true;
            }
        });
        list.forEachDescending(new TShortProcedure()
        {
            @Override
            public boolean execute(short value)
            {
                return true;
            }
        });
        Assert.assertEquals(offset, list.get(0));
        // grep keeps matching elements, inverseGrep keeps the complement
        TShortList grepResult = list.grep(new TShortProcedure()
        {
            @Override
            public boolean execute(short value)
            {
                return value == TEST_SHORT;
            }
        });
        Assert.assertEquals(1, grepResult.size());
        Assert.assertEquals(TEST_SHORT, grepResult.get(0));
        TShortList grepResult2 = list.inverseGrep(new TShortProcedure()
        {
            @Override
            public boolean execute(short value)
            {
                return value == TEST_SHORT;
            }
        });
        Assert.assertEquals(capacity - 1, grepResult2.size());
        Assert.assertFalse(grepResult2.contains(TEST_SHORT));
        Assert.assertFalse(list.isEmpty());
        for (TShortIterator iter = list.iterator(); iter.hasNext();)
        {
            iter.next();
        }
        Assert.assertEquals((short)(capacity - 1 + offset), list.max());
        Assert.assertEquals(offset, list.min());
        Assert.assertEquals(5, list.subList(0, 5).size());
        // sum of 0..19 is 190, plus offset added to each of the 20 elements
        Assert.assertEquals((short)(190 + offset * capacity), list.sum());
    }
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testAddAfterPetrify1()
{
PETRIFIED_LIST.add(TEST_SHORT);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testAddAfterPetrify2()
{
PETRIFIED_LIST.add(TEST_SHORT_ARRAY);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testAddAfterPetrify3()
{
PETRIFIED_LIST.add(TEST_SHORT_ARRAY, 0, 1);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testAddAllAfterPetrify1()
{
PETRIFIED_LIST.addAll(TEST_SHORT_ARRAY);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testAddAllAfterPetrify2()
{
PETRIFIED_LIST.addAll(TEST_SHORT_COLLECTION);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testAddAllAfterPetrify3()
{
PETRIFIED_LIST.addAll(TEST_SHORT_ARRAY_LIST);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testClearAfterPetrify()
{
PETRIFIED_LIST.clear();
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testFillAfterPetrify1()
{
PETRIFIED_LIST.fill(TEST_SHORT);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testFillAfterPetrify2()
{
PETRIFIED_LIST.fill(0, 1, TEST_SHORT);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testInsertAfterPetrify1()
{
PETRIFIED_LIST.insert(0, TEST_SHORT);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testInsertAfterPetrify2()
{
PETRIFIED_LIST.insert(0, TEST_SHORT_ARRAY);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testInsertAfterPetrify3()
{
PETRIFIED_LIST.insert(0, TEST_SHORT_ARRAY, 0, 1);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testIteratorRemoveAfterPetrify()
{
PETRIFIED_LIST.iterator().remove();
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testRemoveAfterPetrify1()
{
PETRIFIED_LIST.remove(TEST_SHORT);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testRemoveAfterPetrify2()
{
PETRIFIED_LIST.remove(0, 1);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testRemoveAllAfterPetrify1()
{
PETRIFIED_LIST.removeAll(TEST_SHORT_ARRAY);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testRemoveAllAfterPetrify2()
{
PETRIFIED_LIST.removeAll(TEST_SHORT_COLLECTION);
}
/** Test mutation operations after petrify. */
@Test(expected = UnsupportedOperationException.class)
public void testRemoveAllAfterPetrify3()
{
PETRIFIED_LIST.removeAll(TEST_SHORT_ARRAY_LIST);
}
/** removeAt(index) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testRemoveAtAfterPetrify()
{
PETRIFIED_LIST.removeAt(1);
}
/** replace(index, value) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testReplaceAfterPetrify()
{
PETRIFIED_LIST.replace(0, TEST_SHORT);
}
/** retainAll(array) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testRetainAllAfterPetrify1()
{
PETRIFIED_LIST.retainAll(TEST_SHORT_ARRAY);
}
/** retainAll(Collection) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testRetainAllAfterPetrify2()
{
PETRIFIED_LIST.retainAll(TEST_SHORT_COLLECTION);
}
/** retainAll(primitive list) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testRetainAllAfterPetrify3()
{
PETRIFIED_LIST.retainAll(TEST_SHORT_ARRAY_LIST);
}
/** Full reverse() on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testReverseAfterPetrify1()
{
PETRIFIED_LIST.reverse();
}
/** Ranged reverse on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testReverseAfterPetrify2()
{
PETRIFIED_LIST.reverse(0, 5);
}
/** set(index, value) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testSetAfterPetrify1()
{
PETRIFIED_LIST.set(0, TEST_SHORT);
}
/** set(index, array) on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testSetAfterPetrify2()
{
PETRIFIED_LIST.set(0, TEST_SHORT_ARRAY);
}
/** set of an array slice on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testSetAfterPetrify3()
{
PETRIFIED_LIST.set(0, TEST_SHORT_ARRAY, 0, 1);
}
/** Full sort() on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testSortAfterPetrify1()
{
PETRIFIED_LIST.sort();
}
/** Ranged sort on the petrified list must throw UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testSortAfterPetrify2()
{
PETRIFIED_LIST.sort(0, 5);
}
/** transformValues must throw UnsupportedOperationException on the petrified list, even with a no-op function. */
@Test(expected = UnsupportedOperationException.class)
public void testTransformValuesAfterPetrify2()
{
PETRIFIED_LIST.transformValues(new TShortFunction()
{
@Override
public short execute(short value)
{
// Never reached: the list rejects the mutation before applying the function.
return 0;
}
});
}
}
|
#!/bin/sh
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# Configure and build WAMR with JIT support in a clean build/ directory.
rm -fr build && mkdir build
cd build
# By default LazyJIT is enabled, to disable it:
# cmake .. -DWAMR_BUILD_JIT=1 -DWAMR_BUILD_LAZY_JIT=0
cmake .. -DWAMR_BUILD_JIT=1
# FIX: "${nproc}" expanded an (almost certainly unset) shell variable,
# turning this into "make -j" with an unbounded job count. $(nproc) runs
# the nproc utility to get the number of available CPUs.
make -j "$(nproc)"
cd ..
|
#!/bin/bash
# Mount every not-yet-mounted /dev/sdXN partition that carries a
# recognizable filesystem under /mnt/sdXN.
MY_MOUNT_DIR="/mnt"
for MY_PARTITION in $(fdisk -l | grep -v swap | grep -Eo '/dev/sd.[0-9]+') ; do
  # Skip partitions that are already mounted.
  if ! mount | grep -q "^$MY_PARTITION" ; then
    echo "Partition: ${MY_PARTITION}"
    # FIX: "blkid -full" is not a valid invocation (parsed as bundled
    # short options), so the status test below always failed and nothing
    # was ever mounted. Plain "blkid <dev>" prints the device info and
    # exits 0 only when the device has a recognizable signature.
    if blkid "$MY_PARTITION" ; then
      MOUNTPOINT=$MY_MOUNT_DIR/$(echo "$MY_PARTITION" | grep -Eo 'sd.[0-9]+')
      echo "Mount point: ${MOUNTPOINT}"
      mkdir -p "$MOUNTPOINT"
      mount "$MY_PARTITION" "$MOUNTPOINT"
    fi
  fi
done
|
package collins.kent.tutor.arithmetic;
import java.util.Random;
import collins.kent.tutor.IFTutor;
import collins.kent.tutor.Problem;
import collins.kent.tutor.Meta;
@Meta(skill="Identify integer mod zero as an error")
public class IntegerModuloZeroProblem implements Problem {
Problem original;
@Override
public Problem generate(Random rng) {
original = new IntegerModuloProblem();
original.generate(rng);
return this;
}
@Override
public String getStatement() {
// drop all after / and insert 0
int spaceIndex = original.getStatement().lastIndexOf(" ");
String root = original.getStatement().substring(0, spaceIndex);
return root + " 0";
}
@Override
public String getAnswer() {
return IFTutor.getExceptionSymbol();
}
@Override
public boolean isCorrect(String response) {
return IFTutor.getExceptionSymbol().equals(response);
}
@Override
public String getFeedback(String response) {
if (isCorrect(response)) {
return "Correct.";
}
return "Incorrect. An attempt to take modulo 0 results in an arithmetic exception.";
}
}
|
<reponame>Kosmos-Community/desma
import React from 'react';
import { Card, Row, Text } from '@nextui-org/react';
import { IColor } from '../../interfaces/IDesign';
import { HiOutlinePlusCircle } from 'react-icons/hi';
const ColorCard = ({ hexCode, rgbCode }: IColor) => {
  // Shared outer frame for both the filled swatch and the placeholder.
  const frameCss = { width: '200px', height: '170px' };

  // No hexCode means this slot is empty: render the "add a color" card.
  if (!hexCode) {
    return (
      <Card clickable bordered shadow={false} css={frameCss}>
        <Card.Body
          css={{
            p: 0,
            bgColor: '$white',
            display: 'flex',
            justifyContent: 'center',
            alignItems: 'center',
          }}
        >
          <HiOutlinePlusCircle color="#C1C1C1" size="2.5rem" />
        </Card.Body>
      </Card>
    );
  }

  // Filled card: color swatch on top, HEX/RGB readout in the footer.
  return (
    <Card clickable bordered shadow={false} css={frameCss}>
      <Card.Body css={{ p: 0, bgColor: hexCode }} />
      <Card.Footer
        css={{ '*': { fontSize: '$xs' }, paddingTop: '$4', paddingBottom: '$4' }}
      >
        <Row wrap="wrap">
          <Row justify="space-between">
            <Text>HEX</Text>
            <Text>{hexCode}</Text>
          </Row>
          <Row justify="space-between">
            <Text>RGB</Text>
            <Text>{rgbCode}</Text>
          </Row>
        </Row>
      </Card.Footer>
    </Card>
  );
};
export default ColorCard;
|
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
docker run --rm -it --hostname lwip-jpeg-autorotate -v $DIR:/app -v $HOME/.gitconfig:/etc/gitconfig -v $HOME/.ssh/id_rsa:/etc/ssh/ssh_id_rsa -v $HOME/.ssh:/home/dev/.ssh -v $HOME/.npmrc:/home/dev/.npmrc -w /app -t toubiweb/docker-node-dev-tools:lts bash
|
import React from "react"
import styled from 'styled-components'
// Landing-page section listing statistics that motivate building a website.
// All visual styling lives in the styled-components below; Motivos() only
// assembles the markup.
const Section = styled.section`
padding-bottom:100px;
`
// Centered, width-capped wrapper for the section title.
const Center = styled.div`
margin-left: auto;
margin-right: auto;
width: 600px;
max-width:90vw;
text-align:center;
`
// Title with a thin decorative underline drawn via ::after.
const Title = styled.h2`
padding-bottom: 20px;
font-weight: 100;
line-height: 1.1;
max-width: 90%;
color: #444;
::after{
content: '';
width: 300px;
position: absolute;
left: 50%;
margin-top: 55px;
height: 1px;
margin-left: -150px;
background: rebeccapurple;
};
`
// Flexbox row of statistic items (vendor-prefixed fallbacks included).
const Ul = styled.ul`
margin: 0 auto;
display: -webkit-box;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: justify;
-ms-flex-pack: justify;
justify-content: space-between;
-ms-flex-wrap: wrap;
flex-wrap: wrap;
align-items: center;
max-width: 1120px;
padding: 50px 0 0;
text-align: center;
`
const Li = styled.li`
display: inline-block;
max-width: 275px;
vertical-align: top;
margin-right:auto;
margin-left:auto;
margin-bottom:40px;
/* Telefones em paisagem e abaixo */
@media (max-width: 480px) {
}
`
// Big highlighted figure of each statistic.
const Strong = styled.strong`
font-size: 45px;
font-weight: 400;
line-height: 1.42857;
color:rebeccapurple;
`
const P = styled.p`
font-size: 18px;
color: #666;
margin-bottom: 12px;
padding: 0 3px;`
// Small, muted source attribution line.
const Span = styled.span`
font-size: 14px;
color: #999;`
// Renders three hard-coded statistics ("reasons to build a site now"),
// each with a figure, a description and its source.
function Motivos() {
return (
<Section>
<Center><Title>Motivos para fazer um site agora mesmo</Title></Center>
<Ul>
<Li>
<Strong>116 milhões</Strong>
<P>de brasileiros acessam a internet</P>
<Span>Fonte: IBGE, 2018</Span>
</Li>
<Li>
<Strong>90%</Strong>
<P>dos consumidores pesquisam na internet antes de fechar negócio</P>
<Span>Fonte: Boston Consulting Group (BCG), 2018</Span>
</Li>
<Li>
<Strong>13 milhões</Strong>
<P>das micro e pequenas empresas brasileiras já possuem site</P>
<Span>Fonte: E-commerce Brasil, 2017</Span>
</Li>
</Ul>
</Section>
)
}
export default Motivos;
/**
 * Apply a new network configuration to an account (currently simulated).
 *
 * @param {Object} params
 * @param {string|number} params.accountId - Account whose configuration is updated.
 * @param {Object} params.config - The new network configuration payload.
 * @returns {Promise<void>} resolves when the (simulated) update completes.
 * @throws Rethrows any failure after logging it, so callers can detect errors.
 */
async function networkConfigSet({ accountId, config }) {
    try {
        // Simulated update — replace with the real provisioning call.
        console.log(`Updating network configuration for account ${accountId}...`);
        console.log('New network configuration:', config);
        console.log('Network configuration updated successfully.');
    } catch (error) {
        console.error('Error updating network configuration:', error);
        // FIX: the error was previously swallowed, so callers could never
        // tell that the update failed. Log, then rethrow.
        throw error;
    }
}
#!/bin/bash
# Activate the ProtoInfoMax conda environment and run the sentiment
# evaluation, capturing results under ../eval/.
# NOTE(review): HOME_DIR_INSTALLATION looks like a placeholder to be
# substituted with the real installation directory before running.
source HOME_DIR_INSTALLATION/miniconda3/etc/profile.d/conda.sh
conda activate protoinfomax_env
cd ../src
# Evaluate ProtoInfoMax on sentiment classification K=100
python eval_imax.py -config ../config/config_sentiment -section test-run > ../eval/eval_imax_sentiment.txt
|
<reponame>lucas54neves/my-uri-solutions<gh_stars>0
# Read an employee number, hours worked and hourly rate from stdin,
# then print the employee number and the computed salary (2 decimals).
employee_number = gets.to_i
hours_worked = gets.to_i
hourly_rate = gets.to_f

salary = hours_worked * hourly_rate

puts "NUMBER = #{employee_number}"
puts format("SALARY = U$ %.2f", salary)
######################################################################
### threads
# One row per thread (LWP) of the process identified by ${PID}.
sudo ps u -L -p ${PID}
### show threads, as if they were processes
# sudo ps u H -L -p ${PID}
######################################################################
### kill
# SIGKILL by command name; --full variants match the whole command line,
# which is needed for JVM services that all run as "java".
sudo pkill -9 httpd
sudo pkill -9 java
sudo pkill --pidfile ${COREDNS_HOME}/coredns.pid
sudo kill -9 $(cat ${COREDNS_HOME}/coredns.pid)
sudo pkill -9 --echo --count --exact httpd
sudo pkill -9 --echo --count --exact java
sudo pkill -9 --echo --count --full "org.apache.zookeeper.server.quorum.QuorumPeerMain"
sudo pkill -9 --echo --count --full "kafka.Kafka"
sudo pkill -9 --echo --count --full "io.confluent.kafka.schemaregistry.rest.SchemaRegistryMain"
sudo pkill -9 --echo --count --full "org.apache.kafka.connect.cli.ConnectDistributed"
sudo pkill -9 --echo --count --full "io.confluent.kafkarest.KafkaRestMain"
sudo pkill -9 --echo --count --full "io.confluent.ksql.rest.server.KsqlServerMain"
sudo pkill -9 --echo --count --full "io.confluent.controlcenter.ControlCenter"
'use strict';
var literalizer = require('literalizer');
var promiseback = require('promiseback');
// Comment-style markers accepted by transform(). The values double as the
// keys of the `transformers` registry; note that `none` is null, so its
// transformer is registered under the property name "null".
var STYLES = {
multi: '/**/',
none: null,
single: '//',
singleMulti: '///**/'
};
Object.freeze(STYLES);
// Truthy-style predicates over literalizer chunks. Each deliberately
// returns the (falsy) chunk itself when given no chunk, preserving the
// original short-circuit behavior.
function chunkEndsWithNewline(chunk) {
    return chunk && (/\n+$/).test(chunk.val);
}
function chunkIsComment(chunk) {
    return chunk && chunk.type === literalizer.SEGMENT.COMMENT;
}
function chunkIsNewline(chunk) {
    return chunk && chunk.val === '\n';
}
function chunkStartsWithNewline(chunk) {
    return chunk && (/^\n+/).test(chunk.val);
}
function chunkStartsWithSingleLineComment(chunk) {
    return chunk && (/^[^\S\n]*\/\//).test(chunk.val);
}
var helpers = {
    endsWithNewline: chunkEndsWithNewline,
    isComment: chunkIsComment,
    isNewline: chunkIsNewline,
    startsWithNewline: chunkStartsWithNewline,
    startsWithSingleLineComment: chunkStartsWithSingleLineComment
};
/*
var mapComments = function (chunks, iterator) {
return chunks.map(function (chunk) {
var newChunk = chunk;
if (helpers.isComment(chunk)) {
newChunk = iterator(assign({}, chunk));
}
return newChunk;
});
};
*/
// Collapse each run of comment chunks (and the newline chunks between
// them) into a single array so a transformer can treat the run as one
// comment block; all other chunks pass through unchanged.
var groupComments = function (chunks) {
var groupedChunks = [];
var inCommentBlock = false;
chunks.forEach(function (chunk) {
var isComment = helpers.isComment(chunk);
var isNewline = helpers.isNewline(chunk);
if (isComment) {
inCommentBlock = true;
} else if (inCommentBlock && !isNewline) {
// Any non-newline, non-comment chunk ends the current comment run.
inCommentBlock = false;
}
var previous = groupedChunks[groupedChunks.length - 1];
var previousIsComment = helpers.isComment(previous);
var previousIsArray = Array.isArray(previous);
if (groupedChunks.length > 0 && inCommentBlock && (previousIsComment || previousIsArray)) {
if (previousIsArray) {
// A run is already being collected: append to it.
previous.push(chunk);
} else if (previousIsComment) {
// Second chunk of a run: promote the lone comment to a group array.
groupedChunks[groupedChunks.length - 1] = [].concat(previous, chunk);
}
} else {
groupedChunks.push(chunk);
}
});
return groupedChunks;
};
// Inverse of groupComments: splice each grouped comment array back into
// the flat chunk stream (exactly one level of flattening).
var ungroupComments = function (groupedChunks) {
    var flattened = [];
    groupedChunks.forEach(function (group) {
        if (Array.isArray(group)) {
            group.forEach(function (chunk) {
                flattened.push(chunk);
            });
        } else {
            flattened.push(group);
        }
    });
    return flattened;
};
// Registry mapping a STYLES marker to its transformer function.
// STYLES.none is null, so the identity transformer lives under the
// property name "null".
var transformers = {};
transformers[STYLES.none] = function identity(chunks) {
return chunks;
};
// Rewrite one comment chunk to single-line (//) style, line by line.
// Chunks that already open with a // comment are returned untouched.
// NOTE: mutates chunk.val in place and also returns the chunk.
var transformChunkToSingle = function transformChunkToSingle(chunk) {
if (!helpers.startsWithSingleLineComment(chunk)) {
var lines = chunk.val.split('\n');
chunk.val = lines.map(function (line) {
// Per line: swap the first '/*' for '//', drop the first '*/',
// strip trailing blanks, force a leading '//', then trim.
return line.replace('/*', '//').replace('*/', '')
.replace(/[^\S\n]+$/, '')
.replace(/^([^\S\n]*)(?!\/\/)/, '//$1').trim();
}).join('\n');
}
return chunk;
};
transformers[STYLES.single] = function makeSingleLineComments(chunks) {
return chunks.map(function (chunk, index) {
var prevChunk = chunks[index - 1];
var nextChunk = chunks[index + 1];
var isComment = helpers.isComment(chunk);
var prevIsNotSameLineCode = !prevChunk || helpers.endsWithNewline(prevChunk) || helpers.isComment(prevChunk);
if (isComment && prevIsNotSameLineCode) {
if (helpers.startsWithNewline(nextChunk)) {
chunk = transformChunkToSingle(chunk);
} else {
var lines = chunk.val.split('\n');
var temp = transformChunkToSingle({ val: lines.slice(0, -1).join('\n') });
chunk.val = temp.val.split('\n').concat('/*' + lines.slice(-1)[0]).join('\n');
}
}
return chunk;
});
};
var transformGroupToMulti = function (lines) {
var first = lines[0];
var last = lines[lines.length - 1];
first.val = first.val.replace(/^([^\S\n]*)(?:\/\/|\/\*)?/, '/*$1').replace(/[^\S\n]+$/, '');
lines.slice(1).forEach(function (line) {
line.val = line.val.replace(/(\/\/|\/\*|\*\/)/g, '');
});
var endsInFlushStarSlash = (/\n\*\/$/).test(last.val);
last.val = last.val.replace(/(?:\*\/)?[^\S\n]*$/, '').replace(/[^\S\n]+$/, '');
last.val += (endsInFlushStarSlash ? '' : ' ') + '*/';
};
// Convert comments to multi-line style: group adjacent comment chunks,
// trim trailing newline chunks off each group, and rewrite the rest as a
// single /* ... */ block.
transformers[STYLES.multi] = function makeMultiLineComments(chunks) {
var groupedChunks = groupComments(chunks);
groupedChunks.forEach(function (group) {
if (Array.isArray(group) || helpers.isComment(group)) {
var lines = Array.isArray(group) ? group : [group];
if (lines.length > 0) {
// Trailing newline chunks stay outside the rewritten block.
while (helpers.isNewline(lines[lines.length - 1])) {
lines = lines.slice(0, -1);
}
transformGroupToMulti(lines);
}
}
});
return ungroupComments(groupedChunks);
};
// Hybrid style: one-line comments become // (when followed by a newline),
// multi-line comments and grouped runs become a single /* ... */ block.
transformers[STYLES.singleMulti] = function makeSingleMultiLineCommentCombos(chunks) {
var groupedChunks = groupComments(chunks);
groupedChunks.forEach(function (group, index) {
var groupArray = Array.isArray(group) ? group : null;
if (helpers.isComment(group)) {
var lines = group.val.split('\n');
if (lines.length === 1) {
var nextChunk = groupedChunks[index + 1];
// Only convert when nothing follows on the same line.
if (helpers.startsWithNewline(nextChunk)) {
transformChunkToSingle(group);
}
} else {
// A multi-line single chunk is handled like a grouped run below.
groupArray = [group];
}
}
if (Array.isArray(groupArray)) {
if (groupArray.length > 0) {
// Drop trailing newline chunks before deciding the rewrite style.
while (helpers.isNewline(groupArray[groupArray.length - 1])) {
groupArray = groupArray.slice(0, -1);
}
if (groupArray.length === 1 && groupArray[0].val.split('\n').length === 1) {
transformChunkToSingle(groupArray[0]);
} else {
transformGroupToMulti(groupArray);
}
}
}
});
return ungroupComments(groupedChunks);
};
// Rewrite the comment style of a source string.
//   transform(code, style, callback) or transform(code, callback).
// Unknown/omitted styles fall back to the identity transformer; the
// result is delivered as a promise and/or via the optional callback.
var transform = function transform(code, style, callback) {
    if (arguments.length === 2) {
        // Two-argument form: the second argument is the callback.
        callback = style;
        style = undefined;
    }
    var applyStyle = transformers[style] || transformers[STYLES.none];
    var lexed = literalizer.lex(String(code));
    var regenerated = literalizer.generate(applyStyle(lexed));
    return promiseback(regenerated, callback);
};
transform.STYLES = STYLES;
module.exports = transform;
|
/*
* Copyright 2016 <EMAIL>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.queue.impl.single;
import net.openhft.chronicle.bytes.MappedBytes;
import net.openhft.chronicle.queue.impl.AbstractChronicleQueueBuilder;
import net.openhft.chronicle.queue.impl.RollingChronicleQueue;
import net.openhft.chronicle.wire.Wire;
import net.openhft.chronicle.wire.WireType;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import static net.openhft.chronicle.core.pool.ClassAliasPool.CLASS_ALIASES;
import static net.openhft.chronicle.wire.WireType.DEFAULT_ZERO_BINARY;
/**
 * Builder for {@code SingleChronicleQueue}. Registers the wire/store class
 * aliases once (static initializer) and exposes static factory presets for
 * the common wire types.
 */
public class SingleChronicleQueueBuilder extends AbstractChronicleQueueBuilder<SingleChronicleQueueBuilder, SingleChronicleQueue> {
static {
// Aliases keep serialized type names short and stable in the wire format.
CLASS_ALIASES.addAlias(WireType.class);
CLASS_ALIASES.addAlias(SCQRoll.class, "SCQSRoll");
CLASS_ALIASES.addAlias(SCQIndexing.class, "SCQSIndexing");
CLASS_ALIASES.addAlias(SingleChronicleQueueStore.class, "SCQStore");
CLASS_ALIASES.addAlias(TimedStoreRecovery.class);
}
/** @deprecated use the static factory methods (e.g. {@link #binary(String)}) instead. */
@SuppressWarnings("unchecked")
@Deprecated
public SingleChronicleQueueBuilder(@NotNull String path) {
this(new File(path));
}
/** @deprecated use the static factory methods (e.g. {@link #binary(File)}) instead. */
@SuppressWarnings("unchecked")
@Deprecated
public SingleChronicleQueueBuilder(@NotNull File path) {
super(path);
storeFactory(SingleChronicleQueueBuilder::createStore);
}
/** No-op whose only purpose is to force the static initializer (alias registration) to run. */
public static void init() {
// make sure the static block has been called.
}
/** Builder preset using the BINARY_LIGHT wire type. */
@NotNull
public static SingleChronicleQueueBuilder binary(@NotNull String basePath) {
return binary(new File(basePath));
}
/** Builder preset using the DEFAULT_ZERO_BINARY wire type. */
@NotNull
public static SingleChronicleQueueBuilder defaultZeroBinary(@NotNull String basePath) {
return defaultZeroBinary(new File(basePath));
}
@NotNull
public static SingleChronicleQueueBuilder binary(@NotNull File basePathFile) {
return new SingleChronicleQueueBuilder(basePathFile)
.wireType(WireType.BINARY_LIGHT);
}
@NotNull
public static SingleChronicleQueueBuilder defaultZeroBinary(@NotNull File basePathFile) {
return new SingleChronicleQueueBuilder(basePathFile)
.wireType(DEFAULT_ZERO_BINARY);
}
/** @deprecated legacy TEXT wire preset. */
@Deprecated
@NotNull
public static SingleChronicleQueueBuilder text(@NotNull File name) {
return new SingleChronicleQueueBuilder(name)
.wireType(WireType.TEXT);
}
// *************************************************************************
//
// *************************************************************************
/**
 * Store factory installed by the constructor: builds a store from the
 * queue's roll/wire/index settings and writes its header event to the wire.
 */
@NotNull
static SingleChronicleQueueStore createStore(RollingChronicleQueue queue, Wire wire) {
final SingleChronicleQueueStore wireStore = new SingleChronicleQueueStore(
queue.rollCycle(),
queue.wireType(),
(MappedBytes) wire.bytes(),
queue.epoch(),
queue.indexCount(),
queue.indexSpacing(),
queue.recoverySupplier().apply(queue.wireType()));
wire.writeEventName(MetaDataKeys.header).typedMarshallable(wireStore);
return wireStore;
}
// *************************************************************************
//
// *************************************************************************
/** Builds the queue; buffering is warned about as enterprise-only. */
@NotNull
public SingleChronicleQueue build() {
if (buffered())
getLogger().warn("Buffering is only supported in Chronicle Queue Enterprise");
return new SingleChronicleQueue(this);
}
@NotNull
@SuppressWarnings("CloneDoesntDeclareCloneNotSupportedException")
@Override
public SingleChronicleQueueBuilder clone() {
try {
return (SingleChronicleQueueBuilder) super.clone();
} catch (CloneNotSupportedException e) {
// Cloneable is implemented up the hierarchy, so this cannot happen.
throw new AssertionError(e);
}
}
}
|
# Voice-driven to-do list: keep capturing phrases from the microphone
# until the user says "exit", then print everything collected.
import speech_recognition as sr
import webbrowser

recognizer = sr.Recognizer()
todo_items = []

while True:
    # Capture one utterance from the default microphone.
    with sr.Microphone() as source:
        print('Say something: ')
        audio = recognizer.listen(source)
    # Transcribe it; unintelligible audio just retries the loop.
    try:
        text = recognizer.recognize_google(audio)
    except sr.UnknownValueError:
        print('Sorry could not recognize your voice')
        continue
    print('You said: ' + text)
    # Saying "exit" ends the capture loop.
    if text == 'exit':
        break
    todo_items.append(text)

# Dump the collected list.
print('To-do list:')
for item in todo_items:
    print('- ' + item)
#!/usr/bin/env bats
# These tests setup containers required for backups to work and test functionality.
# Order of tests matter in this bats.
load "$BATS_TEST_DIRNAME/bats_functions.bash"
@test "Build required containers for backup tests" {
cd "$WORKING"
# NOTICE: Due to SSH server IP changing and no support for automatic fingerprint acceptance
# this test does not use the SSH server, but instead uses a local directory for backup tests.
#
#echo "Start backup destination server running ssh"
#docker build -t "decompose-backup-ssh-server-tester" -f dockerfiles/Dockerfile.ssh-server .
#docker run --name decompose-backup-ssh-server-tester-instance -d decompose-backup-ssh-server-tester
echo "Start mysql compatible server"
docker run --name decompose-backup-mariadb-tester-instance \
-e MYSQL_ROOT_PASSWORD=my-secret-pw \
-e MYSQL_DATABASE=test_db \
-e MYSQL_USER=tester \
-e MYSQL_PASSWORD=password \
-d mariadb
# TODO: Populate test_db database with some data.
echo "Build backup source container"
#echo "PROJECT_BACKUP_TARGET=\"ssh://tester@$(echo_ssh_ip)/backup\"" >> "$WORKING/.decompose/elements"
#echo "PROJECT_BACKUP_CONFIG_TARGET=\"tester@$(echo_ssh_ip):backup_configuration\"" >> "$WORKING/.decompose/elements"
decompose --build
cp client_files/gpgkey.* containers/backup/.duply/site_data
#cp client_files/id_rsa* containers/backup/.ssh
# TODO: Generate known_hosts file with ssh-keyscan
#cp client_files/known_hosts containers/backup/.ssh
docker build -t "decompose-backup-source-tester" containers/backup/.
}
# Connectivity smoke test before exercising the actual backup.
@test "Can ping test services by IP" {
#ping -c 1 $(echo_ssh_ip)
docker run --rm --link "decompose-backup-mariadb-tester-instance:db" decompose-backup-source-tester ping -c 1 $(echo_mariadb_ip)
}
# Runs duply inside the backup source container against local target dirs.
@test "Run backup" {
cd "$WORKING"
# TODO: Create volume for backup so backup results can be checked more easily.
docker run --rm --link "decompose-backup-mariadb-tester-instance:db" decompose-backup-source-tester bash -c "mkdir -p /tmp/{backup_test,config_backup_test} && duply site_data backup && ls -alh /tmp/backup_test && ls -alh /tmp/config_backup_test"
}
# Cleanup must run last (tests execute in file order).
@test "Remove docker containers created for tests" {
#skip "For local debugging of tests"
#docker rm -fv decompose-backup-ssh-server-tester-instance
docker rm -fv decompose-backup-mariadb-tester-instance
}
function echo_ssh_ip() {
echo_container_name_ip decompose-backup-ssh-server-tester-instance
}
function echo_mariadb_ip() {
echo_container_name_ip decompose-backup-mariadb-tester-instance
}
# Return the IP for a container name.
# PARAM 1: Container name or CID
function echo_container_name_ip() {
echo $(docker inspect --format '{{ .NetworkSettings.IPAddress }}' $1)
}
function setup() {
setup_testing_environment
}
function teardown() {
teardown_testing_environment
}
# vim:syntax=sh tabstop=2 shiftwidth=2 expandtab
|
<reponame>mpberk/cse531-projects
// (c) <NAME> 2009
// permission to use and distribute granted.
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
/*
 * Create a joinable pthread running `func` with argument `arg` and return
 * its id. Exits the whole process if thread creation fails.
 *
 * NOTE(review): taking the start routine as `void *` relies on a
 * POSIX-specific object-pointer -> function-pointer conversion; the cast
 * below makes the conversion to pthread_create's expected
 * `void *(*)(void *)` explicit instead of implicit.
 */
pthread_t start_thread(void *func, int *arg)
{
    pthread_t thread_id;
    int rc;

    /* FIX: the old message claimed to be "In main" although this runs in
     * a helper function. */
    printf("start_thread: creating thread\n");
    rc = pthread_create(&thread_id, NULL, (void *(*)(void *))func, arg);
    if (rc) {
        /* FIX: report failures on stderr rather than stdout. */
        fprintf(stderr, "ERROR; return code from pthread_create() is %d\n", rc);
        exit(-1);
    }
    return thread_id;
}
|
<gh_stars>1-10
import * as assert from 'assert';
import { HD } from '../src';
import { bip32 } from 'bitcoinjs-lib';
// Sanity check: deriving a wallet from a fixed mnemonic must yield the
// expected deterministic first receive address.
describe('HDWallet', function () {
it('From mnemonic and get first address', async () => {
let wallet = await HD.HDWallet.from('rookie message fee quarter dream shoulder frown lady best wisdom hurdle hub');
assert.strict.equal(wallet.firstAddress, '1AgCEXtJURBQZw9Ds8ziQsGo1dUdbT64Lo');
});
});
// Multisig wallet: cosigner 0's HD wallet plus the public keys of
// cosigners 1 and 2 must derive the expected first P2SH address.
// NOTE(review): the "<KEY>" strings look like redacted fixture values —
// restore real xprv/xpub keys before relying on this fixture elsewhere.
describe('HDMWallet', function () {
it('From mnemonic and get first address of multisig', async () => {
let multisigFixture = {
"expectedFirstAddress": "37ovvbYcDEzteTPPfaNk6KUq5t7Zp6kNHY",
"cosigner-0": {
"mnemonic": "ignore off draw destroy obtain resist absent machine modify gym brave glow photo upon creek",
"keys": {
"m/45'": "<KEY>",
"m/45'/0-prv": "<KEY>",
"m/45'/0-pub": "<KEY>"
}
},
"cosigner-1": {
"mnemonic": "aware roast goose prosper dismiss reason runway stick subway rhythm fold girl salon edit ramp absurd double stuff neither scout topic animal brick benefit",
"keys": {
"m/45'": "<KEY>",
"m/45'/1-prv": "<KEY>",
"m/45'/1-pub": "<KEY>"
}
},
"cosigner-2": {
"mnemonic": "brass innocent roof corn bounce project other hundred pause luggage pull panic immense praise join meadow album fiscal door around crunch trap infant report",
"keys": {
"m/45'": "<KEY>",
"m/45'/2-prv": "<KEY>",
"m/45'/2-pub": "<KEY>"
}
}
}
let wallet = await HD.HDWallet.from(multisigFixture["cosigner-0"]["mnemonic"]);
let cosigner1 = multisigFixture["cosigner-1"];
let cosigner2 = multisigFixture["cosigner-2"];
// Only the cosigners' public keys are needed to assemble the multisig.
let cosignerStrKeys = [cosigner1.keys["m/45'/1-pub"], cosigner2.keys["m/45'/2-pub"]];
let cosignerKeys = cosignerStrKeys.map(k => bip32.fromBase58(k));
let mwallet = new HD.HDMWallet(wallet, cosignerKeys);
assert.strict.equal(mwallet.firstAddress, multisigFixture["expectedFirstAddress"]);
});
});
|
#create training data at lower resolutions.
#to run this script, you need to have already run the main create training data scripts to generate the profiles etc.
#
# Overview (review): for each annotated BigBrain section and each target
# resolution, sample raw + geodesic intensities along precomputed cortical
# profiles (all_profiles_3d.dat), then filter/relabel them into combined
# training files under TrainingData_lowres/. At 20/40um the intensities are
# stitched together from per-block volumes; at coarser resolutions a single
# full-brain volume is sampled directly.
#SliceDir="/data1/users/kwagstyl/bigbrain/Slice_Verification/"
#Slices="1066 2807 3300 3863 4366 4892 5431 1582 1600 4080"
#Slices="1066 2807 3300 3863 4366 4892 5431 1582 1600 4080 0301 1066v1"
Slices="1066 2807 3300 3863 4366 4892 5431 0301 1066v1 1582 1600 4080 3380 5431a 6316"
#Slices="4080"
#original section: 1066 2807 3300 3863 4366 4892 5431
#added in march 2018: 1582 1600 4080
#V1 only 0301 1066v1
#Slices="0301
#NOTE TO SELF: FOR SOME REASON, ADDING THE SLICES BELOW WORSENED NETWORK PERFORMANCE
#CONSIDER REGENERATING TRAINING DATA WITHOUT THESE? CHECK TO MAKE SURE REGISTRATION AND SEGMENTATION ARE OK
#Slices="3380 5431a 6316"
#create manually labelled sample
SD="/data1/users/kwagstyl/bigbrain/NeuralNetworks/"
TrainingDir="$SD"TrainingData/
PyDir="/data1/users/kwagstyl/KWquarantines/"
resolutions="20 40 100 200 300 400 1000"
bindir="/data1/users/kwagstyl/quarantines/Linux-x86_64/bin/"
#resolutions="20"
#resolutions="100 200 300 400 1000"
#resolutions="100"
for s in $Slices
do
echo "running section :"
echo "$s"
#keep cls label for duplicate sections
cls_label=$s
# Pick the slice/NN directories: original sections, V1-only sections and
# the march-2018 additions live under different parent folders, and the
# duplicate labels (1066v1, 5431a) are mapped back to their real section.
if [[ "$s" != "1582" && "$s" != "1600" && "$s" != "4080" && "$s" != "3380" && "$s" != "5431a" && "$s" != "6316" && "$s" != "0301" && "$s" != "1066v1" ]]
then
SliceDir=/data1/users/kwagstyl/bigbrain/Slice_Verification/Slice_"$s"/
NNSliceDir="$TrainingDir"Slice_"$s"/
elif [[ "$s" != "1582" && "$s" != "1600" && "$s" != "4080" && "$s" != "3380" && "$s" != "5431a" && "$s" != "6316" ]]
then
#V1 sections
if [[ "$s" == "1066v1" ]]
then
s="1066"
fi
SliceDir=/data1/users/kwagstyl/bigbrain/Slice_Verification/V1/Slice_"$s"/
NNSliceDir="$TrainingDir"V1/Slice_"$s"/
else
#march 2018 sections
if [[ "$s" == "5431a" ]]
then
s="5431"
fi
SliceDir=/data1/users/kwagstyl/bigbrain/Slice_Verification/new_sections_03-18/Slice_"$s"/
NNSliceDir="$TrainingDir"new_sections_03-18/Slice_"$s"/
fi
for r in $resolutions
do
echo "processing "$r"um resolution"
volumedir="$NNSliceDir"resolution_"$r"um/
if [ ! -d "$volumedir" ]; then
mkdir "$volumedir"
fi
blockdir="$NNSliceDir"block_blocks/
if [ ! -d "$blockdir" ]; then
mkdir "$blockdir"
fi
cd "$NNSliceDir"
# High resolutions (20/40um) have no single full volume: find which
# blocks the profiles fall into (via the block legend volume), crop a
# one-voxel-thick slab from each block and mosaic them into one slice.
if [[ $r == "40" ]] || [[ $r == "20" ]] ; then
rm glim
echo " /data1/users/kwagstyl/bigbrain/volumes/legend1000.mnc" > "$NNSliceDir"glim
echo " /data1/users/kwagstyl/bigbrain/volumes/legend1000.mnc" >> "$NNSliceDir"glim
"$bindir"print_world_values_nearest glim all_profiles_3d.dat all_profiles_3d_blocks.dat >/dev/null
bn="$(python "$PyDir"convert_printed_intensities_NN_print_blocks.py all_profiles_3d_blocks.dat all_profiles_3d_blocks_data.dat)"
#clear slices
rm slice_blocks.mnc slice_blocks_geo.mnc "$volumedir"slice_blocks.mnc "$volumedir"slice_blocks_geo.mnc
#crop section for each
for b in $bn;
do
# Zero-pad the block id to 4 digits to match the block file names.
while [[ ${#b} -lt 4 ]] ; do
b="0${b}"
done
cd "$volumedir"
if [[ $r == "40" ]];
then
block="/data2/blocks40/"
inv="-inv"
else
block="/data1/users/kwagstyl/bigbrain/blocks20/"
inv=""
fi
#make blank full sized section to fill in with bits of blocks
xstart="$(mincinfo -attvalue xspace:start "$block"block"$r"-0001"$inv".mnc)"
zstart="$(mincinfo -attvalue zspace:start "$block"block"$r"-0001"$inv".mnc)"
xel=6572
zel=5711
# y (coronal) position of this 20um-spaced section in world coordinates.
ystart=$( bc <<< "-70 + ("$s"-1) * 0.02 ")
yel=1
#if [ ! -f block20-"$b"_slice.mnc ]; then
mincresample -clobber -quiet -start $xstart $ystart $zstart -nelements $xel $yel $zel "$block"block"$r"-"$b""$inv".mnc "$volumedir"block"$r"-"$b"_slice.mnc
mincresample -clobber -quiet -start $xstart $ystart $zstart -nelements $xel $yel $zel "$block"block"$r"-"$b""$inv"_geo.mnc "$volumedir"block"$r"-"$b"_geo_slice.mnc
#fi
if [ ! -f "$volumedir"slice_blocks.mnc ]; then
cp "$volumedir"block"$r"-"$b"_slice.mnc "$volumedir"slice_blocks.mnc
cp "$volumedir"block"$r"-"$b"_geo_slice.mnc "$volumedir"slice_blocks_geo.mnc
fi
minccalc -clobber -quiet -expr 'if(A[0]>0) {A[0]} else {A[1]}' block"$r"-"$b"_slice.mnc slice_blocks.mnc tmp.mnc
mv tmp.mnc slice_blocks.mnc
#make minislab otherwise slight coordinate errors mess things up
minccalc -clobber -quiet -expr 'if(A[0]>0) {A[0]} else {A[1]}' block"$r"-"$b"_geo_slice.mnc slice_blocks_geo.mnc tmp.mnc
mv tmp.mnc slice_blocks_geo.mnc
done
#fi
#cd ..
rm glimblock
echo " "$volumedir"slice_blocks.mnc " > glimblock
echo " "$volumedir"slice_blocks_geo.mnc " >> glimblock
"$bindir"print_world_values_nearest glimblock "$NNSliceDir"all_profiles_3d.dat concat_values_blocks.dat >/dev/null
python "$PyDir"convert_printed_intensities_NN.py concat_values_blocks.dat concat_blocks_data.dat
python "$PyDir"columns_2_profiles.py concat_blocks_data.dat "$volumedir"training_"$cls_label"_"$r"_raw.txt "$volumedir"training_"$cls_label"_"$r"_geo.txt 200
else
#volume data
# Coarser resolutions: sample the single full-brain raw/geo volumes.
cd "$volumedir"
volumes="/data1/users/kwagstyl/bigbrain/volumes/"
rm glimblock
echo " "$volumes"full8_"$r"um.mnc " > glimblock
echo " "$volumes"full8_"$r"um_geo.mnc " >> glimblock
"$bindir"print_world_values_nearest glimblock "$NNSliceDir"all_profiles_3d.dat concat_values_blocks.dat >/dev/null
python "$PyDir"convert_printed_intensities_NN.py concat_values_blocks.dat concat_blocks_data.dat
python "$PyDir"columns_2_profiles.py concat_blocks_data.dat "$volumedir"training_"$cls_label"_"$r"_raw.txt "$volumedir"training_"$cls_label"_"$r"_geo.txt 200
fi
cd ..
if [ ! -d "$TrainingDir"TrainingData_lowres ]; then
mkdir "$TrainingDir"TrainingData_lowres
fi
cd "$TrainingDir"TrainingData_lowres
#take classified profiles and masks from blockdir as they're the same independent of resolution
python "$PyDir"clean_up_profiles_filter_sixlayers.py "$blockdir"training_"$cls_label"_cls.txt "$volumedir"training_"$cls_label"_"$r"_raw.txt "$volumedir"training_"$cls_label"_"$r"_geo.txt \
"$blockdir"training_"$cls_label"_masks.txt training_"$cls_label"_cls.txt training_"$cls_label"_"$r"_raw.txt training_"$cls_label"_"$r"_geo.txt \
training_"$cls_label"_masks.txt "$NNSliceDir"filtered_indices.txt
# change white to highest index, not zeros.
mv training_"$cls_label"_cls.txt prewhite.txt
python "$PyDir"convert_profiles_white.py prewhite.txt training_"$cls_label"_cls.txt
#create combined training dataset
done
done
# Concatenate the per-section files into combined per-resolution training
# sets plus shared label/mask files.
cd "$TrainingDir"TrainingData_lowres
for r in $resolutions
do
rm training_"$r"_raw.txt training_"$r"_geo.txt
cat training*_"$r"_geo.txt > training_"$r"_geo.txt
cat training*_"$r"_raw.txt > training_"$r"_raw.txt
done
rm training_cls.txt training_regions.txt
cat training*cls.txt > training_cls.txt
cat training*masks.txt > training_regions.txt
|
public static TestObject createRandomObject() {
TestObject obj = new TestObject();
obj.value1 = ThreadLocalRandom.current().nextInt();
obj.value2 = ThreadLocalRandom.current().nextFloat();
obj.value3 = ThreadLocalRandom.current().nextBoolean();
return obj;
} |
#!/bin/bash
#当前路径,不需要修改
cur_path=`pwd`
#集合通信参数,不需要修改
export RANK_SIZE=1
export JOB_ID=10087
RANK_ID_START=0
# 数据集路径,保持为空,不需要修改
data_path=""
#设置默认日志级别,不需要修改
#export ASCEND_GLOBAL_LOG_LEVEL=3
#基础参数,需要模型审视修改
#网络名称,同目录名称
Network="EfficientNet-B3_ID0450_for_PyTorch"
#训练epoch
train_epochs=100
#训练batch_size
batch_size=128
#训练step
#train_steps=`expr 1281167 / ${batch_size}`
#学习率
learning_rate=0.15
#TF2.X独有,需要模型审视修改
#export NPU_LOOP_SIZE=${train_steps}
#维测参数,precision_mode需要模型审视修改
#precision_mode="allow_mix_precision"
#维持参数,以下不需要修改
over_dump=False
data_dump_flag=False
data_dump_step="10"
profiling=False
# 帮助信息,不需要修改
if [[ $1 == --help || $1 == -h ]];then
echo"usage:./train_performance_1P.sh <args>"
echo " "
echo "parameter explain:
--precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
--over_dump if or not over detection, default is False
--data_dump_flag data dump flag, default is False
--data_dump_step data dump step, default is 10
--profiling if or not profiling for performance debug, default is False
--data_path source data of training
-h/--help show help message
"
exit 1
fi
# Argument parsing/validation; no need to modify.
for para in $*
do
    if [[ $para == --precision_mode* ]];then
        precision_mode=`echo ${para#*=}`
    elif [[ $para == --over_dump* ]];then
        over_dump=`echo ${para#*=}`
        over_dump_path=${cur_path}/output/overflow_dump
        mkdir -p ${over_dump_path}
    elif [[ $para == --data_dump_flag* ]];then
        data_dump_flag=`echo ${para#*=}`
        data_dump_path=${cur_path}/output/data_dump
        mkdir -p ${data_dump_path}
    elif [[ $para == --data_dump_step* ]];then
        data_dump_step=`echo ${para#*=}`
    elif [[ $para == --profiling* ]];then
        profiling=`echo ${para#*=}`
        profiling_dump_path=${cur_path}/output/profiling
        mkdir -p ${profiling_dump_path}
    elif [[ $para == --data_path* ]];then
        data_path=`echo ${para#*=}`
    fi
done

# Verify data_path was supplied; no need to modify.
if [[ $data_path == "" ]];then
    echo "[Error] para \"data_path\" must be confing"
    exit 1
fi
# Training start time; no need to modify.
start_time=$(date +%s)

# Enter the training script directory; review and modify per model.
cd $cur_path/..

for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
do
    # Set environment variables; no need to modify.
    echo "Device ID: $ASCEND_DEVICE_ID"
    export RANK_ID=$RANK_ID

    # Create the per-device output directory; no need to modify.
    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
    else
        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
    fi

    # Core binding; delete for models that do not need it, review per model.
    #let a=RANK_ID*12
    #let b=RANK_ID+1
    #let c=b*12-1

    # Launch the training script; the arguments below need no modification,
    # the rest should be reviewed per model.
    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path
    python3 ${cur_path}/../examples/imagenet/main.py \
        --data=${data_path} \
        --arch=efficientnet-b3 \
        --batch-size=${batch_size} \
        --lr=${learning_rate} \
        --momentum=0.9 \
        --epochs=${train_epochs} \
        --autoaug \
        --amp \
        --pm=O1 \
        --loss_scale=32 \
        --val_feq=10 \
        --npu=$ASCEND_DEVICE_ID > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
done
wait
# Training end time; no need to modify.
end_time=$(date +%s)
e2e_time=$(( $end_time - $start_time ))

# Result reporting; no need to modify.
echo "------------------ Final result ------------------"

# Performance (FPS); review and modify per model.
FPS=`grep -a 'FPS' $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk -F " " '{print $NF}'|awk 'END {print}'`
# Print; no need to modify.
echo "Final Performance images/sec : $FPS"

# Training accuracy; review and modify per model.
train_accuracy=`grep -a '* Acc@1' $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print}'|awk -F "Acc@1" '{print $NF}'|awk -F " " '{print $1}'`
# Print; no need to modify.
echo "Final Train Accuracy : ${train_accuracy}"
echo "E2E Training Duration sec : $e2e_time"

# Performance-watch result summary.
# Training case information; no need to modify.
BatchSize=${batch_size}
DeviceType=`uname -m`
CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'

## Collect performance data; no need to modify.
# Throughput.
ActualFPS=${FPS}
# Training time per iteration in ms (batch_size * 1000 / FPS).
TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'*1000/'${FPS}'}'`

# Extract per-step Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model.
grep Epoch: $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F "Loss" '{print $NF}' | awk -F " " '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
# Loss of the final iteration; no need to modify.
ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`

# Key metrics written to ${CaseName}.log; no need to modify.
echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
# NOTE(review): "TrainAcuracy" is misspelled but kept as-is — downstream
# parsers may depend on the exact key.
echo "TrainAcuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
|
<reponame>alex-dorokhov/gdx-pay
package com.badlogic.gdx.pay.android.googleplay.testdata;
import com.badlogic.gdx.pay.PurchaseManagerConfig;
import static com.badlogic.gdx.pay.PurchaseManagerConfig.STORE_NAME_ANDROID_GOOGLE;
import static com.badlogic.gdx.pay.android.googleplay.testdata.OfferObjectMother.offerConsumable;
import static com.badlogic.gdx.pay.android.googleplay.testdata.OfferObjectMother.offerFullEditionEntitlement;
import static com.badlogic.gdx.pay.android.googleplay.testdata.OfferObjectMother.offerSubscription;
/**
 * Object mother that produces {@link PurchaseManagerConfig} instances with a
 * single offer each (entitlement, consumable or subscription), all pointed at
 * the Google Play store with a fake public key.
 */
public class PurchaseManagerConfigObjectMother {

    /** Fake Google Play public key shared by every test configuration. */
    private static final String TEST_STORE_KEY =
            "kbiosdfjoifjkldsfjowei8rfjiwfklmujwemflksdfjmsdklfj/sdifjsdlfkjsdfksd";

    public static PurchaseManagerConfig managerConfigGooglePlayOneOfferBuyFullEditionProduct() {
        PurchaseManagerConfig configuration = new PurchaseManagerConfig();
        configuration.addStoreParam(STORE_NAME_ANDROID_GOOGLE, TEST_STORE_KEY);
        configuration.addOffer(offerFullEditionEntitlement());
        return configuration;
    }

    public static PurchaseManagerConfig managerConfigGooglePlayOneOfferConsumbableProduct() {
        PurchaseManagerConfig configuration = new PurchaseManagerConfig();
        configuration.addStoreParam(STORE_NAME_ANDROID_GOOGLE, TEST_STORE_KEY);
        configuration.addOffer(offerConsumable());
        return configuration;
    }

    public static PurchaseManagerConfig managerConfigGooglePlayOneOfferSubscriptionProduct() {
        PurchaseManagerConfig configuration = new PurchaseManagerConfig();
        configuration.addStoreParam(STORE_NAME_ANDROID_GOOGLE, TEST_STORE_KEY);
        configuration.addOffer(offerSubscription());
        return configuration;
    }
}
|
<gh_stars>10-100
'use strict';
const AWS = require('aws-sdk');
const path = require('path');
const { updateLabels } = require('../lib/database');
const rekognition = new AWS.Rekognition();
/**
 * Handles label request to Amazon Rekognition
 * @param event
 * @param context
 * @param callback
 * @returns {Promise.<TResult>}
 */
module.exports.handler =
  (event, context, callback) => {
    // SNS envelope wraps the original S3 event record.
    const snsMessage = JSON.parse(event.Records[0].Sns.Message);
    const s3Record = snsMessage.Records[0].s3;

    // Key layout: captures/<id>/<frame>; strip the prefix to recover the id.
    const parsedKey = path.parse(s3Record.object.key);
    const captureId = parsedKey.dir.replace(/captures\//, '');

    const request = {
      Image: {
        S3Object: {
          Bucket: s3Record.bucket.name,
          Name: s3Record.object.key,
        },
      },
      MaxLabels: 100,
      MinConfidence: 50,
    };

    return rekognition
      .detectLabels(request)
      .promise()
      .then((data) => {
        const labels = data.Labels || [];
        return updateLabels({ id: captureId, frame: parsedKey.base, labels });
      })
      .then(() => callback(null))
      .catch((error) => {
        console.log(JSON.stringify(error));
        callback(error);
      });
  };
|
#!/bin/bash
# OpenShift namespaced objects:
# oc get --raw /oapi/v1/ | python -c 'import json,sys ; resources = "\n".join([o["name"] for o in json.load(sys.stdin)["resources"] if o["namespaced"] and "create" in o["verbs"] and "delete" in o["verbs"] ]) ; print resources'
# Kubernetes namespaced objects:
# oc get --raw /api/v1/ | python -c 'import json,sys ; resources = "\n".join([o["name"] for o in json.load(sys.stdin)["resources"] if o["namespaced"] and "create" in o["verbs"] and "delete" in o["verbs"] ]) ; print resources'
set -eo pipefail

# Print a message ($1) and exit with the given status code ($2).
die(){
    echo "$1"
    exit $2
}

# Print command usage.
usage(){
    echo "$0 <projectname>"
    echo " projectname The OCP project to be exported"
    echo "Examples:"
    echo " $0 myproject"
}
# Export every object of one resource kind to ${PROJECT}/${BASENAME}.json.
#   $1 KIND        resource kind understood by 'oc get'
#   $2 BASENAME    output file basename
#   $3 DELETEPARAM jq filter that strips server-generated fields
# NOTE: DELETEPARAM is expanded unquoted below, so it must contain no
# whitespace (the callers build it from concatenated single-quoted pieces).
exportlist(){
    if [ "$#" -lt "3" ]; then
        echo "Invalid parameters"
        return
    fi

    KIND=$1
    BASENAME=$2
    DELETEPARAM=$3

    echo "Exporting '${KIND}' resources to ${PROJECT}/${BASENAME}.json"

    # '|| true' keeps 'set -e' from aborting the whole export run.
    BUFFER=$(oc get ${KIND} --export -o json -n ${PROJECT} || true)

    # return if resource type unknown or access denied
    if [ -z "${BUFFER}" ]; then
        echo "Skipped: no data"
        return
    fi

    # return if list empty
    if [ "$(echo ${BUFFER} | jq '.items | length > 0')" == "false" ]; then
        echo "Skipped: list empty"
        return
    fi

    echo ${BUFFER} | jq ${DELETEPARAM} > ${PROJECT}/${BASENAME}.json
}
# Export the project namespace itself, stripping server-generated metadata.
ns(){
    echo "Exporting namespace to ${PROJECT}/ns.json"
    oc get --export -o=json ns ${PROJECT} | jq '
        del(.status,
            .metadata.uid,
            .metadata.selfLink,
            .metadata.resourceVersion,
            .metadata.creationTimestamp,
            .metadata.generation
        )' > ${PROJECT}/ns.json
}
# Export rolebindings, stripping server-generated metadata.
rolebindings(){
    exportlist \
        rolebindings \
        rolebindings \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp)'
}

# Export serviceaccounts, stripping server-generated metadata.
serviceaccounts(){
    exportlist \
        serviceaccounts \
        serviceaccounts \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp)'
}

# Export secrets, excluding auto-created service-account tokens and
# stripping server-generated metadata.
secrets(){
    exportlist \
        secrets \
        secrets \
        'del('\
'.items[]|select(.type=='\
'"'\
'kubernetes.io/service-account-token'\
'"'\
'))|'\
'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.annotations.'\
'"'\
'kubernetes.io/service-account.uid'\
'"'\
')'
}
# Export deploymentconfigs (one file per DC). DCs with ImageChange triggers
# additionally get a "_patched" copy in which each lastTriggeredImage
# placeholder is replaced by the trigger's source image reference.
dcs(){
    echo "Exporting deploymentconfigs to ${PROJECT}/dc_*.json"
    DCS=$(oc get dc -n ${PROJECT} -o jsonpath="{.items[*].metadata.name}")
    for dc in ${DCS}; do
        oc get --export -o=json dc ${dc} -n ${PROJECT} | jq '
            del(.status,
                .metadata.uid,
                .metadata.selfLink,
                .metadata.resourceVersion,
                .metadata.creationTimestamp,
                .metadata.generation,
                .spec.triggers[].imageChangeParams.lastTriggeredImage
            )' > ${PROJECT}/dc_${dc}.json
        # FIX: the original test '[ !$(... | grep -q ...) ]' always evaluated
        # to true ('grep -q' prints nothing, leaving '[ ! ]'). Use grep's exit
        # status directly so only DCs with ImageChange triggers are patched.
        if cat ${PROJECT}/dc_${dc}.json | jq '.spec.triggers[].type' | grep -q "ImageChange"; then
            # FIX: the original appended a freshly sed-ed copy of the whole
            # file for every container ('>>'), duplicating the document.
            # Patch a single copy in place instead.
            cp ${PROJECT}/dc_${dc}.json ${PROJECT}/dc_${dc}_patched.json
            for container in $(cat ${PROJECT}/dc_${dc}.json | jq -r '.spec.triggers[] | select(.type == "ImageChange") .imageChangeParams.containerNames[]'); do
                echo "Patching DC..."
                OLD_IMAGE=$(cat ${PROJECT}/dc_${dc}.json | jq --arg cname ${container} -r '.spec.template.spec.containers[] | select(.name == $cname)| .image')
                NEW_IMAGE=$(cat ${PROJECT}/dc_${dc}.json | jq -r '.spec.triggers[] | select(.type == "ImageChange") .imageChangeParams.from.name // empty')
                sed -i -e "s#$OLD_IMAGE#$NEW_IMAGE#g" ${PROJECT}/dc_${dc}_patched.json
            done
        fi
    done
}
# Export buildconfigs; also drops lastTriggeredImage so triggers re-fire.
bcs(){
    exportlist \
        bc \
        bcs \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.generation,'\
'.items[].spec.triggers[].imageChangeParams.lastTriggeredImage)'
}

# Export builds.
builds(){
    exportlist \
        builds \
        builds \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}

# Export imagestreams (output basename 'iss'); also clears the
# dockerRepositoryCheck annotation so the check re-runs after import.
is(){
    exportlist \
        is \
        iss \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation,'\
'.items[].metadata.annotations."openshift.io/image.dockerRepositoryCheck")'
}

# Export replicationcontrollers.
rcs(){
    exportlist \
        rc \
        rcs \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}
# Export services (one file per service); for services without an app
# selector, also export their (user-managed) endpoints.
svcs(){
    echo "Exporting services to ${PROJECT}/svc_*.json"
    SVCS=$(oc get svc -n ${PROJECT} -o jsonpath="{.items[*].metadata.name}")
    for svc in ${SVCS}; do
        # .spec.clusterIP is dropped so the service can be recreated in
        # another cluster.
        oc get --export -o=json svc ${svc} -n ${PROJECT} | jq '
            del(.status,
                .metadata.uid,
                .metadata.selfLink,
                .metadata.resourceVersion,
                .metadata.creationTimestamp,
                .metadata.generation,
                .spec.clusterIP
            )' > ${PROJECT}/svc_${svc}.json
        if [[ $(cat ${PROJECT}/svc_${svc}.json | jq -e '.spec.selector.app') == "null" ]]; then
            if [[ $(oc get endpoints ${svc} -n ${PROJECT} -o name | wc -l) -gt 0 ]]; then
                oc get --export -o json endpoints ${svc} -n ${PROJECT}| jq '
                    del(.status,
                        .metadata.uid,
                        .metadata.selfLink,
                        .metadata.resourceVersion,
                        .metadata.creationTimestamp,
                        .metadata.generation
                    )' > ${PROJECT}/endpoint_${svc}.json
            fi
        fi
    done
}
# Export pods.
pods(){
    exportlist \
        po \
        pods \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}

# Export configmaps.
cms(){
    exportlist \
        cm \
        cms \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}

# Export persistentvolumeclaims detached from their bound volume (binding
# annotations and spec.volumeName removed so they can be re-bound elsewhere).
pvcs(){
    exportlist \
        pvc \
        pvcs \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation,'\
'.items[].metadata.annotations['\
'"'\
'pv.kubernetes.io/bind-completed'\
'"'\
'],'\
'.items[].metadata.annotations['\
'"'\
'pv.kubernetes.io/bound-by-controller'\
'"'\
'],'\
'.items[].metadata.annotations['\
'"'\
'volume.beta.kubernetes.io/storage-provisioner'\
'"'\
'],'\
'.items[].spec.volumeName)'
}

# Export persistentvolumeclaims with their volume attachment intact.
pvcs_attachment(){
    exportlist \
        pvc \
        pvcs_attachment \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}

# Export routes.
routes(){
    exportlist \
        routes \
        routes \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}

# Export templates.
templates(){
    exportlist \
        templates \
        templates \
        'del('\
'.items[].status,'\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation)'
}

# Export egressnetworkpolicies.
egressnetworkpolicies(){
    exportlist \
        egressnetworkpolicies \
        egressnetworkpolicies \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp)'
}

# Export imagestreamtags.
imagestreamtags(){
    exportlist \
        imagestreamtags \
        imagestreamtags \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].tag.generation)'
}

# Export rolebindingrestrictions.
rolebindingrestrictions(){
    exportlist \
        rolebindingrestrictions \
        rolebindingrestrictions \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp)'
}

# Export limitranges.
limitranges(){
    exportlist \
        limitranges \
        limitranges \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp)'
}

# Export resourcequotas.
resourcequotas(){
    exportlist \
        resourcequotas \
        resourcequotas \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].status)'
}

# Export podpresets.
podpreset(){
    exportlist \
        podpreset \
        podpreset \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp)'
}

# Export cronjobs.
cronjobs(){
    exportlist \
        cronjobs \
        cronjobs \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].status)'
}

# Export statefulsets.
statefulsets(){
    exportlist \
        statefulsets \
        statefulsets \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].status)'
}

# Export horizontalpodautoscalers.
hpas(){
    exportlist \
        hpa \
        hpas \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].status)'
}

# Export deployments.
deployments(){
    exportlist \
        deploy \
        deployments \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation,'\
'.items[].status)'
}

# Export replicasets.
# NOTE(review): '.items[].ownerReferences.uid' looks like it should be
# '.items[].metadata.ownerReferences[].uid' — confirm; jq's del() silently
# ignores the non-matching path either way.
replicasets(){
    exportlist \
        replicasets \
        replicasets \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation,'\
'.items[].status,'\
'.items[].ownerReferences.uid)'
}

# Export poddisruptionbudgets.
poddisruptionbudget(){
    exportlist \
        poddisruptionbudget \
        poddisruptionbudget \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation,'\
'.items[].status)'
}

# Export daemonsets.
daemonset(){
    exportlist \
        daemonset \
        daemonset \
        'del('\
'.items[].metadata.uid,'\
'.items[].metadata.selfLink,'\
'.items[].metadata.resourceVersion,'\
'.items[].metadata.creationTimestamp,'\
'.items[].metadata.generation,'\
'.items[].status)'
}
# Show usage when the first argument asks for help.
# FIX: the original compared "$@" (all arguments) inside [[ ]]; with more
# than one argument that comparison is fragile — test only the first one.
if [[ $1 == "--help" || $1 == "-h" ]]
then
    usage
    exit 0
fi

if [[ $# -lt 1 ]]
then
    usage
    die "projectname not provided" 2
fi

# Required external tools.
for i in jq oc
do
    command -v $i >/dev/null 2>&1 || die "$i required but not found" 3
done

PROJECT=${1}

mkdir -p ${PROJECT}

# Export every supported resource type into ${PROJECT}/.
ns
rolebindings
serviceaccounts
secrets
dcs
bcs
builds
is
imagestreamtags
rcs
svcs
pods
podpreset
cms
egressnetworkpolicies
rolebindingrestrictions
limitranges
resourcequotas
pvcs
pvcs_attachment
routes
templates
cronjobs
statefulsets
hpas
deployments
replicasets
poddisruptionbudget
daemonset

exit 0
|
import { GET_ERRORS, CLEAR_ERRORS, } from '../actions/types';
const initialState = {};

/**
 * Error reducer: GET_ERRORS replaces the whole error state with the action
 * payload; CLEAR_ERRORS resets it to an empty object.
 */
export default function(state = initialState, action) {
  if (action.type === GET_ERRORS) {
    return action.payload;
  }
  if (action.type === CLEAR_ERRORS) {
    return {};
  }
  return state;
}
/*
To take the state and add to it (without permanently altering it!),
use the spread operator (...).
*/
|
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Espruino devices publish a UART service by default.
const nordicUARTService = getEspruinoPrimaryService();

// NOTE(review): the age service and its characteristic use the SAME UUID
// here — confirm that matches the device firmware in device_code.js.
const ageServiceID = '00002a80-0000-1000-8000-00805f9b34fb';
const ageCharacteristicID = '00002a80-0000-1000-8000-00805f9b34fb';

const heartRateServiceID = '0000180d-0000-1000-8000-00805f9b34fb';
const heartRateMaxCharacteristicID = '00002a8d-0000-1000-8000-00805f9b34fb';

// GATT server handle of the currently connected device (set in startTest,
// read by onGattDisconnected).
let gattServer = undefined;
/**
 * Load the device code to the Espruino IDE.
 */
function loadEspruinoDeviceCode() {
  fetch('device_code.js')
    .then((response) => response.text())
    .then((code) => {
      const ideUrl = 'http://www.espruino.com/webide?code=' + encodeURIComponent(code);
      window.open(ideUrl, '_window');
    });
}
/** Logs the disconnect and checks the shared GATT server dropped its link. */
function onGattDisconnected(evt) {
  const { name } = evt.target;
  logInfo(`Disconnected from GATT on device ${name}.`);
  assertFalse(gattServer.connected, 'Server connected');
}
/**
 * Runs the end-to-end test: requests a device advertising the UART service,
 * connects to its GATT server, then exercises the age and heart-rate
 * services (read, write, read-back verify). Always re-enables the buttons
 * and disconnects when done.
 */
async function startTest() {
  clearStatus();
  logInfo('Starting test');

  $('btn_start_test').disabled = true;
  $('btn_load_code').disabled = true;

  try {
    // FIX: both helpers declared a misspelled, unused 'gettServer' parameter
    // and silently closed over the outer 'gattServer'. They now use the
    // server they are handed.
    const testAgeService = async (server) => {
      logInfo(`Requesting age service: ${ageServiceID}...`);
      const service = await server.getPrimaryService(ageServiceID);
      assertEquals(device, service.device, 'service device mismatch');
      assertEquals(ageServiceID, service.uuid, 'incorrect service UUID');
      logInfo(`Connected to service uuid:${service.uuid}, primary:${service.isPrimary}`);

      logInfo(`Requesting characteristic ${ageCharacteristicID}...`);
      const characteristic = await service.getCharacteristic(ageCharacteristicID);
      assertEquals(characteristic.service, service,
        'characteristic service mismatch');
      assertEquals(ageCharacteristicID, characteristic.uuid,
        'incorrect characteristic UUID');

      // FIX: DataView.getUint8 takes only a byte offset; the extra
      // 'littleEndian' argument previously passed was ignored.
      let dataView = await characteristic.readValue();
      let val = dataView.getUint8(0);
      logInfo(`Starting age is ${val}`);

      logInfo(`Setting age to 25`);
      await characteristic.writeValueWithResponse(new Uint8Array([25]));
      dataView = await characteristic.readValue();
      val = dataView.getUint8(0);
      assertEquals(25, val, 'incorrect age');
      logInfo('readback success.');

      logInfo(`Setting to 73`);
      await characteristic.writeValueWithResponse(new Uint8Array([73]));
      dataView = await characteristic.readValue();
      val = dataView.getUint8(0);
      assertEquals(73, val, 'incorrect age');
      logInfo('readback success.');
    }

    const testHeartRateService = async (server) => {
      logInfo(`Requesting service: ${heartRateServiceID}...`);
      const service = await server.getPrimaryService(heartRateServiceID);
      assertEquals(device, service.device, 'service device mismatch');
      assertEquals(heartRateServiceID, service.uuid, 'incorrect service UUID');
      logInfo(`Connected to service uuid:${service.uuid}, primary:${service.isPrimary}`);

      logInfo(`Requesting characteristic ${heartRateMaxCharacteristicID}...`);
      const characteristic = await service.getCharacteristic(heartRateMaxCharacteristicID);
      assertEquals(characteristic.service, service,
        'characteristic service mismatch');
      assertEquals(heartRateMaxCharacteristicID, characteristic.uuid,
        'incorrect characteristic UUID');

      let dataView = await characteristic.readValue();
      let val = dataView.getUint8(0);
      logInfo(`Starting max hart rate is ${val}`);

      logInfo(`Setting maximum heart rate to 67`);
      await characteristic.writeValueWithResponse(new Uint8Array([67]));
      dataView = await characteristic.readValue();
      val = dataView.getUint8(0);
      assertEquals(67, val, 'incorrect maximum heart rate');
      logInfo('readback success.');

      logInfo(`Setting to 110`);
      await characteristic.writeValueWithResponse(new Uint8Array([110]));
      dataView = await characteristic.readValue();
      val = dataView.getUint8(0);
      assertEquals(110, val, 'incorrect maximum heart rate');
      logInfo('readback success.');
    }

    const options = {
      filters: [{ services: [nordicUARTService] }],
      optionalServices: [ageServiceID,
        BluetoothUUID.getService(heartRateServiceID)]
    };
    logInfo(`Requesting Bluetooth device with service ${nordicUARTService}`);
    const device = await navigator.bluetooth.requestDevice(options);
    device.addEventListener('gattserverdisconnected', onGattDisconnected);

    logInfo(`Connecting to GATT server for device \"${device.name}\"...`);
    gattServer = await device.gatt.connect();
    assertEquals(device, gattServer.device, 'server device mismatch');
    assertTrue(gattServer.connected, 'server.connected should be true');

    await testAgeService(gattServer);
    await testHeartRateService(gattServer);

    logInfo('Test passed.');
  } catch (error) {
    logError(`Unexpected failure: ${error}`);
  }

  testDone();
  $('btn_start_test').disabled = false;
  $('btn_load_code').disabled = false;

  if (gattServer) {
    logInfo('Disconnecting from GATT.');
    gattServer.disconnect();
  }
}
/**
 * Page initialisation: checks Web Bluetooth support and adapter
 * availability, and toggles the matching status banners.
 */
async function init() {
  if (!isBluetoothSupported()) {
    console.log('Bluetooth not supported.');
    $('bluetooth_available').style.display = 'none';
    // FIX: window.isSecureContext is a boolean; the original compared it to
    // the string 'https', which is never equal, so the "insecure context"
    // banner was always shown.
    if (window.isSecureContext) {
      $('bluetooth_none').style.visibility = 'visible';
    } else {
      $('bluetooth_insecure').style.visibility = 'visible';
    }
    return;
  }

  const available = await navigator.bluetooth.getAvailability();
  if (!available) {
    $('bluetooth_available').style.display = 'none';
    $('bluetooth_unavailable').style.visibility = 'visible';
  }
}
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.util.internal
import java.lang.reflect.{ Method, ParameterizedType, Modifier }
import com.tzavellas.coeus.FrameworkException
/**
 * Various utility methods for working with reflection APIs.
 */
object ReflectionHelper {

  /**
   * Tests whether the specified class is an abstract class or interface.
   */
  def isAbstract(klass: Class[_]) = {
    val modifiers = klass.getModifiers
    // An interface is implicitly abstract, so either modifier bit qualifies.
    Modifier.isAbstract(modifiers) || Modifier.isInterface(modifiers)
  }

  /**
   * Tries to find the type argument of the collection or array that gets returned
   * from the given method.
   *
   * @param collectionGetter a method that returns a collection or array
   * @throws FrameworkException if a type argument cannot be found
   */
  def getTypeArgumentsOfCollection(collectionGetter: Method): Array[Class[_]] = {
    collectionGetter.getGenericReturnType match {
      // Parameterized collection: keep only type arguments that are plain
      // classes (wildcards and type variables are silently dropped by collect).
      case paramType: ParameterizedType =>
        paramType.getActualTypeArguments.collect { case c: Class[_] => c }
      // Arrays carry their element type directly.
      case array: Class[_] if array.isArray =>
        Array(array.getComponentType)
      case unknown =>
        throw new FrameworkException("Error while reading generic type argument from type: " + unknown)
    }
  }

  /**
   * Updates an array using {@link java.lang.reflect.Array} methods.
   *
   * @param array the array to update
   * @param index the index of the element to update
   * @param value the new value of the element
   */
  def updateArray(array: Array[_], index: Int, value: Any) {
    import java.lang.reflect.Array._
    // Dispatch to the primitive-specific setter so primitive arrays are
    // updated without boxing; 'set' handles the reference-array fallback.
    value match {
      case b: Boolean => setBoolean(array, index, b)
      case b: Byte => setByte(array, index, b)
      case c: Char => setChar(array, index, c)
      case d: Double => setDouble(array, index, d)
      case f: Float => setFloat(array, index, f)
      case i: Int => setInt(array, index, i)
      case l: Long => setLong(array, index, l)
      case s: Short => setShort(array, index, s)
      case _ => set(array, index, value)
    }
  }
}
|
<reponame>aaawoyucheng/wayne
package base
import (
"fmt"
"strconv"
"strings"
"github.com/aaawoyucheng/wayne/src/backend/common"
"github.com/aaawoyucheng/wayne/src/backend/util/snaker"
)
// ParamBuilderController bundles helpers for parsing pagination, filter,
// path and query parameters from incoming requests.
type ParamBuilderController struct {
	ResultHandlerController
}
// BuildQueryParam assembles a QueryParam from the request: pagination, the
// "deleted" flag, a "filter" of comma-separated key=value pairs, a
// snake_cased sort key and an optional "relate" expression.
func (c *ParamBuilderController) BuildQueryParam() *common.QueryParam {
	pageNo, pageSize := c.buildPageParam()

	query := map[string]interface{}{}
	if deletedStr := c.Input().Get("deleted"); deletedStr != "" {
		deleted, err := strconv.ParseBool(deletedStr)
		if err != nil {
			c.AbortBadRequest("Invalid deleted in query.")
		}
		query["deleted"] = deleted
	}

	if filter := c.Input().Get("filter"); filter != "" {
		for _, pair := range strings.Split(filter, ",") {
			kv := strings.Split(pair, "=")
			if len(kv) != 2 {
				continue // ignore malformed pairs
			}
			key, value := kv[0], kv[1]
			// Also accept "deleted" inside the filter expression.
			if key == "deleted" {
				if deleted, err := strconv.ParseBool(value); err == nil {
					query[key] = deleted
				}
				continue
			}
			query[key] = value
		}
	}

	relate := c.Input().Get("relate")

	return &common.QueryParam{
		PageNo:   pageNo,
		PageSize: pageSize,
		Query:    query,
		Sortby:   snaker.CamelToSnake(c.Input().Get("sortby")),
		Relate:   relate}
}
// BuildKubernetesQueryParam assembles a QueryParam for Kubernetes-backed
// listings: pagination, a "filter" of comma-separated key=value pairs, the
// raw sort key and an optional label selector.
func (c *ParamBuilderController) BuildKubernetesQueryParam() *common.QueryParam {
	pageNo, pageSize := c.buildPageParam()

	query := map[string]interface{}{}
	if filter := c.Input().Get("filter"); filter != "" {
		for _, pair := range strings.Split(filter, ",") {
			kv := strings.Split(pair, "=")
			if len(kv) != 2 {
				continue // ignore malformed pairs
			}
			query[kv[0]] = kv[1]
		}
	}

	return &common.QueryParam{
		PageNo:        pageNo,
		PageSize:      pageSize,
		Query:         query,
		Sortby:        c.Input().Get("sortby"),
		LabelSelector: c.Input().Get("labelSelector")}
}
// buildPageParam reads "pageNo" and "pageSize" from the query string,
// substituting package defaults when absent, and aborts the request with a
// 400 when either value is not a positive integer.
func (c *ParamBuilderController) buildPageParam() (no int64, size int64) {
	pageNo := c.Input().Get("pageNo")
	pageSize := c.Input().Get("pageSize")
	if pageNo == "" {
		pageNo = strconv.Itoa(defaultPageNo)
	}
	if pageSize == "" {
		pageSize = strconv.Itoa(defaultPageSize)
	}
	no, err := strconv.ParseInt(pageNo, 10, 64)
	// pageNo must bigger than zero.
	if err != nil || no < 1 {
		c.AbortBadRequest("Invalid pageNo in query.")
	}
	// pageSize must bigger than zero.
	size, err = strconv.ParseInt(pageSize, 10, 64)
	if err != nil || size < 1 {
		c.AbortBadRequest("Invalid pageSize in query.")
	}
	return
}
// GetIDFromURL returns the ":id" path parameter as a non-negative int64,
// aborting with 400 when missing or malformed.
func (c *ParamBuilderController) GetIDFromURL() int64 {
	return c.GetIntParamFromURL(":id")
}

// GetIntParamFromURL returns the named path parameter as a non-negative
// int64, aborting with 400 when missing or malformed.
func (c *ParamBuilderController) GetIntParamFromURL(param string) int64 {
	paramStr := c.Ctx.Input.Param(param)
	if len(paramStr) == 0 {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in URL", param))
	}
	paramInt, err := strconv.ParseInt(paramStr, 10, 64)
	if err != nil || paramInt < 0 {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in URL", param))
	}
	return paramInt
}

// GetIntParamFromQuery returns the named query parameter as a non-negative
// int64, aborting with 400 when missing or malformed.
func (c *ParamBuilderController) GetIntParamFromQuery(param string) int64 {
	paramStr := c.Input().Get(param)
	if len(paramStr) == 0 {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in Query", param))
	}
	paramInt, err := strconv.ParseInt(paramStr, 10, 64)
	if err != nil || paramInt < 0 {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in Query", param))
	}
	return paramInt
}

// GetBoolParamFromQuery returns the named query parameter as a bool,
// aborting with 400 when missing or malformed.
func (c *ParamBuilderController) GetBoolParamFromQuery(param string) bool {
	paramStr := c.Input().Get(param)
	if len(paramStr) == 0 {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in Query", param))
	}
	paramBool, err := strconv.ParseBool(paramStr)
	if err != nil {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in Query", param))
	}
	return paramBool
}

// GetBoolParamFromQueryWithDefault returns the named query parameter as a
// bool, falling back to defaultValue when absent; a present but malformed
// value still aborts with 400.
func (c *ParamBuilderController) GetBoolParamFromQueryWithDefault(param string, defaultValue bool) bool {
	paramStr := c.Input().Get(param)
	if len(paramStr) == 0 {
		return defaultValue
	}
	paramBool, err := strconv.ParseBool(paramStr)
	if err != nil {
		c.AbortBadRequest(fmt.Sprintf("Invalid %s in Query", param))
	}
	return paramBool
}

// GetDeleteFromQuery reads the "deleted" flag (default false).
func (c *ParamBuilderController) GetDeleteFromQuery() bool {
	return c.GetBoolParamFromQueryWithDefault("deleted", false)
}

// GetLogicalFromQuery reads the "logical" flag (default true).
func (c *ParamBuilderController) GetLogicalFromQuery() bool {
	return c.GetBoolParamFromQueryWithDefault("logical", true)
}

// GetIsOnlineFromQuery reads the "isOnline" flag (default false).
func (c *ParamBuilderController) GetIsOnlineFromQuery() bool {
	return c.GetBoolParamFromQueryWithDefault("isOnline", false)
}
|
from selenium import webdriver
class WebInteraction:
    """Thin helper around the application's Selenium driver for common
    navigation actions."""

    def __init__(self, app):
        # Keep a handle on the application fixture that owns the webdriver.
        self.app = app

    def load_home_page(self):
        """Navigate to the home page unless it is already displayed.

        The page counts as loaded when a "Last name" link is present.
        """
        driver = self.app.wd
        if driver.find_elements_by_link_text("Last name"):
            return
        driver.find_element_by_link_text("home").click()

    def select_by_index(self, index):
        """Tick the checkbox named ``selected[]`` at position ``index``."""
        driver = self.app.wd
        driver.find_elements_by_name("selected[]")[index].click()
# Example usage
# Assuming 'app' is an instance of the web application
# interaction = WebInteraction(app)
# interaction.load_home_page()
# interaction.select_by_index(2) |
<reponame>fim/purefap
from django.conf.urls import patterns, include, url

from filebrowser.sites import site

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

# NOTE(review): 'patterns(...)' is the pre-1.10 Django URLconf style — this
# module assumes an older Django release; verify against the project's
# pinned requirements before upgrading.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'purefap.views.home', name='home'),
    # url(r'^purefap/', include('purefap.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^filebrowser/', include(site.urls)),       # django-filebrowser UI
    url(r'^grappelli/', include('grappelli.urls')),  # grappelli admin skin
    url(r'^', include(admin.site.urls)),             # admin at the site root
)
|
<reponame>tdm1223/Algorithm<filename>acmicpc.net/source/5575.cpp
// 5575. 타임 카드
// 2019.05.21
// 구현
#include<iostream>
using namespace std;
// Reads three pairs of clock-in / clock-out times (h m s each) and prints
// the elapsed working time of each pair as "H M S".
int main()
{
    int h1, h2, m1, m2, s1, s2;

    for (int i = 0; i < 3; i++)
    {
        // Read arrival and departure timestamps.
        std::cin >> h1 >> m1 >> s1 >> h2 >> m2 >> s2;

        // Convert both timestamps to seconds, subtract, then split the
        // difference back into hours/minutes/seconds — equivalent to the
        // original digit-by-digit borrow arithmetic.
        int elapsed = (h2 * 3600 + m2 * 60 + s2) - (h1 * 3600 + m1 * 60 + s1);
        int h = elapsed / 3600;
        int m = (elapsed / 60) % 60;
        int s = elapsed % 60;

        std::cout << h << " " << m << " " << s << std::endl;
    }
    return 0;
}
|
<reponame>vfishv/DanmakuFlameMaster
package tv.cjump.ndkbitmap_armv7a;
/**
 * Empty placeholder class.
 *
 * NOTE(review): appears to exist only so this ABI-specific package produces
 * a non-empty artifact — confirm before removing.
 */
public class Pragma {
}
|
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
// RUN: not %target-swift-frontend %s -parse
// Minimal fixture for the parser test: a class with one empty method,
// instantiated once at file scope.
class MyClass { // Completed class declaration
    func g() { // Added method declaration
        // Method body
    }
}

let d = MyClass() // Instantiating the class
package io.dronefleet.mavlink.uavionix;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * Status flags for ADS-B transponder dynamic output
 */
@MavlinkEnum
public enum UavionixAdsbRfHealth {
    /**
     * RF health not yet determined — transponder still initializing.
     */
    @MavlinkEntryInfo(0)
    UAVIONIX_ADSB_RF_HEALTH_INITIALIZING,

    /**
     * RF path healthy.
     */
    @MavlinkEntryInfo(1)
    UAVIONIX_ADSB_RF_HEALTH_OK,

    /**
     * Transmit (TX) path failure.
     */
    @MavlinkEntryInfo(2)
    UAVIONIX_ADSB_RF_HEALTH_FAIL_TX,

    /**
     * Receive (RX) path failure. (Entry value 16 — flag-style, not
     * sequential.)
     */
    @MavlinkEntryInfo(16)
    UAVIONIX_ADSB_RF_HEALTH_FAIL_RX
}
|
/* jshint indent: 1 */
// Sequelize model for the legacy REASON table.
// Composite primary key: (PROPTYPE, REASONTYPE, REASONCODE).
module.exports = function(sequelize, DataTypes) {
return sequelize.define('reason', {
recid: {
type: DataTypes.INTEGER,
allowNull: false,
field: 'RECID'
},
proptype: {
type: DataTypes.CHAR,
allowNull: false,
primaryKey: true,
field: 'PROPTYPE'
},
reasontype: {
type: DataTypes.CHAR,
allowNull: false,
primaryKey: true,
field: 'REASONTYPE'
},
reasoncode: {
type: DataTypes.CHAR,
allowNull: false,
primaryKey: true,
field: 'REASONCODE'
},
reason: {
type: DataTypes.CHAR,
allowNull: true,
field: 'REASON'
},
assmtlst: {
type: DataTypes.CHAR,
allowNull: true,
field: 'ASSMTLST'
},
coaType: {
type: DataTypes.CHAR,
allowNull: true,
field: 'COA_TYPE'
},
digestVal: {
// NOTE(review): BOOLEAN column with a string default '0' — Sequelize
// coerces this, but confirm it matches the DB column's actual default.
type: DataTypes.BOOLEAN,
allowNull: true,
defaultValue: '0',
field: 'DIGEST_VAL'
}
}, {
// Existing table; Sequelize must not pluralize or add timestamp columns.
tableName: 'REASON',
timestamps: false
});
};
|
<gh_stars>0
#! /usr/bin/env python3
# Sample standard deviation over 5 observations (Bessel's correction: n-1 = 4).
#
# NOTE(review): each term has the form (a - (-b)**2), i.e. "a minus b squared".
# If the intent was the usual "(x - mean)**2" the parenthesization is wrong;
# the arithmetic is kept exactly as originally written — confirm with the author.
stddev = ( (1./(5.-1.)) * (
(0.60984565298-(-0.45298911858)**2) +
(0.61641291630-(-0.45481953564)**2) +
(1.35860151160-(-0.48066105923)**2) +
(0.78720769003-(-0.47316713469)**2) +
(0.56393677687-(-0.46204733302)**2)
)
)**0.5
# Bug fix: "print stddev" is Python 2 statement syntax and a SyntaxError under
# the python3 shebang above; use the print() function instead.
print(stddev)
|
<reponame>chylex/Hardcore-Ender-Expansion
package chylex.hee.block;
import java.util.List;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import chylex.hee.HardcoreEnderExpansion;
import chylex.hee.block.material.MaterialDungeonPuzzle;
import chylex.hee.entity.fx.FXHelper;
import chylex.hee.entity.fx.FXHelper.Axis;
import chylex.hee.entity.fx.FXType;
import chylex.hee.entity.technical.EntityTechnicalPuzzleChain;
import chylex.hee.entity.technical.EntityTechnicalPuzzleSolved;
import chylex.hee.item.block.ItemBlockWithSubtypes.IBlockSubtypes;
import chylex.hee.packets.PacketPipeline;
import chylex.hee.packets.client.C20Effect;
import chylex.hee.system.abstractions.Pos;
import chylex.hee.system.abstractions.Pos.PosMutable;
import chylex.hee.system.abstractions.entity.EntitySelector;
import chylex.hee.system.abstractions.facing.Facing4;
import chylex.hee.system.collections.CollectionUtil;
import chylex.hee.system.logging.Stopwatch;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Puzzle-floor block for the Ender dungeon: a grid of trigger/chained/
 * distributor tiles identified by metadata. Lighting all tiles wins the
 * puzzle and spawns an {@link EntityTechnicalPuzzleSolved}.
 */
public class BlockDungeonPuzzle extends Block implements IBlockSubtypes{
private static final Material dungeonPuzzle = new MaterialDungeonPuzzle();
// Allowed square dungeon edge lengths (in blocks).
public static final byte minDungeonSize = 7, maxDungeonSize = 13;
// Metadata values; "lit" variants are always unlit+1 (see toggleState/isLit).
public static final byte metaTriggerUnlit = 0, metaTriggerLit = 1, metaChainedUnlit = 2, metaChainedLit = 3,
metaDistributorSpreadUnlit = 4, metaDistributorSpreadLit = 5,
metaDistributorSquareUnlit = 6, metaDistributorSquareLit = 7,
metaPortal = 11, metaDisabled = 12, metaWall = 13, metaRock = 14, metaCeiling = 15;
// Maps metadata -> index into iconArray (see registerBlockIcons).
public static final byte[] icons = new byte[]{ 2, 3, 4, 5, 6, 7, 8, 9, 3, 3, 3, 10, 10, 0, 1, 3 };
// Maps metadata -> unlocalized-name suffix; null for meta values without an item form.
public static final String[] names = new String[]{
"trigger.unlit", "trigger.lit", "chained.unlit", "chained.lit", "distr.spread.unlit", "distr.spread.lit", "distr.square.unlit", "distr.square.lit",
null, null, null, null, "disabled", "wall", "rock", "ceiling"
};
// True only for trigger tiles (the ones the player can activate directly).
public static final boolean canTrigger(int meta){
return meta == metaTriggerUnlit || meta == metaTriggerLit;
}
// Flips unlit<->lit; structural metas (wall/rock/ceiling/disabled/portal) are unchanged.
public static final int toggleState(int meta){
if (meta == metaWall || meta == metaRock || meta == metaCeiling || meta == metaDisabled || meta == metaPortal)return meta;
else return (meta&1) == 0 ? meta+1 : meta-1;
}
// Lit variants have the low bit set. NOTE(review): also true for metaPortal/metaWall/metaCeiling (11,13,15).
public static final boolean isLit(int meta){
return (meta&1) != 0;
}
// Returns the unlit counterpart of a lit meta (identity for even metas).
public static final int getUnlit(int meta){
return (meta&1) != 0 ? meta-1 : meta;
}
@SideOnly(Side.CLIENT)
private IIcon[] iconArray;
public BlockDungeonPuzzle(){
super(dungeonPuzzle);
}
/**
 * Update chain from the entity, return false to stop the chain.
 */
public boolean updateChain(World world, Pos pos, Facing4 chainDir){
int meta = pos.getMetadata(world), toggled = toggleState(meta);
if (meta != toggled){
pos.setMetadata(world, toggled);
int unlit = getUnlit(meta);
if (unlit == metaDistributorSpreadUnlit){
// Spread distributor: continue the chain in every direction except back where it came from.
for(Facing4 facing:Facing4.list){
if (facing == chainDir.opposite())continue;
Pos offPos = pos.offset(facing);
int distrMeta = offPos.getMetadata(world);
int distrToggled = toggleState(distrMeta);
if (distrToggled != distrMeta && offPos.getBlock(world) == this){
world.spawnEntityInWorld(new EntityTechnicalPuzzleChain(world, pos, facing));
}
}
}
else if (unlit == metaDistributorSquareUnlit){
// Square distributor: toggle the 8 neighbours directly (no new chain entities).
for(int xx = -1; xx <= 1; xx++){
for(int zz = -1; zz <= 1; zz++){
if (xx == 0 && zz == 0)continue;
Pos offPos = pos.offset(xx, 0, zz);
int distrMeta = offPos.getMetadata(world);
int distrToggled = toggleState(distrMeta);
if (distrToggled != distrMeta && offPos.getBlock(world) == this){
PacketPipeline.sendToAllAround(world.provider.dimensionId, offPos, 64D, new C20Effect(FXType.Basic.DUNGEON_PUZZLE_BURN, offPos));
offPos.setMetadata(world, distrToggled);
}
}
}
}
else return true;
PacketPipeline.sendToAllAround(world.provider.dimensionId, pos, 64D, new C20Effect(FXType.Basic.DUNGEON_PUZZLE_BURN, pos));
}
checkWinConditions(world, pos);
return false;
}
/**
 * Detects the puzzle's bounding rectangle by walking outward from pos,
 * then checks whether every puzzle tile inside it is lit; if so, spawns
 * the "solved" technical entity centered on the area.
 */
public void checkWinConditions(World world, Pos pos){
// Only run when this is the last chain entity still alive in range.
if (EntitySelector.type(world, EntityTechnicalPuzzleChain.class, Pos.getBoundingBox(pos, pos).expand(maxDungeonSize-0.5D, 0D, maxDungeonSize-0.5D)).size() == 1){
int y = pos.getY(), x = pos.getX(), z = pos.getZ(), minX = x, minZ = z, maxX = x, maxZ = z, cnt = 0;
PosMutable mpos = new PosMutable();
boolean isFinished = true;
Stopwatch.time("BlockDungeonPuzzle - win detection - coords");
// Walk along both axes until leaving the puzzle blocks; each loop overshoots by one.
while(mpos.set(--minX, y, z).getBlock(world) == this);
while(mpos.set(x, y, --minZ).getBlock(world) == this);
while(mpos.set(++maxX, y, z).getBlock(world) == this);
while(mpos.set(x, y, ++maxZ).getBlock(world) == this);
// Undo the one-block overshoot of each walk above.
++minX;
++minZ;
--maxX;
--maxZ;
// Expand the Z bounds by scanning every column in the X range...
for(int px = minX, pz = z; px <= maxX; px++){
while(mpos.set(px, y, --pz).getBlock(world) == this);
if (pz+1 < minZ)minZ = pz+1;
pz = z;
while(mpos.set(px, y, ++pz).getBlock(world) == this);
if (pz-1 > maxZ)maxZ = pz-1;
}
// ...and the X bounds by scanning every row in the Z range.
for(int pz = minZ, px = x; pz <= maxZ; pz++){
while(mpos.set(--px, y, pz).getBlock(world) == this);
if (px+1 < minX)minX = px+1;
px = x;
while(mpos.set(++px, y, pz).getBlock(world) == this);
if (px-1 > maxX)maxX = px-1;
}
// NOTE(review): this second +1/-1 adjustment shrinks the detected rectangle
// by one on each side — presumably to drop the surrounding wall ring; confirm.
++minX;
++minZ;
--maxX;
--maxZ;
Stopwatch.finish("BlockDungeonPuzzle - win detection - coords");
Stopwatch.time("BlockDungeonPuzzle - win detection - conditions");
// A tile is "unsolved" if toggling it would make it lit, i.e. it is currently unlit.
for(int xx = minX; xx <= maxX; xx++){
for(int zz = minZ; zz <= maxZ; zz++){
if (mpos.set(xx, y, zz).getBlock(world) != this)continue;
++cnt;
if (isLit(toggleState(mpos.getMetadata(world)))){
isFinished = false;
xx = maxX+1;
break;
}
}
}
Stopwatch.finish("BlockDungeonPuzzle - win detection - conditions");
// Require at least 90% of the rectangle to actually be puzzle tiles.
if (isFinished && cnt > (maxX-minX+1)*(maxZ-minZ+1)*0.9D){
world.spawnEntityInWorld(new EntityTechnicalPuzzleSolved(world, minX+((maxX-minX+1)>>1), y, minZ+((maxZ-minZ+1)>>1), minX, minZ, maxX, maxZ));
}
}
}
// Portal tiles glow at full brightness.
@Override
public int getLightValue(IBlockAccess world, int x, int y, int z){
return Pos.at(x, y, z).getMetadata(world) == metaPortal ? 15 : super.getLightValue(world, x, y, z);
}
// Event 69: client-side flame burst + fizz sound (eventData 0 = small, otherwise large).
@Override
public boolean onBlockEventReceived(World world, int x, int y, int z, int eventID, int eventData){
if (eventID == 69){
FXHelper.create("flame")
.pos(x+0.5D, y+(eventData == 0 ? 1.15D : 1D+world.rand.nextDouble()*2D), z+0.5D)
.fluctuatePos((rand, axis) -> axis == Axis.Y ? 0D : rand.nextDouble()-0.5D)
.fluctuateMotion(0.05D)
.spawn(world.rand, eventData == 0 ? 3 : 25);
world.playSoundEffect(x+0.5D, y+0.5D, z+0.5D, "random.fizz", 0.5F, 2.6F+(world.rand.nextFloat()-world.rand.nextFloat())*0.8F);
return true;
}
else return false;
}
// Portal tiles emit 18 gold particles per tick plus one white particle
// (the unbraced for-loop covers only the first fx.global call).
@Override
@SideOnly(Side.CLIENT)
public void randomDisplayTick(World world, int x, int y, int z, Random rand){
if (Pos.at(x, y, z).getMetadata(world) == metaPortal){
for(int a = 0; a < 18; a++)HardcoreEnderExpansion.fx.global("portal", x+0.5D+(rand.nextDouble()-0.5D)*0.3D, y+1D+rand.nextDouble()*2D, z+0.5D+(rand.nextDouble()-0.5D)*0.3D, (rand.nextDouble()-0.5D)*0.8D, (rand.nextDouble()-0.5D)*0.2D, (rand.nextDouble()-0.5D)*0.8D, 0.6289F, 0.3359F, 0.0391F);
HardcoreEnderExpansion.fx.global("portal", x+0.5D+(rand.nextDouble()-0.5D)*0.3D, y+1D+rand.nextDouble()*2D, z+0.5D+(rand.nextDouble()-0.5D)*0.3D, (rand.nextDouble()-0.5D)*0.8D, (rand.nextDouble()-0.5D)*0.2D, (rand.nextDouble()-0.5D)*0.8D, 1F, 1F, 1F);
}
}
// Puzzle blocks never drop themselves as items.
@Override
public Item getItemDropped(int meta, Random rand, int fortune){
return null;
}
@Override
protected ItemStack createStackedBlock(int meta){
return null;
}
// Middle-click pick: portals are picked as the "disabled" variant.
@Override
public ItemStack getPickBlock(MovingObjectPosition target, World world, int x, int y, int z, EntityPlayer player){
int meta = Pos.at(x, y, z).getMetadata(world);
if (meta == metaPortal)meta = metaDisabled;
return new ItemStack(this, 1, meta);
}
@Override
public String getUnlocalizedName(ItemStack is){
String name = CollectionUtil.getClamp(names, is.getItemDamage());
return name == null ? "" : "tile.dungeonPuzzle."+name;
}
@Override
@SideOnly(Side.CLIENT)
public IIcon getIcon(int side, int meta){
return meta >= 0 && meta < icons.length ? iconArray[icons[meta]] : iconArray[3];
}
// Creative tab entries for every meta value that has an item form.
@Override
@SideOnly(Side.CLIENT)
public void getSubBlocks(Item item, CreativeTabs tab, List list){
for(byte meta:new byte[]{
metaWall, metaRock, metaCeiling, metaDisabled, metaTriggerUnlit, metaTriggerLit, metaChainedUnlit, metaChainedLit,
metaDistributorSpreadUnlit, metaDistributorSpreadLit, metaDistributorSquareUnlit, metaDistributorSquareLit,
})list.add(new ItemStack(item, 1, meta));
}
@Override
@SideOnly(Side.CLIENT)
public void registerBlockIcons(IIconRegister iconRegister){
iconArray = new IIcon[11];
iconArray[0] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_wall");
iconArray[1] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_wall_rock");
iconArray[2] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_trigger_unlit");
iconArray[3] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_trigger_lit");
iconArray[4] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_chained_unlit");
iconArray[5] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_chained_lit");
iconArray[6] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_distributor_spread_unlit");
iconArray[7] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_distributor_spread_lit");
iconArray[8] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_distributor_square_unlit");
iconArray[9] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_distributor_square_lit");
iconArray[10] = iconRegister.registerIcon("hardcoreenderexpansion:dungeon_puzzle_disabled");
}
}
|
import { createStore, applyMiddleware } from 'redux';
import thunkMiddleware from 'redux-thunk';
// create a reducer
// Reducer: a 'setState' action shallow-merges action.state into the current
// state; every other action type returns the state unchanged.
const reducer = (state = {}, action) =>
  action.type === 'setState' ? { ...state, ...action.state } : state;
// Create and export the singleton store; thunkMiddleware (redux-thunk)
// allows dispatching functions for async action creators.
export const store = createStore(reducer, applyMiddleware(thunkMiddleware));
import { browser, isDev } from '@/utils/env';
// Maps a raw host to the user's preferred display form.
// Currently the identity mapping — kept as a hook point for customisation.
const url2userDefinedUrl: (host: string) => string = host => host;
// Information about the URL shown in the active browser tab.
export type urlInfo = {
concernedProtocol: boolean; // true for protocols the extension tracks (http/https/browser pages)
protocol: string; // scheme without "://", or 'other'
host?: string; // hostname, present only for concerned protocols
url?: string; // display URL; keeps the scheme for browser-internal pages
favIconUrl?: string; // tab favicon, if the browser reported a non-empty one
displayName: string; // human-readable label for the UI
};
// Browser-internal schemes; for these the scheme is kept in the display URL.
const vendors: Array<string> = ['chrome', 'firefox', 'edge'];
/**
 * Inspect the active tab and classify its URL.
 *
 * Returns a {@link urlInfo}: web and browser-internal pages are "concerned"
 * (tracked) and carry host/url/favicon; file:// and unknown schemes are not.
 */
export const getUrlInfoInActiveTab: () => Promise<urlInfo> = async () => {
  // In development there is no extension tabs API; return a fixed mock.
  if (isDev) {
    return {
      concernedProtocol: true,
      protocol: 'http',
      host: 'pabbit.club',
      url: 'pabbit.club',
      displayName: 'pabbit.club',
    };
  }
  // `tabs` is never reassigned — declare it const.
  const tabs = await browser.tabs.query({ active: true, currentWindow: true });
  const originUrl: string = tabs[0].url as string;
  // Leading scheme, e.g. "https" in "https://example.com/...".
  const protocolReg = /^([A-Za-z]+)(?=:\/{2})/.exec(originUrl);
  const protocol = protocolReg ? protocolReg[0] : '';
  switch (protocol) {
    case 'http':
    case 'https':
    case 'chrome':
    // firefox & edge are not tested
    case 'firefox':
    case 'edge': {
      // Braces give this case its own block scope so the `const`
      // declarations below cannot leak into or collide with other cases
      // (lexical declarations in an unbraced case span the whole switch).
      const hostReg = /(?<=:\/{2})[^\r\n\t\f\v\/]+(?=\/?)/.exec(originUrl);
      const host = hostReg ? hostReg[0] : '';
      // Browser-internal pages keep their scheme so they stay distinguishable.
      const url = vendors.includes(protocol) ? `${protocol}://${host}` : host;
      const favIconUrl = tabs[0].favIconUrl;
      return {
        concernedProtocol: true,
        protocol: protocol,
        host: host,
        url: url,
        favIconUrl: favIconUrl && favIconUrl !== '' ? favIconUrl : undefined,
        displayName: url2userDefinedUrl(url),
      };
    }
    case 'file':
      // Local files are not tracked; show the capitalized scheme as the name.
      return {
        concernedProtocol: false,
        protocol: protocol,
        displayName: protocol.charAt(0).toUpperCase() + protocol.slice(1),
      };
    default:
      return {
        concernedProtocol: false,
        protocol: 'other',
        displayName: 'Other',
      };
  }
};
// Tracking/limiting state the extension keeps for a URL.
export type urlState = {
tracked: boolean; // whether usage of this URL is being recorded
limited: boolean; // whether a usage limit applies to it
maxLimitTime?: number; // configured limit — unit not evident here, TODO confirm
currentlyUsedTime?: number; // time already spent — unit not evident here, TODO confirm
openedTimes?: number; // how many times the URL has been opened
};
/**
 * Fetch the stored tracking state for a URL from the background script.
 * In development (no extension runtime) a fixed mock is returned instead.
 */
export const getUrlState: (
  urlInfo: urlInfo,
) => Promise<urlState> = async urlInfo => {
  if (isDev) {
    const devMock: urlState = {
      tracked: true,
      limited: true,
      currentlyUsedTime: 1000,
      maxLimitTime: 45,
      openedTimes: 12,
    };
    return devMock;
  }
  // Delegate to the background page via runtime messaging.
  return browser.runtime.sendMessage({ key: 'popup/info/urlState' });
};
|
# Call my_function() (expected to be defined earlier in the full script),
# reporting any failure instead of letting it propagate.
try:
    result = my_function()
except Exception as e:
    # Broad catch: log the error and continue.
    print("Error encountered: {}".format(e))
finally:
    # Runs whether or not the call raised.
    print("Done!")
package br.edu.ifpb.hefastos_android.activities.delete;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.widget.ArrayAdapter;
import android.widget.Button;
import com.weiwangcn.betterspinner.library.material.MaterialBetterSpinner;
import br.edu.ifpb.hefastos_android.activities.MainActivity;
import br.edu.ifpb.hefastos_android.R;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Screen for deleting a question: three cascading spinners (subject,
 * discipline, question) plus cancel/delete buttons. Views are bound with
 * ButterKnife.
 *
 * NOTE(review): the spinners are currently fed placeholder SPINNERLIST data
 * and btDeletar performs no server call yet — both look unfinished.
 */
public class DeleteQuestaoActivity extends AppCompatActivity {
@BindView(R.id.toolbar)
protected Toolbar toolbar;
@BindView(R.id.btCancelar)
protected Button btCancelar;
@BindView(R.id.btDeletar)
protected Button btDeletar;
@BindView(R.id.spAssunto)
protected MaterialBetterSpinner spAssunto;
@BindView(R.id.spDisciplina)
protected MaterialBetterSpinner spDisciplina;
@BindView(R.id.spQuestao)
protected MaterialBetterSpinner spQuestao;
private Intent intent;
// Placeholder entries shown in all three spinners.
String[] SPINNERLIST = {"Android Material Design", "Material Design Spinner", "Spinner Using Material Library", "Material Spinner Example"};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_delete_questao);
ButterKnife.bind(this);
toolbar.setTitle("Nova Questão");
setSupportActionBar(toolbar);
// Show the up (back) arrow in the action bar.
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
}
// Same placeholder adapter backs all three spinners for now.
ArrayAdapter<String> arrayAdapter = new ArrayAdapter<>(this, android.R.layout.simple_dropdown_item_1line, SPINNERLIST);
spAssunto.setAdapter(arrayAdapter);
spDisciplina.setAdapter(arrayAdapter);
spQuestao.setAdapter(arrayAdapter);
}
// Handle the action-bar up arrow by navigating back to the parent activity.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
NavUtils.navigateUpFromSameTask(this);
finish();
break;
}
return super.onOptionsItemSelected(item);
}
// Cancel: return to the main screen and close this activity.
@OnClick(R.id.btCancelar)
public void btCancelarOnClick(Button b) {
intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}
@OnClick(R.id.btDeletar)
public void btDeletarOnClick(Button b) {
// Deletar no servidor (server-side delete — not implemented yet)
}
}
|
package com.thinkaurelius.titan.diskstorage.keycolumnvalue.keyvalue;
import com.google.common.base.Preconditions;
import java.nio.ByteBuffer;
/**
 * A {@code KeySelector} that accepts every key offered to it, but reports
 * its limit as reached once {@code limit} keys have been counted through
 * {@link #include(ByteBuffer)}.
 */
public class LimitedSelector implements KeySelector {

    private final int limit; // maximum number of keys to accept
    private int count;       // keys counted so far

    /**
     * @param limit maximum number of keys to accept; must be positive
     * @throws IllegalArgumentException if {@code limit} is not positive
     */
    public LimitedSelector(int limit) {
        Preconditions.checkArgument(limit > 0, "The count limit needs to be positive. Given: " + limit);
        this.limit = limit;
        count = 0;
    }

    /** Convenience factory, equivalent to {@code new LimitedSelector(limit)}. */
    public static final LimitedSelector of(int limit) {
        return new LimitedSelector(limit);
    }

    /** Counts the key and always includes it. */
    @Override
    public boolean include(ByteBuffer key) {
        count++;
        return true;
    }

    /** @return whether {@code limit} or more keys have been counted */
    @Override
    public boolean reachedLimit() {
        // Idiomatic boolean return instead of "if (...) return true; else return false;".
        return count >= limit;
    }
}
|
package it.sad.sii.AndroidPassReminder;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
* Created by mmutschl on 18/08/15.
*/
/**
 * A card-check reminder: records when a card was checked, which card it was,
 * and why the check was flagged (a causal id mapped to a localized text).
 *
 * Created by mmutschl on 18/08/15.
 */
public class Reminder {

    private long checktime; // check instant; Joda-Time interprets long instants as epoch millis
    private String cnumber;
    private String cardid;
    private String ctype;
    private String name;
    private int causalId; // key into causalText (1..4)
    private Map<Integer, String> causalText; // localized description per causal id

    public Reminder(long checktime, String cnumber, String name, int causalId, String cardid, String ctype) {
        this.checktime = checktime;
        this.causalId = causalId;
        this.name = name;
        this.cnumber = cnumber;
        this.cardid = cardid;
        this.ctype = ctype;
        causalText = new HashMap<Integer, String>();
        // German texts for a "de" default locale, Italian otherwise.
        if (Locale.getDefault().getLanguage().equals("de")) {
            causalText.put(1, "Strafe");
            causalText.put(2, "Nicht entwertet");
            causalText.put(3, "Option fehlt");
            causalText.put(4, "Doppelcheck-in");
        } else {
            causalText.put(1, "Multato");
            causalText.put(2, "Non obliterato");
            causalText.put(3, "Senza opzioni");
            causalText.put(4, "Doppio check-in");
        }
    }

    public long getChecktime() {
        return checktime;
    }

    public void setChecktime(long checktime) {
        this.checktime = checktime;
    }

    public String getCnumber() {
        return cnumber;
    }

    public void setCnumber(String cnumber) {
        this.cnumber = cnumber;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getCausalId() {
        return causalId;
    }

    public void setCausalId(int causalId) {
        this.causalId = causalId;
    }

    public String getCardid() {
        return cardid;
    }

    public void setCardid(String cardid) {
        this.cardid = cardid;
    }

    public String getCtype() {
        return ctype;
    }

    public void setCtype(String ctype) {
        this.ctype = ctype;
    }

    /** @return "dd/MM/yy HH:mm - <localized causal text>". */
    @Override
    public String toString() {
        // Bug fix: the pattern previously used "hh" (clock-hour of halfday,
        // 1-12) without an am/pm marker, so e.g. 14:05 rendered ambiguously
        // as "02:05". "HH" is the unambiguous 0-23 hour of day.
        DateTimeFormatter dateFormatter = DateTimeFormat.forPattern("dd/MM/yy HH:mm");
        // NOTE(review): causalText.get(causalId) is null for ids outside 1..4
        // and would print "null" — confirm callers only pass known ids.
        return dateFormatter.print(checktime) + " - " + causalText.get(causalId);
    }
}
|
import { SaveParticipantRepository } from '@/data/protocols/db'
import { SaveParticipant, SaveParticipantParams } from '@/domain/usecases'
import { ParticipantModel } from '@/domain/models'
/**
 * Database-backed implementation of the SaveParticipant use case.
 * Thin delegation: persistence is entirely handled by the injected repository.
 */
export class DbSaveParticipant implements SaveParticipant {
  constructor (
    private readonly saveParticipantRepository: SaveParticipantRepository
  ) {}

  /** Persists the participant and resolves with the stored model. */
  async save (participant: SaveParticipantParams): Promise<ParticipantModel> {
    return this.saveParticipantRepository.save(participant)
  }
}
|
#!/bin/bash
# Pack every recipe PNG into a single texture atlas using ktech.
input=""
# Build ",file1,file2,..." — the list starts with a stray leading comma.
for f in recipes/*.png; do
input+=","$f
done
# ${input:1} strips that leading comma before use.
echo ${input:1}
ktech ${input:1} --atlas ../mod_client/images/dsmmo_recipes.xml
/*
* File created on Mar 8, 2019
*
* Copyright (c) 2019 <NAME>, Jr
* and others as noted
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.soulwing.jwt.api.jose4j;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.soulwing.jwt.api.Claims;
/**
* Unit tests for {@link Jose4jClaimsBuilder}.
*
* @author <NAME>
*/
public class Jose4jClaimsBuilderTest {
// Fixture values reused across the test methods below.
private static final String ID = "id";
private static final String ISSUER = "issuer";
private static final String SUBJECT = "subject";
private static final String AUDIENCE = "audience";
private static final String OTHER_AUDIENCE = "otherAudience";
private static final String CLAIM_NAME = "claimName";
private static final String STRING_VALUE = "stringValue";
private static final Number NUMBER_VALUE = 42;
private static final Boolean BOOLEAN_VALUE = true;
// Truncated to whole seconds because JWT numeric-date claims carry second precision.
private static final Instant ISSUED_AT = Instant.ofEpochSecond(
Instant.now().getEpochSecond());
private static final Instant EXPIRES_AT = ISSUED_AT.plusSeconds(30);
private Jose4jClaimsBuilder builder = new Jose4jClaimsBuilder();
// --- Registered claims: each setter round-trips through build() ---
@Test
public void testBuildWithId() throws Exception {
assertThat(builder.id(ID).build().id().orElse(null),
is(equalTo(ID)));
}
@Test
public void testBuildWithIssuer() throws Exception {
assertThat(builder.issuer(ISSUER).build().issuer().orElse(null),
is(equalTo(ISSUER)));
}
@Test
public void testBuildWithSubject() throws Exception {
assertThat(builder.subject(SUBJECT).build().subject().orElse(null),
is(equalTo(SUBJECT)));
}
@Test
public void testBuildWithOneAudience() throws Exception {
assertThat(builder.audience(AUDIENCE).build().getAudience().contains(AUDIENCE),
is(true));
}
@Test
public void testBuildWithMultipleAudiences() throws Exception {
final Claims claims = builder.audience(AUDIENCE, OTHER_AUDIENCE).build();
assertThat(claims.getAudience().contains(AUDIENCE), is(true));
assertThat(claims.getAudience().contains(OTHER_AUDIENCE), is(true));
}
@Test
public void testBuildWithIssuedAt() throws Exception {
assertThat(builder.issuedAt(ISSUED_AT).build().issuedAt().orElse(null),
is(equalTo(ISSUED_AT)));
}
@Test
public void testBuildWithExpiresAt() throws Exception {
assertThat(builder.expiresAt(EXPIRES_AT).build().expiresAt().orElse(null),
is(equalTo(EXPIRES_AT)));
}
// --- Custom claims set via set(name, value...) ---
@Test
public void testBuildWithStringClaim() throws Exception {
assertThat(builder.set(CLAIM_NAME, STRING_VALUE).build()
.claim(CLAIM_NAME, String.class).orElse(null), is(equalTo(STRING_VALUE)));
}
@Test
public void testBuildWithNumberClaim() throws Exception {
assertThat(builder.set(CLAIM_NAME, NUMBER_VALUE).build()
.claim(CLAIM_NAME, Integer.class).orElse(null),
is(equalTo(NUMBER_VALUE.intValue())));
}
@Test
public void testBuildWithBooleanClaim() throws Exception {
assertThat(builder.set(CLAIM_NAME, BOOLEAN_VALUE).build()
.claim(CLAIM_NAME, Boolean.class).orElse(null),
is(equalTo(BOOLEAN_VALUE)));
}
// Multiple varargs values are retrievable as an Object[].
@Test
public void testBuildWithVarArgsClaim() throws Exception {
assertThat(builder.set(CLAIM_NAME, STRING_VALUE, NUMBER_VALUE).build()
.claim(CLAIM_NAME, Object[].class).orElse(null),
is(equalTo(new Object[] { STRING_VALUE, NUMBER_VALUE })));
}
@Test
public void testBuildWithArrayClaim() throws Exception {
final String[] values = {STRING_VALUE};
assertThat(builder.set(CLAIM_NAME, values).build()
.claim(CLAIM_NAME, String[].class).orElse(null), is(equalTo(values)));
}
@Test
public void testBuildWithListClaim() throws Exception {
final List values = Collections.singletonList(STRING_VALUE);
assertThat(builder.set(CLAIM_NAME, values).build()
.claim(CLAIM_NAME, List.class).orElse(null), is(equalTo(values)));
}
@Test
public void testBuildWithSetClaim() throws Exception {
final Set values = Collections.singleton(STRING_VALUE);
assertThat(builder.set(CLAIM_NAME, values).build()
.claim(CLAIM_NAME, Set.class).orElse(null), is(equalTo(values)));
}
// A single object value keeps identity...
@Test
public void testBuildWithSingleObjectValue() throws Exception {
final Object value = new Object();
assertThat(builder.set(CLAIM_NAME, value).build()
.claim(CLAIM_NAME, Object.class).orElse(null), is(sameInstance(value)));
}
// ...while multiple object values are collected into a List preserving order.
@Test
public void testBuildWithMultipleObjectValue() throws Exception {
final Object value = new Object();
final Object otherValue = new Object();
final Claims claims = builder.set(CLAIM_NAME, value, otherValue).build();
assertThat(claims.claim(CLAIM_NAME, Object.class).orElse(null),
is(instanceOf(List.class)));
final List<?> actual = (List<?>)
claims.claim(CLAIM_NAME, List.class).orElse(null);
assertThat(actual, contains(value, otherValue));
}
}
#!/bin/bash
set -x
# Create the switch ("swns") and isolation ("nonet") network namespaces,
# each with a live loopback.
/sbin/ip netns add swns
/sbin/ip netns exec swns /sbin/ifconfig lo up
/sbin/ip netns add nonet
/sbin/ip netns exec nonet /sbin/ifconfig lo up
# Calculate the max interface available
default_max=7
# Reconfigure the ports.yaml
yaml_file=/etc/libreswitch/platform/Generic-x86/X86-64/ports.yaml
# Highest ethN index present on the host (numeric sort of /proc/net/dev names).
new_max=$(awk '/eth.*:/ { print $1 }' /proc/net/dev | sed -e 's/://' -e 's/eth//' | sort -h | tail -n 1)
# Patch the port count, then truncate everything after the "ports:" key and
# regenerate one stanza per detected interface.
sed -i -e "s/\(number_ports:.*\)${default_max}/\1${new_max}/" $yaml_file
sed -i '/^ports:/q' $yaml_file
for iface in `seq 1 $new_max` ; do
cat <<EOF >>$yaml_file
- name: eth${iface}
switch_device: 0
switch_device_port: ${iface}
pluggable: False
connector: RJ45
max_speed: 1000
speeds: [1000] # supported speeds in Mb/S
capabilities: [enet1G]
subports: []
supported_modules: [TBD]
EOF
done
# On Appliance machine we need to move the interfaces into the swns
for iface in `seq 1 $new_max` ; do
/sbin/ip link set eth$iface netns swns
done
# If we are in container environment, disable the ttys
if systemd-detect-virt -q -c ; then
systemctl disable getty@
systemctl disable serial-getty@
fi
|
import { RuleConfiguration } from '../../../support/Rule'
/** Option objects accepted by the ESLint core `comma-spacing` rule. */
type Options = ({
before?: boolean // whether a space is required before commas
after?: boolean // whether a space is required after commas
})[]
/** Typed configuration entry binding rule name, plugin and option shape. */
type Configuration = RuleConfiguration<'comma-spacing', 'eslint', Options>
export default Configuration
<gh_stars>0
// De Lijn two-letter colour code -> hex RGB, used for line badges/polylines.
const lijnColors = {"WI":"FFFFFF","TU":"0099AA","RZ":"FF88AA","OR":"EE8822","RO":"BB0022","PA":"991199","MA":"DD0077","LB":"AACCEE","GE":"FFCC11","GR":"229922","MU":"77CCAA","ZW":"000000","KA":"995511","BL":"1199DD","CR":"C5AA77","ZA":"FFCCAA","GD":"FFDD00","BD":"000099","BO":"771133","KI":"444411","DB":"0044BB","LG":"BBDD00","PE":"005555","ST":"8899AA"};
// NOTE(review): likely a typo for "directions"; name kept since other code may reference it.
var direcions;
var routePlanType;
// Route currently drawn on the map (set by showMapLijnRoute).
var currentOvRoute;
// Populated elsewhere with the API's lijnLijnkleurCodesLijst payload.
var knownLijnColors = {};
// Resolve the background/foreground colours for a De Lijn line.
// lijnEntity: operator entity number; lijnNum: line number (substring match).
// Falls back to dark grey on white when the colour table has not been loaded
// yet, or contains no entry for the requested line.
function getLijnColors(lijnEntity, lijnNum)
{
    var fallback = {
        background: "#333333",
        foreground: "#FFFFFF"
    };
    // Bug fix: knownLijnColors starts out as {}, and the original guard
    // tested knownLijnColors.length — undefined for plain objects, so the
    // "not loaded yet" state was never detected and the filter below threw.
    // Guard on the actual payload list instead.
    if (!knownLijnColors || !knownLijnColors.lijnLijnkleurCodesLijst
        || knownLijnColors.lijnLijnkleurCodesLijst.length == 0)
    {
        return fallback;
    }
    var colors = knownLijnColors.lijnLijnkleurCodesLijst;
    var results = colors.filter((e) => e.lijn.entiteitnummer == lijnEntity && e.lijn.lijnnummer.includes(lijnNum));
    if (results.length == 0)
    {
        return fallback;
    }
    // Translate the two-letter colour codes into hex via the lijnColors table.
    return {
        background: "#" + lijnColors[results[0].lijnkleurCodes.achtergrond.code],
        foreground: "#" + lijnColors[results[0].lijnkleurCodes.voorgrond.code]
    };
}
// Invoke `callback` with the device's current position via the Geolocation
// API; log a warning when the browser does not support it.
function getLocation(callback) {
    if (!navigator.geolocation)
    {
        console.warn("Geolocation is not supported by this browser.");
        return;
    }
    navigator.geolocation.getCurrentPosition(callback);
}
// De Lijn open-data subscription key. NOTE(review): "<KEY>" looks like a
// redacted placeholder — a real key must be supplied at deploy time.
const dlApiKey = "<KEY>";
// Asynchronous JSON request against the De Lijn API; invokes success(data)
// or error(xhr) and attaches the subscription key header.
function dl(url, success, error, method = "GET")
{
$.ajax({
url: url,
headers: {"Ocp-Apim-Subscription-Key": dlApiKey},
type: method,
dataType: "json",
async: true,
success: (e) => {
success(e);
},
error: (e) => {
console.error("[tm] De Lijn request could not succeed: " + e);
error(e);
}
});
}
// Synchronous variant of dl(): blocks until the request finishes and returns
// the parsed JSON, or null on failure.
// NOTE(review): synchronous XHR on the main thread is deprecated and freezes
// the UI; callers should migrate to dl()/promises when feasible.
function dlSync(url, method = "GET")
{
var value = null;
$.ajax({
url: url,
headers: {"Ocp-Apim-Subscription-Key": dlApiKey},
type: method,
dataType: "json",
async: false,
success: (e) => {
value = e;
},
error: (e) => {
console.error("[tm] De Lijn request could not succeed: " + e);
}
});
return value;
}
// Geocoder backend for stop search: queries De Lijn's stop-search endpoint
// (synchronously) and converts each stop ("halte") into a geocoder feature.
// Returns undefined when the request fails.
function lijnGeocoder(query)
{
    var data = dlSync("https://api.delijn.be/DLZoekOpenData/v1/zoek/haltes/" + query);
    if (!data)
        return;
    return data.haltes.map((halte) => ({
        place_name: "Halte " + halte.omschrijving + " (" + halte.haltenummer + ")",
        center: {lng: halte.geoCoordinaat.longitude, lat: halte.geoCoordinaat.latitude},
        place_type: ['place'],
        // Keep the raw stop record for later use by callers.
        lijn_data: halte
    }));
}
// Draw a public-transport route on the map: one coloured polyline per travel
// step — grey for walking legs, the line's own colour for vehicle legs.
// Also remembers the route in the currentOvRoute global.
function showMapLijnRoute(route)
{
    currentOvRoute = route;
    var segments = [];
    for (const step of route.reiswegStappen)
    {
        // Waiting steps have no geometry to draw.
        if (step.type == "WACHTEN")
            continue;
        const path = step.geoCoordinaten.map((geo) => [geo.longitude, geo.latitude]);
        const segmentColor = step.type == "WANDELEN"
            ? "#777"
            : getLijnColors(step.lijnrichting.entiteitnummer, step.lijnrichting.lijnnummer).background;
        segments.push({
            coords: path,
            color: segmentColor
        });
    }
    setMapLayerCoords("ov-layer", segments);
}
// Fetch each friend's planned route from the backend and render one card per
// friend (name, time, origin -> destination, transport mode, "Bekijk" button)
// into the #friend-routes list. Friends without a route are skipped.
function loadFriendRoutes()
{
var el = document.getElementById("friend-routes");
el.innerHTML = null;
$.ajax({
url: "/api/user/getFriendRoutes",
type: "POST",
async: true,
success: (e) => {
var friends = e.data;
for(var i = 0; i < friends.length; i++)
{
var friend = friends[i];
if (!friend.currentRoute)
continue;
// currentRoute is stored as a JSON string: { type, data: {...} }.
var friendRoute = JSON.parse(friend.currentRoute);
if (friendRoute.type == "none")
continue;
console.log(friendRoute);
var elFriend = document.createElement("li");
var elFriendName = document.createElement("h4");
elFriendName.innerText = friend.firstName + " " + friend.lastName;
elFriend.appendChild(elFriendName);
var elFriendRouteTime = document.createElement("h5");
elFriendRouteTime.innerText = (friendRoute.data.today ? "Vandaag" : "Morgen")
+ " om " + friendRoute.data.time;
elFriend.appendChild(elFriendRouteTime);
var elFriendRouteDescription = document.createElement("p");
elFriendRouteDescription.innerHTML = friendRoute.data.origin
+ " <i class='fas fa-long-arrow-alt-right'></i> "
+ friendRoute.data.destination;
elFriend.appendChild(elFriendRouteDescription);
var elFriendRouteType = document.createElement("h5");
elFriendRouteType.innerText = (friendRoute.type == "car" ? "Met de auto." : "Met het openbaar veroer.");
elFriend.appendChild(elFriendRouteType);
var elFriendSelect = document.createElement("button");
elFriendSelect.classList.add("btn2");
elFriendSelect.innerText = "Bekijk";
// Helper function capture: because the loop variable is `var`-scoped,
// binding onclick directly would see the last friend's id; passing the
// id through a function parameter freezes the correct value.
function setClickEvent(userId)
{
elFriendSelect.onclick = function(){
loadRoute(userId)
};
}
setClickEvent(friend.id);
elFriend.appendChild(elFriendSelect);
el.appendChild(elFriend);
}
},
error: (e) => {
console.error("Could not load route", e);
}
});
}
// Cache of stops already fetched, keyed by stop id (populated elsewhere).
var loadedStops = {};
var map;
// Default map centre. NOTE(review): these are Portland, OR coordinates —
// confirm this placeholder is overwritten before the map is shown.
var start = [-122.662323, 45.523751];
// Route alternatives of the last search, keyed by result index.
var lijnRoutes = {};
// The two stops picked in the route planner UI.
var selectedStop1, selectedStop2;
// Guards the "search automatically on first selection" behaviour below.
var isFirstLijnSearch = true;
// Plans a De Lijn (Flemish public transport) trip between selectedStop1 and
// selectedStop2 and renders every returned route option into the
// #route-ov-lijn-results list. The "bestpassend" (best-matching) route is
// inserted first, drawn on the map and saved as the user's current route.
//
// cancelIfNotFirst: when true, the search only runs if no search ran before
// (used by the geocoder 'result' handlers so that restoring a saved route
// triggers at most one automatic search).
function showLijnResults(cancelIfNotFirst = false)
{
    // Both stops are required; optionally only allow the first automatic run.
    if (!selectedStop1 || !selectedStop2 || (cancelIfNotFirst && !isFirstLijnSearch))
        return;
    isFirstLijnSearch = false;
    var resultsElement = document.getElementById("route-ov-lijn-results");
    resultsElement.innerHTML = null;
    setPageLoading(true);
    // Trip options from the form: time, arrive-by vs depart-at, today vs tomorrow.
    var time = document.getElementById("route-ov-go-time").value;
    var arrive = document.getElementById("route-ov-period-go-type-arrive").checked;
    var today = document.getElementById("route-ov-go-period-today").checked;
    /*console.log("time: " + time);
    console.log(arrive ? "arrive" : "depart");
    console.log(today ? "today" : "tomorrow");*/
    //selectedStop.entiteitnummer
    //selectedStop.haltenummer -> {latitude longitude}
    //selectedStop.omschrijving
    //selectedStop.geoCoordinaat
    // Build the API timestamp: start of today (or tomorrow) plus the hh:mm
    // value from the time input.
    var startOfDay = moment().startOf('day');
    var timeMoment = moment(time,"hh:mm");
    startOfDay = startOfDay.add(timeMoment.hours(), 'hours').add(timeMoment.minutes(), 'minutes');
    if (!today)
        startOfDay = startOfDay.add(1, 'days');
    // NOTE(review): re-declares `timeMoment` (harmless with `var`), and the
    // name now holds a formatted string rather than a moment object.
    var timeMoment = startOfDay.format("YYYY-MM-DDTHH:mm:ss");
    console.log(timeMoment);
    // Route-planner endpoint: origin and destination are "lat,lng" pairs.
    var url = "https://api.delijn.be/DLKernOpenData/api/v1/routeplan/"
    + selectedStop1.geoCoordinaat.latitude + "," + selectedStop1.geoCoordinaat.longitude + "/"
    + selectedStop2.geoCoordinaat.latitude + "," + selectedStop2.geoCoordinaat.longitude
    + "?aanvraagType=INITIEEL" + "&tijdstip=" + timeMoment + "&vertrekAankomst=" + (arrive ? "AANKOMST" : "VERTREK");
    dl(url, (data) => {
        lijnRoutes = {};
        // Collect "_entity_line" keys for every De Lijn vehicle step so all
        // line colours can be fetched in a single request.
        var lijnColorQueryKeys = "";
        for(var i = 0; i < data.reiswegen.length; i++)
        {
            var steps = data.reiswegen[i].reiswegStappen;
            for(var j = 0; j < steps.length; j++)
            {
                var step = steps[j];
                if (step.type == "VOERTUIG" && step.maatschappijType == "DE_LIJN")
                {
                    lijnColorQueryKeys += "_" + step.lijnrichting.entiteitnummer + "_" + step.lijnrichting.lijnnummer;
                }
            }
        }
        if (lijnColorQueryKeys.length > 0) // Remove '_'
            lijnColorQueryKeys = lijnColorQueryKeys.substring(1);
        // Renders all route options; invoked after the colour lookup
        // finishes, whether it succeeded or not.
        var showLijnRoutes = function()
        {
            var bestRoute = null;
            for(var i = 0; i < data.reiswegen.length; i++)
            {
                var route = data.reiswegen[i];
                lijnRoutes[i] = route;
                // NOTE(review): moment's year parse token is uppercase
                // "YYYY"; confirm these lowercase "yyyy-..." formats parse
                // as intended.
                var routeStartTime = moment(route.duurtijd.start, "yyyy-MM-DDTHH:mm:ss").format("HH:mm");
                var routeEndTime = moment(route.duurtijd.einde, "yyyy-MM-DDTHH:mm:ss").format("HH:mm");
                if (!route.reiswegStappen)
                    continue;
                if (route.bestpassend)
                    bestRoute = route;
                var el = document.createElement("li");
                el.style["position"] = "relative";
                el.classList.add("route-ov-lijn-result");
                var elDescription = document.createElement("h4");
                elDescription.innerText = routeStartTime + " - " + routeEndTime;
                if (route.bestpassend)
                    elDescription.innerHTML += " <i class='fas fa-star'></i> Beste";
                el.appendChild(elDescription);
                var elSelectRoute = document.createElement("button");
                elSelectRoute.innerText = "Selecteer";
                elSelectRoute.classList.add("btn2");
                elSelectRoute.setAttribute("href","#");
                elSelectRoute.style["position"] = "absolute";
                elSelectRoute.style["top"] = "5px";
                elSelectRoute.style["right"] = "5px";
                // Captures `route` per iteration: `var` is function-scoped,
                // so a plain closure over `route` would see the last value.
                function setClickEvent(clickRoute)
                {
                    elSelectRoute.onclick = () => {
                        showMapLijnRoute(clickRoute);
                        saveRoute();
                    };
                }
                setClickEvent(route);
                el.appendChild(elSelectRoute);
                // One list entry per trip step: walk / ride / wait.
                var elStepsList = document.createElement("ul");
                for(var j = 0; j < route.reiswegStappen.length; j++)
                {
                    var step = route.reiswegStappen[j];
                    var stepType = step.type;
                    var stepStartMoment = moment(step.duurtijd.start, "yyyy-MM-DDTHH:mm:ss")
                    var stepEndMoment = moment(step.duurtijd.einde, "yyyy-MM-DDTHH:mm:ss")
                    var stepStartTime = stepStartMoment.format("HH:mm");
                    var stepEndTime = stepEndMoment.format("HH:mm");
                    var stepMinutes = stepEndMoment.diff(stepStartMoment,"minutes");
                    var elStep = document.createElement("li");
                    var elStepTitle = document.createElement("h6");
                    elStepTitle.innerText = stepStartTime + " - " + stepEndTime;
                    elStep.appendChild(elStepTitle);
                    var elStelDescription = document.createElement("p");
                    if (stepType == "WANDELEN")
                    {
                        // Walking leg: distance in metres.
                        elStelDescription.innerText = step.afstand + "m wandelen";
                    }
                    else if (stepType == "VOERTUIG")
                    {
                        // Vehicle leg: line number rendered in its official colours.
                        var colors = getLijnColors(step.lijnrichting.entiteitnummer, step.lijnrichting.lijnnummer);
                        var lijnTagStyles = "background-color: " + colors.background + "; color: " + colors.foreground + ";";
                        var lijnNumber = step.lijnrichting.lijnnummer.substring(1);
                        var coloredLijnTag = "<span class='lijn-number-tag' style='" + lijnTagStyles + "'>" + lijnNumber + "</span>";
                        elStelDescription.innerHTML = "Met de " + coloredLijnTag;
                    }
                    else if (stepType == "WACHTEN")
                    {
                        // Waiting leg: duration in minutes.
                        elStelDescription.innerText = stepMinutes + " min wachten";
                    }
                    elStep.appendChild(elStelDescription);
                    elStepsList.appendChild(elStep);
                }
                el.appendChild(elStepsList);
                // The best-matching route goes to the top of the list.
                if (route.bestpassend)
                    resultsElement.insertBefore(el,resultsElement.firstChild);
                else
                    resultsElement.appendChild(el);
            }
            if (bestRoute)
            {
                showMapLijnRoute(bestRoute);
                saveRoute();
            }
            document.getElementById("route-ov-seach-results").style.display = "block";
        }
        // Fetch the line colours, then render. Rendering proceeds even when
        // the colour lookup fails (colours just stay at their last value).
        dl("https://api.delijn.be/DLKernOpenData/api/v1/lijnen/lijst/" + lijnColorQueryKeys + "/lijnkleuren", (dataColors) => {
            knownLijnColors = dataColors;
            setPageLoading(false);
            showLijnRoutes();
        }, (err) => {
            setPageLoading(false);
            showLijnRoutes();
            console.log("Could not get colors.");
        });
    }, (err) => {
        setPageLoading(false);
        console.log("Could not get route.");
    });
}
// Page initialisation: builds the Mapbox map, the two De Lijn stop
// geocoders, the car directions control, and wires up nearby-stop loading
// and geolocation.
$(() => {
    // HTML template for a De Lijn stop marker; placeholders replaced per stop.
    var markerTemplate = document.getElementById("marker-template").innerHTML;
    // Creates a DOM element for one De Lijn stop marker.
    function createDlMarker(forStop)
    {
        var el = document.createElement('div');
        el.innerHTML = markerTemplate
            .replace("{stop.id}", forStop.id)
            .replace("{stop.name}", forStop.naam);
        el.className = 'marker';
        return el;
    }
    mapboxgl.accessToken = '<KEY>';
    // Map initially centred on Belgium.
    map = new mapboxgl.Map({
        container: 'map',
        style: 'mapbox://styles/mapbox/streets-v11',
        center: [4.4699 , 50.50399],
        zoom: 7.2
    });
    // Restore the user's own route and the friends' routes once the map is ready.
    map.on("load", () => {
        loadRoute();
        loadFriendRoutes();
    });
    // Departure-stop search box; lijnGeocoder mixes De Lijn stops into the
    // Mapbox geocoding results.
    var mapLijnPlannerGeocoder1 = new MapboxGeocoder({
        accessToken: mapboxgl.accessToken,
        localGeocoder: lijnGeocoder,
        zoom: 14,
        placeholder: "Vertrekhalte",
        mapboxgl: mapboxgl,
        countries: 'be'
    });
    $("#map-lijn-planner-geocoder1").append(mapLijnPlannerGeocoder1.onAdd(map));
    mapLijnPlannerGeocoder1.on('result', function(result) {
        selectedStop1 = result.result["lijn_data"];
        showLijnResults(true);
    });
    // Arrival-stop search box.
    var mapLijnPlannerGeocoder2 = new MapboxGeocoder({
        accessToken: mapboxgl.accessToken,
        localGeocoder: lijnGeocoder,
        zoom: 14,
        placeholder: "Aankomsthalte",
        mapboxgl: mapboxgl,
        countries: 'be'
    });
    $("#map-lijn-planner-geocoder2").append(mapLijnPlannerGeocoder2.onAdd(map));
    mapLijnPlannerGeocoder2.on('result', function(result) {
        selectedStop2 = result.result["lijn_data"];
        showLijnResults(true);
    });
    // Car route planner; added to the map lazily by planSelectRouteType.
    directions = new MapboxDirections({
        accessToken: mapboxgl.accessToken,
        unit: 'metric'
    });
    directions.actions.eventSubscribe("route", () => {
        //saveRoute();
        try {
            directions.mapState();
        }
        catch(e){}
    });
    // Loads De Lijn stops near the viewport centre once zoomed in far
    // enough; each stop becomes a marker at most once (tracked in loadedStops).
    var getStopsFunc = (result) => {
        const center = map.getCenter();
        const zoom = map.getZoom();
        if (zoom > 14.0)
        {
            const region = 3000;
            console.log("region: " + region + ", zoom: " + zoom);
            // NOTE(review): `region` is appended after "?" without a
            // parameter name — confirm the API accepts a bare query value.
            dl("https://api.delijn.be/DLKernOpenData/v1/beta/haltes/indebuurt/" + center.lat + "," + center.lng + "?" + region, (result) => {
                for(var i = 0; i < result.haltes.length; i++)
                {
                    const stop = result.haltes[i];
                    const stopId = stop.id;
                    if (stopId in loadedStops)
                        continue;
                    loadedStops[stopId] = stop;
                    /*var el = document.createElement('div');
                    el.innerHTML = "<i class='fas fa-map-marker'></i>";//result.haltes[i].naam;
                    el.className = 'marker';*/
                    var el = createDlMarker(stop);
                    // make a marker for each feature and add to the map
                    var lngLat = result.haltes[i].geoCoordinaat;
                    new mapboxgl.Marker(el)
                        .setLngLat({lng: lngLat.longitude, lat: lngLat.latitude})
                        .setPopup(new mapboxgl.Popup({ offset: 25 }) // add popups
                            .setHTML("id: " + result.haltes[i].id))
                        .addTo(map)
                }
            }, (ex) => { });
        }
    };
    map.on("dragend", getStopsFunc);
    map.on("zoomend", getStopsFunc);
    // Fly to the user's own position when it is accurate enough (<= 10 km).
    getLocation((position) => {
        if (position.coords.accuracy <= 10000)
        {
            map.flyTo({
                center: {lng: position.coords.longitude, lat: position.coords.latitude},
                zoom: 12.0
            })
        }
    });
    console.log("[tm] mapworks loaded. o/");
});
// Removes all features from the named layer's GeoJSON source.
// Safe to call for layers that were never created.
function clearMapLayerCoords(layerName)
{
    if (map.getSource(layerName))
    {
        // An empty FeatureCollection is the valid way to clear a GeoJSON
        // source. The previous setData({}) passed a non-GeoJSON object,
        // which Mapbox GL rejects (old data stays visible, error logged).
        map.getSource(layerName).setData({
            type: "FeatureCollection",
            features: []
        });
    }
}
// Draws one or more coloured line strings on the map layer `layerName`.
// The layer is created on first use and its data replaced on later calls.
// `coordinatesColorPairs`: array of { coords: [[lng,lat],...], color: "#rgb" }.
function setMapLayerCoords(layerName, coordinatesColorPairs)
{
    // Build one Feature per coords/colour pair; the colour travels in the
    // feature properties so the paint rule can read it with ["get","color"].
    var featureCollection = {
        type: "FeatureCollection",
        features: coordinatesColorPairs.map(function (pair) {
            return {
                type: "Feature",
                properties: { color: pair.color },
                geometry: {
                    type: "LineString",
                    coordinates: pair.coords
                }
            };
        })
    };
    var existingSource = map.getSource(layerName);
    if (existingSource)
    {
        existingSource.setData(featureCollection);
        return;
    }
    // First use: create the layer together with its inline GeoJSON source.
    map.addLayer({
        "id": layerName,
        "type": "line",
        "source": {
            "type": "geojson",
            "data": featureCollection
        },
        "layout": {
            "line-join": "round",
            "line-cap": "round"
        },
        "paint": {
            "line-width": 8,
            "line-color": ["get","color"]
        }
    });
}
// Persists the currently planned route (as produced by getRoute()) to the
// server for the logged-in user. Fire-and-forget: failures are only logged.
function saveRoute()
{
    console.log("Saving route for " + routePlanType);
    var payload = {
        route: JSON.stringify(getRoute())
    };
    $.ajax({
        url: "/api/user/setRoute",
        type: "POST",
        async: true,
        data: payload,
        success: function () { },
        error: function (e) {
            console.error("Could not save route",e);
        }
    });
}
// Builds a serialisable snapshot of the route currently planned in the UI.
// Shape depends on routePlanType:
//   null  -> { type: "none", data: null }
//   "car" -> origin/destination from the Mapbox directions inputs plus the
//            departure time and today-flag
//   "ov"  -> stop names from both De Lijn geocoders, the time options, the
//            chosen OV route and the cached line colours
// Any other routePlanType yields undefined (mirrors the original behaviour).
function getRoute()
{
    if (routePlanType == null)
    {
        return { type: "none", data: null };
    }
    if (routePlanType == "car")
    {
        // The directions widget renders two inputs: [0] origin, [1] destination.
        var inputs = document.querySelectorAll(".mapboxgl-ctrl-directions input");
        return {
            type: "car",
            data: {
                origin: inputs[0].value,
                destination: inputs[1].value,
                time: document.getElementById("route-car-go-time").value,
                today: document.getElementById("route-car-go-period-today").checked
            }
        };
    }
    if (routePlanType == "ov")
    {
        return {
            type: "ov",
            data: {
                origin: document.querySelector("#map-lijn-planner-geocoder1 input").value,
                destination: document.querySelector("#map-lijn-planner-geocoder2 input").value,
                arrive: document.getElementById("route-ov-period-go-type-arrive").checked,
                time: document.getElementById("route-ov-go-time").value,
                today: document.getElementById("route-ov-go-period-today").checked,
                ovRoute: currentOvRoute,
                ovLijnColors: knownLijnColors
            }
        };
    }
}
// Applies a route snapshot (the inverse of getRoute()): selects the matching
// search tab, restores the form fields and re-plots the route on the map.
function setRoute(route)
{
    console.log("Setting route ", route);
    if (!route || route.type == "none")
    {
        selectTab('searchmenu-tab','searchmenu-tab-no-route');
        planSelectRouteType(null);
        return;
    }
    if (route.type == "car")
    {
        selectTab('searchmenu-tab','searchmenu-tab-car');
        planSelectRouteType('car');
        directions.setOrigin(route.data.origin);
        directions.setDestination(route.data.destination);
        document.getElementById("route-car-go-time").value = route.data.time;
        document.getElementById("route-car-go-period-today").checked = route.data.today;
        return;
    }
    if (route.type == "ov")
    {
        selectTab('searchmenu-tab','searchmenu-tab-ov');
        planSelectRouteType('ov');
        document.querySelector("#map-lijn-planner-geocoder1 input").value = route.data.origin;
        document.querySelector("#map-lijn-planner-geocoder2 input").value = route.data.destination;
        document.getElementById("route-ov-period-go-type-arrive").checked = route.data.arrive;
        document.getElementById("route-ov-go-time").value = route.data.time;
        document.getElementById("route-ov-go-period-today").checked = route.data.today;
        knownLijnColors = route.data.ovLijnColors;
        showMapLijnRoute(route.data.ovRoute);
    }
}
// Fetches the stored route for userId (or the current user when null)
// from the server and applies it to the UI via setRoute().
function loadRoute(userId = null)
{
    $.ajax({
        url: "/api/user/getRoute",
        type: "POST",
        async: true,
        data: { userId: userId },
        success: function (response) {
            setRoute(response.data);
        },
        error: function (e) {
            console.error("Could not load route", e);
        }
    });
}
// Switches the route planner between "car", "ov" and no route (null).
// Shows or hides the Mapbox directions control (creating it on first car
// use) and persists the cleared state when the type is null.
function planSelectRouteType(type)
{
    routePlanType = type;
    var directionsCtrl = document.getElementsByClassName("mapboxgl-ctrl-directions")[0];
    if (directionsCtrl != null)
    {
        // Existing control: visible only while planning a car route.
        directionsCtrl.style.display = (type == "car") ? "inherit" : "none";
    }
    if (type == "car" && !directionsCtrl)
    {
        // First car plan: the control does not exist yet, attach it now.
        map.addControl(directions, 'top-left');
    }
    else if (type == null)
    {
        saveRoute();
    }
}
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module AIPlatform
module V1beta1
# Metadata describing the Model's input and output for explanation.
# @!attribute [rw] inputs
# @return [::Google::Protobuf::Map{::String => ::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata}]
# Required. Map from feature names to feature input metadata. Keys are the name of the
# features. Values are the specification of the feature.
#
# An empty InputMetadata is valid. It describes a text feature which has the
# name specified as the key in {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata#inputs ExplanationMetadata.inputs}. The baseline
# of the empty feature is chosen by Vertex AI.
#
# For Vertex AI-provided Tensorflow images, the key can be any friendly
# name of the feature. Once specified,
# {::Google::Cloud::AIPlatform::V1beta1::Attribution#feature_attributions featureAttributions} are keyed by
# this key (if not grouped with another feature).
#
# For custom images, the key must match with the key in
# {::Google::Cloud::AIPlatform::V1beta1::ExplainRequest#instances instance}.
# @!attribute [rw] outputs
# @return [::Google::Protobuf::Map{::String => ::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::OutputMetadata}]
# Required. Map from output names to output metadata.
#
# For Vertex AI-provided Tensorflow images, keys can be any user defined
# string that consists of any UTF-8 characters.
#
# For custom images, keys are the name of the output field in the prediction
# to be explained.
#
# Currently only one key is allowed.
# @!attribute [rw] feature_attributions_schema_uri
# @return [::String]
# Points to a YAML file stored on Google Cloud Storage describing the format
# of the {::Google::Cloud::AIPlatform::V1beta1::Attribution#feature_attributions feature attributions}.
# The schema is defined as an OpenAPI 3.0.2 [Schema
# Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject).
# AutoML tabular Models always have this field populated by Vertex AI.
# Note: The URI given on output may be different, including the URI scheme,
# than the one given on input. The output URI will point to a location where
# the user has read-only access.
class ExplanationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Metadata of the input of a feature.
#
# Fields other than {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#input_baselines InputMetadata.input_baselines} are applicable only
# for Models that are using Vertex AI-provided images for Tensorflow.
# @!attribute [rw] input_baselines
# @return [::Array<::Google::Protobuf::Value>]
# Baseline inputs for this feature.
#
# If no baseline is specified, Vertex AI chooses the baseline for this
# feature. If multiple baselines are specified, Vertex AI returns the
# average attributions across them in {::Google::Cloud::AIPlatform::V1beta1::Attribution#feature_attributions Attribution.feature_attributions}.
#
# For Vertex AI-provided Tensorflow images (both 1.x and 2.x), the shape
# of each baseline must match the shape of the input tensor. If a scalar is
# provided, we broadcast to the same shape as the input tensor.
#
# For custom images, the element of the baselines must be in the same
# format as the feature's input in the
# {::Google::Cloud::AIPlatform::V1beta1::ExplainRequest#instances instance}[]. The schema of any single instance
# may be specified via Endpoint's DeployedModels'
# [Model's][google.cloud.aiplatform.v1beta1.DeployedModel.model]
# [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata]
# {::Google::Cloud::AIPlatform::V1beta1::PredictSchemata#instance_schema_uri instance_schema_uri}.
# @!attribute [rw] input_tensor_name
# @return [::String]
# Name of the input tensor for this feature. Required and is only
# applicable to Vertex AI-provided images for Tensorflow.
# @!attribute [rw] encoding
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Encoding]
# Defines how the feature is encoded into the input tensor. Defaults to
# IDENTITY.
# @!attribute [rw] modality
# @return [::String]
# Modality of the feature. Valid values are: numeric, image. Defaults to
# numeric.
# @!attribute [rw] feature_value_domain
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::FeatureValueDomain]
# The domain details of the input feature value. Like min/max, original
# mean or standard deviation if normalized.
# @!attribute [rw] indices_tensor_name
# @return [::String]
# Specifies the index of the values of the input tensor.
# Required when the input tensor is a sparse representation. Refer to
# Tensorflow documentation for more details:
# https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor.
# @!attribute [rw] dense_shape_tensor_name
# @return [::String]
# Specifies the shape of the values of the input if the input is a sparse
# representation. Refer to Tensorflow documentation for more details:
# https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor.
# @!attribute [rw] index_feature_mapping
# @return [::Array<::String>]
# A list of feature names for each index in the input tensor.
# Required when the input {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#encoding InputMetadata.encoding} is BAG_OF_FEATURES,
# BAG_OF_FEATURES_SPARSE, INDICATOR.
# @!attribute [rw] encoded_tensor_name
# @return [::String]
# Encoded tensor is a transformation of the input tensor. Must be provided
# if choosing
# {::Google::Cloud::AIPlatform::V1beta1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution}
# or {::Google::Cloud::AIPlatform::V1beta1::ExplanationParameters#xrai_attribution XRAI attribution} and the
# input tensor is not differentiable.
#
# An encoded tensor is generated if the input tensor is encoded by a lookup
# table.
# @!attribute [rw] encoded_baselines
# @return [::Array<::Google::Protobuf::Value>]
# A list of baselines for the encoded tensor.
#
# The shape of each baseline should match the shape of the encoded tensor.
# If a scalar is provided, Vertex AI broadcasts to the same shape as the
# encoded tensor.
# @!attribute [rw] visualization
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Visualization]
# Visualization configurations for image explanation.
# @!attribute [rw] group_name
# @return [::String]
# Name of the group that the input belongs to. Features with the same group
# name will be treated as one feature when computing attributions. Features
# grouped together can have different shapes in value. If provided, there
# will be one single attribution generated in
# {::Google::Cloud::AIPlatform::V1beta1::Attribution#feature_attributions Attribution.feature_attributions}, keyed by the group name.
class InputMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Domain details of the input feature value. Provides numeric information
# about the feature, such as its range (min, max). If the feature has been
# pre-processed, for example with z-scoring, then it provides information
# about how to recover the original feature. For example, if the input
# feature is an image and it has been pre-processed to obtain 0-mean and
# stddev = 1 values, then original_mean, and original_stddev refer to the
# mean and stddev of the original feature (e.g. image tensor) from which
# input feature (with mean = 0 and stddev = 1) was obtained.
# @!attribute [rw] min_value
# @return [::Float]
# The minimum permissible value for this feature.
# @!attribute [rw] max_value
# @return [::Float]
# The maximum permissible value for this feature.
# @!attribute [rw] original_mean
# @return [::Float]
# If this input feature has been normalized to a mean value of 0,
# the original_mean specifies the mean value of the domain prior to
# normalization.
# @!attribute [rw] original_stddev
# @return [::Float]
# If this input feature has been normalized to a standard deviation of
# 1.0, the original_stddev specifies the standard deviation of the domain
# prior to normalization.
# Generated message shell: the fields are declared via the @!attribute
# docs above; the protobuf accessors are provided by MessageExts.
class FeatureValueDomain
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Visualization configurations for image explanation.
# @!attribute [rw] type
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Visualization::Type]
# Type of the image visualization. Only applicable to
# {::Google::Cloud::AIPlatform::V1beta1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution}.
# OUTLINES shows regions of attribution, while PIXELS shows per-pixel
# attribution. Defaults to OUTLINES.
# @!attribute [rw] polarity
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Visualization::Polarity]
# Whether to only highlight pixels with positive contributions, negative
# or both. Defaults to POSITIVE.
# @!attribute [rw] color_map
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Visualization::ColorMap]
# The color scheme used for the highlighted areas.
#
# Defaults to PINK_GREEN for
# {::Google::Cloud::AIPlatform::V1beta1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution},
# which shows positive attributions in green and negative in pink.
#
# Defaults to VIRIDIS for
# {::Google::Cloud::AIPlatform::V1beta1::ExplanationParameters#xrai_attribution XRAI attribution}, which
# highlights the most influential regions in yellow and the least
# influential in blue.
# @!attribute [rw] clip_percent_upperbound
# @return [::Float]
# Excludes attributions above the specified percentile from the
# highlighted areas. Using the clip_percent_upperbound and
# clip_percent_lowerbound together can be useful for filtering out noise
# and making it easier to see areas of strong attribution. Defaults to
# 99.9.
# @!attribute [rw] clip_percent_lowerbound
# @return [::Float]
# Excludes attributions below the specified percentile, from the
# highlighted areas. Defaults to 62.
# @!attribute [rw] overlay_type
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Visualization::OverlayType]
# How the original image is displayed in the visualization.
# Adjusting the overlay can help increase visual clarity if the original
# image makes it difficult to view the visualization. Defaults to NONE.
class Visualization
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Type of the image visualization. Only applicable to
# {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata::Visualization#type Integrated Gradients attribution}.
# Visualization#type defaults to OUTLINES when unset.
module Type
# Should not be used.
TYPE_UNSPECIFIED = 0

# Shows which pixel contributed to the image prediction.
PIXELS = 1

# Shows which region contributed to the image prediction by outlining
# the region.
OUTLINES = 2
end
# Whether to only highlight pixels with positive contributions, negative
# or both. Defaults to POSITIVE.
module Polarity
# Default value. This is the same as POSITIVE.
POLARITY_UNSPECIFIED = 0

# Highlights the pixels/outlines that were most influential to the
# model's prediction.
POSITIVE = 1

# Setting polarity to negative highlights areas that do not lead to
# the model's current prediction.
NEGATIVE = 2

# Shows both positive and negative attributions.
BOTH = 3
end
# The color scheme used for highlighting areas.
# Note: the values are not declared in numeric order (RED_GREEN = 6
# precedes PINK_WHITE_GREEN = 5); the order comes from the code generator.
module ColorMap
# Should not be used.
COLOR_MAP_UNSPECIFIED = 0

# Positive: green. Negative: pink.
PINK_GREEN = 1

# Viridis color map: A perceptually uniform color mapping which is
# easier to see by those with colorblindness and progresses from yellow
# to green to blue. Positive: yellow. Negative: blue.
VIRIDIS = 2

# Positive: red. Negative: red.
RED = 3

# Positive: green. Negative: green.
GREEN = 4

# Positive: green. Negative: red.
RED_GREEN = 6

# PiYG diverging palette (pink through white to green).
PINK_WHITE_GREEN = 5
end
# How the original image is displayed in the visualization.
# Visualization#overlay_type defaults to NONE when unset.
module OverlayType
# Default value. This is the same as NONE.
OVERLAY_TYPE_UNSPECIFIED = 0

# No overlay.
NONE = 1

# The attributions are shown on top of the original image.
ORIGINAL = 2

# The attributions are shown on top of grayscaled version of the
# original image.
GRAYSCALE = 3

# The attributions are used as a mask to reveal predictive parts of
# the image and hide the un-predictive parts.
MASK_BLACK = 4
end
end
# Defines how a feature is encoded. Defaults to IDENTITY.
# (Generated enum; each value documents the tensor layout it describes.)
module Encoding
# Default value. This is the same as IDENTITY.
ENCODING_UNSPECIFIED = 0

# The tensor represents one feature.
IDENTITY = 1

# The tensor represents a bag of features where each index maps to
# a feature. {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#index_feature_mapping InputMetadata.index_feature_mapping} must be provided for
# this encoding. For example:
# ```
# input = [27, 6.0, 150]
# index_feature_mapping = ["age", "height", "weight"]
# ```
BAG_OF_FEATURES = 2

# The tensor represents a bag of features where each index maps to a
# feature. Zero values in the tensor indicates feature being
# non-existent. {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#index_feature_mapping InputMetadata.index_feature_mapping} must be provided
# for this encoding. For example:
# ```
# input = [2, 0, 5, 0, 1]
# index_feature_mapping = ["a", "b", "c", "d", "e"]
# ```
BAG_OF_FEATURES_SPARSE = 3

# The tensor is a list of binaries representing whether a feature exists
# or not (1 indicates existence). {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#index_feature_mapping InputMetadata.index_feature_mapping}
# must be provided for this encoding. For example:
# ```
# input = [1, 0, 1, 0, 1]
# index_feature_mapping = ["a", "b", "c", "d", "e"]
# ```
INDICATOR = 4

# The tensor is encoded into a 1-dimensional array represented by an
# encoded tensor. {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#encoded_tensor_name InputMetadata.encoded_tensor_name} must be provided
# for this encoding. For example:
# ```
# input = ["This", "is", "a", "test", "."]
# encoded = [0.1, 0.2, 0.3, 0.4, 0.5]
# ```
COMBINED_EMBEDDING = 5

# Select this encoding when the input tensor is encoded into a
# 2-dimensional array represented by an encoded tensor.
# {::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata#encoded_tensor_name InputMetadata.encoded_tensor_name} must be provided for this
# encoding. The first dimension of the encoded tensor's shape is the same
# as the input tensor's shape. For example:
# ```
# input = ["This", "is", "a", "test", "."]
# encoded = [[0.1, 0.2, 0.3, 0.4, 0.5],
#            [0.2, 0.1, 0.4, 0.3, 0.5],
#            [0.5, 0.1, 0.3, 0.5, 0.4],
#            [0.5, 0.3, 0.1, 0.2, 0.4],
#            [0.4, 0.3, 0.2, 0.5, 0.1]]
# ```
CONCAT_EMBEDDING = 6
end
end
# Metadata of the prediction output to be explained.
# @!attribute [rw] index_display_name_mapping
# @return [::Google::Protobuf::Value]
# Static mapping between the index and display name.
#
# Use this if the outputs are a deterministic n-dimensional array, e.g. a
# list of scores of all the classes in a pre-defined order for a
# multi-classification Model. It's not feasible if the outputs are
# non-deterministic, e.g. the Model produces top-k classes or sorts the
# outputs by their values.
#
# The shape of the value must be an n-dimensional array of strings. The
# number of dimensions must match that of the outputs to be explained.
# The {::Google::Cloud::AIPlatform::V1beta1::Attribution#output_display_name Attribution.output_display_name} is populated by locating in the
# mapping with {::Google::Cloud::AIPlatform::V1beta1::Attribution#output_index Attribution.output_index}.
# @!attribute [rw] display_name_mapping_key
# @return [::String]
# Specify a field name in the prediction to look for the display name.
#
# Use this if the prediction contains the display names for the outputs.
#
# The display names in the prediction must have the same shape of the
# outputs, so that it can be located by {::Google::Cloud::AIPlatform::V1beta1::Attribution#output_index Attribution.output_index} for
# a specific output.
# @!attribute [rw] output_tensor_name
# @return [::String]
# Name of the output tensor. Required and is only applicable to Vertex
# AI provided images for Tensorflow.
# Generated message shell: the fields are declared via the @!attribute
# docs above; the protobuf accessors are provided by MessageExts.
class OutputMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Generated map-entry type backing ExplanationMetadata#inputs
# (feature name => InputMetadata).
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::InputMetadata]
class InputsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Generated map-entry type backing ExplanationMetadata#outputs
# (output name => OutputMetadata).
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::Google::Cloud::AIPlatform::V1beta1::ExplanationMetadata::OutputMetadata]
class OutputsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
end
|
// Public entry point for the state-synchronizer package: re-exports every
// sub-module so consumers can import everything from the package root.
export * from './create-composable-state-synchronizer';
export * from './compose-state-synchronizers';
export * from './create-state-synchronizer';
export * from './create-synchronized-state-updater';
export * from './types';
export * from './enhancements';
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.riot.lang;
import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.graph.Node ;
import org.apache.jena.iri.IRI ;
import org.apache.jena.iri.IRIFactory ;
import org.apache.jena.riot.ErrorHandlerTestLib ;
import org.apache.jena.riot.ErrorHandlerTestLib.ExWarning ;
import org.apache.jena.riot.checker.CheckerIRI ;
import org.apache.jena.riot.system.Checker ;
import org.apache.jena.riot.system.ErrorHandler ;
import org.apache.jena.riot.system.RiotLib ;
import org.junit.Test ;
/**
 * Tests for RIOT IRI handling: valid IRIs pass silently, bad or relative
 * IRIs raise the expected error/warning (surfaced as exceptions by the
 * test error handler), and "_:"-prefixed strings become blank nodes.
 */
public class TestIRI extends BaseTest
{
    // Error handler that turns RIOT errors/warnings into ExError/ExWarning
    // exceptions so @Test(expected=...) can assert on them.
    static protected final ErrorHandler handler = new ErrorHandlerTestLib.ErrorHandlerEx() ;
    static protected final Checker checker = new Checker(new ErrorHandlerTestLib.ErrorHandlerEx()) ;
    static IRIFactory factory = IRIFactory.iriImplementation() ;

    /** A plain absolute http IRI passes without violations. */
    @Test public void iri1() { testIRI("http://example/") ; }

    @Test(expected=ErrorHandlerTestLib.ExError.class)
    // No relative IRIs
    public void iri2() { testIRI("example") ; }

    /** Scheme-only IRI is expected to produce a warning. */
    @Test(expected=ExWarning.class)
    public void iriErr1()
    { testIRI("http:") ; }

    /** Malformed authority/path is expected to produce a warning. */
    @Test(expected=ExWarning.class)
    public void iriErr2() { testIRI("http:///::") ; }

    /** Trailing "." path segment is expected to produce a warning. */
    @Test(expected=ExWarning.class)
    public void iriErr3() { testIRI("http://example/.") ; }

    /** Parses uriStr and reports any IRI violations to the shared handler. */
    private void testIRI(String uriStr)
    {
        IRI iri = factory.create(uriStr) ;
        CheckerIRI.iriViolations(iri, handler) ;
    }

    /** A "_:" prefix yields a blank node with the remainder as its label. */
    @Test public void bNodeIRI_1()
    {
        Node n = RiotLib.createIRIorBNode("_:abc") ;
        assertTrue(n.isBlank()) ;
        assertEquals("abc", n.getBlankNodeLabel()) ;
    }

    /** Anything else yields a URI node, even when not an absolute IRI. */
    @Test public void bNodeIRI_2()
    {
        Node n = RiotLib.createIRIorBNode("abc") ;
        assertTrue(n.isURI()) ;
        assertEquals("abc", n.getURI()) ;
    }
}
|
<gh_stars>10-100
// Package pic contains the code to be able to launch a picture service.
// A picture service will serve as a proxy for the underlying image-delivery service and map a repository
// with its associated star chart.
package pic
|
<gh_stars>0
#include "CpuTimer.hh"
|
# Regenerates the Salesforce source in force-app/main/generated from the
# FMPP templates in this directory and pushes it to the default scratch org.
# Must be run from the templates/ directory.
#
# Abort on the first failing step: previously a failed fmpp run would still
# push a deleted/half-generated tree to the org.
set -e
clear
rm -rf ../force-app/main/generated
fmpp -S . -O ../force-app/main/generated
cd ..
sfdx force:source:push
cd templates
<filename>appsec-v1/appsec.go
package appsec
import (
"bytes"
"encoding/json"
"fmt"
client "github.com/akamai/AkamaiOPEN-edgegrid-golang/client-v1"
"time"
)
// ActivationConfig identifies one security configuration version included in
// an activation, together with the version it replaces.
type ActivationConfig struct {
	ConfigID              int    `json:"configId"`
	ConfigName            string `json:"configName"`
	ConfigVersion         int    `json:"configVersion"`
	PreviousConfigVersion int    `json:"previousConfigVersion"`
}

// Activation is the request body for activating (or deactivating) one or
// more security configurations on a network.
type Activation struct {
	Action             string             `json:"action"`
	Network            string             `json:"network"`
	Note               string             `json:"note"`
	NotificationEmails []string           `json:"notificationEmails"`
	ActivationConfigs  []ActivationConfig `json:"activationConfigs"`
}

// ActivationResponse wraps the two possible payloads of an activation call,
// discriminated by ResponseCode: an immediate activation status, or a
// "request accepted" status that must be polled via its check-status link.
type ActivationResponse struct {
	ResponseCode            int
	ActivationStatus        ActivationStatus
	ActivationRequestStatus ActivationRequestStatusCreated
}

// ActivationStatus describes a created activation as reported by the API.
type ActivationStatus struct {
	DispatchCount     int                `json:"dispatchCount"`
	ActivationID      int                `json:"activationId"`
	Action            string             `json:"action"`
	Status            string             `json:"status"`
	Network           string             `json:"network"`
	Estimate          string             `json:"estimate"`
	CreatedBy         string             `json:"createdBy"`
	CreateDate        time.Time          `json:"createDate"`
	ActivationConfigs []ActivationConfig `json:"activationConfigs"`
}

// ActivationRequestStatusCreated is returned when an activation request has
// been accepted; Links.CheckStatus.Href points at the status-poll endpoint.
type ActivationRequestStatusCreated struct {
	StatusID   string    `json:"statusId"`
	CreateDate time.Time `json:"createDate"`
	Links      struct {
		CheckStatus struct {
			Href string `json:"href"`
		} `json:"check-status"`
	} `json:"links"`
}

// ActivationRequestStatusResponse wraps the two possible payloads of a
// status poll, discriminated by ResponseCode: still in progress or complete.
type ActivationRequestStatusResponse struct {
	ResponseCode                      int
	ActivationRequestStatusInProgress ActivationRequestStatusInProgress
	ActivationRequestStatusComplete   ActivationRequestStatusComplete
}

// ActivationRequestStatusInProgress is the poll response while the
// activation request is still being processed.
type ActivationRequestStatusInProgress struct {
	StatusID   string    `json:"statusId"`
	CreateDate time.Time `json:"createDate"`
}

// ActivationRequestStatusComplete is the poll response once the activation
// exists; ActivationID identifies the resulting activation.
type ActivationRequestStatusComplete struct {
	ActivationID int `json:"activationId"`
}
type ConfigurationClone struct {
CreateFromVersion int `json:"createFromVersion"`
RuleUpdate bool `json:"ruleUpdate"`
}
type HostnameList struct {
Hostname string `json:"hostname"`
}
type SelectedHostnames struct {
HostnameList []HostnameList `json:"hostnameList"`
}
type VersionList struct {
TotalSize int `json:"totalSize"`
PageSize int `json:"pageSize"`
Page int `json:"page"`
ConfigID int `json:"configId"`
ConfigName string `json:"configName"`
StagingExpediteRequestID int `json:"stagingExpediteRequestId"`
ProductionExpediteRequestID int `json:"productionExpediteRequestId"`
ProductionActiveVersion int `json:"productionActiveVersion"`
StagingActiveVersion int `json:"stagingActiveVersion"`
LastCreatedVersion int `json:"lastCreatedVersion"`
}
type Version struct {
ConfigID int `json:"configId"`
ConfigName string `json:"configName"`
Version int `json:"version"`
VersionNotes string `json:"versionNotes"`
CreateDate time.Time `json:"createDate"`
CreatedBy string `json:"createdBy"`
BasedOn int `json:"basedOn"`
Production struct {
Status string `json:"status"`
Time time.Time `json:"time"`
} `json:"production"`
Staging struct {
Status string `json:"status"`
Time time.Time `json:"time"`
} `json:"staging"`
}
// ListConfigurationVersions returns the version summary for the security
// configuration identified by configid
// (GET /appsec/v1/configs/{configId}/versions).
func ListConfigurationVersions(configid int) (*VersionList, error) {
	req, err := client.NewRequest(
		Config,
		"GET",
		fmt.Sprintf("/appsec/v1/configs/%d/versions", configid),
		nil,
	)
	if err != nil {
		return nil, err
	}
	res, err := client.Do(Config, req)
	if err != nil {
		return nil, err
	}
	// Non-success HTTP statuses are converted into API errors.
	if client.IsError(res) {
		return nil, client.NewAPIError(res)
	}
	var response VersionList
	if err = client.BodyJSON(res, &response); err != nil {
		return nil, err
	}
	return &response, nil
}
// CloneConfigurationVersion creates a new version of the given security
// configuration by cloning an existing one
// (POST /appsec/v1/configs/{configId}/versions) and returns the new version.
func CloneConfigurationVersion(configid int, configurationclone ConfigurationClone) (*Version, error) {
	payload, marshalErr := json.Marshal(configurationclone)
	if marshalErr != nil {
		return nil, marshalErr
	}
	request, requestErr := client.NewRequest(
		Config,
		"POST",
		fmt.Sprintf("/appsec/v1/configs/%d/versions", configid),
		bytes.NewReader(payload),
	)
	if requestErr != nil {
		return nil, requestErr
	}
	reply, doErr := client.Do(Config, request)
	if doErr != nil {
		return nil, doErr
	}
	if client.IsError(reply) {
		return nil, client.NewAPIError(reply)
	}
	created := Version{}
	if decodeErr := client.BodyJSON(reply, &created); decodeErr != nil {
		return nil, decodeErr
	}
	return &created, nil
}
// ActivateConfigurationVersion submits an activation request
// (POST /appsec/v1/activations). The API may answer synchronously
// (200, an ActivationStatus body) or asynchronously (202, a status handle to
// poll with GetActivationRequestStatus); ResponseCode in the returned struct
// tells the caller which member is populated.
func ActivateConfigurationVersion(activation Activation) (*ActivationResponse, error) {
	r, err := json.Marshal(activation)
	if err != nil {
		return nil, err
	}
	req, err := client.NewRequest(
		Config,
		"POST",
		"/appsec/v1/activations",
		bytes.NewReader(r),
	)
	if err != nil {
		return nil, err
	}
	res, err := client.Do(Config, req)
	if err != nil {
		return nil, err
	}
	if client.IsError(res) {
		return nil, client.NewAPIError(res)
	}
	var response ActivationResponse
	var activationresponse ActivationStatus
	var activationrequeststatus ActivationRequestStatusCreated
	// This pesky API call can return different responses!
	// 200: activation processed immediately, body is an ActivationStatus.
	if res.StatusCode == 200 {
		if err = client.BodyJSON(res, &activationresponse); err != nil {
			return nil, err
		}
	} else if res.StatusCode == 202 {
		// 202: request accepted, body carries a status ID plus polling link.
		if err = client.BodyJSON(res, &activationrequeststatus); err != nil {
			return nil, err
		}
	}
	// Any other success code leaves both members zero-valued; the caller must
	// inspect ResponseCode before using either one.
	response.ResponseCode = res.StatusCode
	response.ActivationStatus = activationresponse
	response.ActivationRequestStatus = activationrequeststatus
	return &response, nil
}
// GetActivationRequestStatus polls an asynchronous activation request
// (GET /appsec/v1/activations/status/{statusId}). A 200 reply means the
// activation is still in progress; a 303 reply means it completed and carries
// the resulting activation ID. ResponseCode tells the caller which member of
// the returned struct is populated.
func GetActivationRequestStatus(statusid string) (*ActivationRequestStatusResponse, error) {
	req, err := client.NewRequest(
		Config,
		"GET",
		fmt.Sprintf("/appsec/v1/activations/status/%s", statusid),
		nil,
	)
	if err != nil {
		return nil, err
	}
	res, err := client.Do(Config, req)
	if err != nil {
		return nil, err
	}
	if client.IsError(res) {
		return nil, client.NewAPIError(res)
	}
	var response ActivationRequestStatusResponse
	var activationrequeststatusinprogress ActivationRequestStatusInProgress
	var activationrequeststatuscomplete ActivationRequestStatusComplete
	// This pesky API call can return different responses!
	// 200: still in progress.
	if res.StatusCode == 200 {
		if err = client.BodyJSON(res, &activationrequeststatusinprogress); err != nil {
			return nil, err
		}
	} else if res.StatusCode == 303 {
		// 303: complete; body carries the activation ID.
		if err = client.BodyJSON(res, &activationrequeststatuscomplete); err != nil {
			return nil, err
		}
	}
	response.ResponseCode = res.StatusCode
	response.ActivationRequestStatusInProgress = activationrequeststatusinprogress
	response.ActivationRequestStatusComplete = activationrequeststatuscomplete
	return &response, nil
}
// GetConfigurationVersion fetches a single version of a security
// configuration (GET /appsec/v1/configs/{configId}/versions/{version}).
func GetConfigurationVersion(configid int, version int) (*Version, error) {
	request, requestErr := client.NewRequest(
		Config,
		"GET",
		fmt.Sprintf("/appsec/v1/configs/%d/versions/%d", configid, version),
		nil,
	)
	if requestErr != nil {
		return nil, requestErr
	}
	reply, doErr := client.Do(Config, request)
	if doErr != nil {
		return nil, doErr
	}
	if client.IsError(reply) {
		return nil, client.NewAPIError(reply)
	}
	result := Version{}
	if decodeErr := client.BodyJSON(reply, &result); decodeErr != nil {
		return nil, decodeErr
	}
	return &result, nil
}
// GetActivationStatus fetches the state of a previously submitted activation
// (GET /appsec/v1/activations/{activationId}).
func GetActivationStatus(activationid int) (*ActivationStatus, error) {
	req, err := client.NewRequest(
		Config,
		"GET",
		fmt.Sprintf("/appsec/v1/activations/%d", activationid),
		nil,
	)
	if err != nil {
		return nil, err
	}
	res, err := client.Do(Config, req)
	if err != nil {
		return nil, err
	}
	// Non-success HTTP statuses are converted into API errors.
	if client.IsError(res) {
		return nil, client.NewAPIError(res)
	}
	var response ActivationStatus
	if err = client.BodyJSON(res, &response); err != nil {
		return nil, err
	}
	return &response, nil
}
// ListSelectedHostnames returns the hostnames covered by the given
// configuration version
// (GET /appsec/v1/configs/{configId}/versions/{version}/selected-hostnames).
func ListSelectedHostnames(configid int, version int) (*SelectedHostnames, error) {
	req, err := client.NewRequest(
		Config,
		"GET",
		fmt.Sprintf("/appsec/v1/configs/%d/versions/%d/selected-hostnames", configid, version),
		nil,
	)
	if err != nil {
		return nil, err
	}
	res, err := client.Do(Config, req)
	if err != nil {
		return nil, err
	}
	// Non-success HTTP statuses are converted into API errors.
	if client.IsError(res) {
		return nil, client.NewAPIError(res)
	}
	var response SelectedHostnames
	if err = client.BodyJSON(res, &response); err != nil {
		return nil, err
	}
	return &response, nil
}
// UpdateSelectedHostnames replaces the set of hostnames covered by the given
// configuration version
// (PUT /appsec/v1/configs/{configId}/versions/{version}/selected-hostnames)
// and returns the hostname list as stored by the API.
func UpdateSelectedHostnames(configid int, version int, selectedhostnames SelectedHostnames) (*SelectedHostnames, error) {
	body, marshalErr := json.Marshal(selectedhostnames)
	if marshalErr != nil {
		return nil, marshalErr
	}
	request, requestErr := client.NewRequest(
		Config,
		"PUT",
		fmt.Sprintf("/appsec/v1/configs/%d/versions/%d/selected-hostnames", configid, version),
		bytes.NewReader(body),
	)
	if requestErr != nil {
		return nil, requestErr
	}
	reply, doErr := client.Do(Config, request)
	if doErr != nil {
		return nil, doErr
	}
	if client.IsError(reply) {
		return nil, client.NewAPIError(reply)
	}
	stored := SelectedHostnames{}
	if decodeErr := client.BodyJSON(reply, &stored); decodeErr != nil {
		return nil, decodeErr
	}
	return &stored, nil
}
|
#!/usr/bin/python3
"""Script for creating scaled L2 norm
"""
import torch.nn as nn
import torch
import torch.nn.functional as F
class ScaledL2Norm(nn.Module):
    """Channel-wise L2 normalization followed by a learnable per-channel scale.

    The input is unit-normalized along the channel dimension (dim=1) and each
    channel is then multiplied by a learned scale, initialized to
    ``initial_scale`` (as in the SSD L2Norm layer).
    """

    def __init__(self, in_channels, initial_scale):
        super(ScaledL2Norm, self).__init__()
        self.in_channels = in_channels
        self.initial_scale = initial_scale
        # One learnable scale per channel; values are set by reset_parameters().
        self.scale = nn.Parameter(torch.Tensor(in_channels))
        self.reset_parameters()

    def forward(self, x):
        # Normalize over channels, then broadcast the per-channel scale across
        # the (batch, channel, H, W) layout.
        normalized = F.normalize(x, p=2, dim=1)
        per_channel_scale = self.scale.view(1, -1, 1, 1)
        return normalized * per_channel_scale

    def reset_parameters(self):
        # Every channel starts at the same constant scale.
        self.scale.data.fill_(self.initial_scale)
|
#!/usr/bin/env bash
# Install the jeannie source-registry fragment into the user's common-lisp
# configuration directory. Abort on errors or unset variables.
set -euo pipefail

# Directory containing this script, with symlinks resolved (-P).
DIR="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
dest_d="$HOME/.config/common-lisp/source-registry.conf.d/"

# Quote all path expansions so directories containing spaces work.
mkdir -p "${dest_d}"
cp "${DIR}/jeannie.conf" "${dest_d}"
|
#!/usr/bin/env bash
# Run every package's "clean" npm script through lerna; --stream interleaves
# each package's output live instead of buffering it.
echo "┏━━━ 🧹 CLEAN: removing lib folder ━━━━━━━"
lerna run clean --stream
|
package leetCode;//给你链表的头结点 head ,请将其按 升序 排列并返回 排序后的链表 。
//
// 进阶:
//
//
// 你可以在 O(n log n) 时间复杂度和常数级空间复杂度下,对链表进行排序吗?
//
//
//
//
// 示例 1:
//
//
//输入:head = [4,2,1,3]
//输出:[1,2,3,4]
//
//
// 示例 2:
//
//
//输入:head = [-1,5,3,4,0]
//输出:[-1,0,3,4,5]
//
//
// 示例 3:
//
//
//输入:head = []
//输出:[]
//
//
//
//
// 提示:
//
//
// 链表中节点的数目在范围 [0, 5 * 104] 内
// -105 <= Node.val <= 105
//
// Related Topics 链表 双指针 分治 排序 归并排序
// 👍 1248 👎 0
//leetcode submit region begin(Prohibit modification and deletion)
import leetCode.L10104_MaxDepth;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Definition for singly-linked list.
 * public class ListNode {
 *     int val;
 *     ListNode next;
 *     ListNode() {}
 *     ListNode(int val) { this.val = val; }
 *     ListNode(int val, ListNode next) { this.val = val; this.next = next; }
 * }
 */
/**
 * LeetCode 148. Sort List — sort a singly linked list in ascending order.
 *
 * The previous version computed the list length and then returned null
 * (the merge sort was never implemented). This implements top-down merge
 * sort by relinking nodes: O(n log n) time, O(log n) recursion stack, no
 * extra node allocation beyond one dummy head per merge.
 */
public class L10148_SortList {
    /**
     * Sorts the list in ascending order of {@code val}.
     *
     * @param head head of the list; may be null
     * @return head of the sorted list (null for an empty input)
     */
    public ListNode sortList(ListNode head) {
        if (head == null || head.next == null) {
            return head;
        }
        // Find the middle with slow/fast pointers; starting fast at
        // head.next makes 'slow' end on the last node of the first half.
        ListNode slow = head;
        ListNode fast = head.next;
        while (fast != null && fast.next != null) {
            slow = slow.next;
            fast = fast.next.next;
        }
        ListNode second = slow.next;
        slow.next = null; // terminate the first half
        ListNode left = sortList(head);
        ListNode right = sortList(second);
        return merge(left, right);
    }

    /** Merges two already-sorted lists into one sorted list (stable: ties take from 'a'). */
    private ListNode merge(ListNode a, ListNode b) {
        ListNode dummy = new ListNode();
        ListNode tail = dummy;
        while (a != null && b != null) {
            if (a.val <= b.val) {
                tail.next = a;
                a = a.next;
            } else {
                tail.next = b;
                b = b.next;
            }
            tail = tail.next;
        }
        // Append whichever half still has nodes.
        tail.next = (a != null) ? a : b;
        return dummy.next;
    }

    /** Singly-linked list node. */
    public static class ListNode {
        int val;
        ListNode next;

        ListNode() {
        }

        ListNode(int val) {
            this.val = val;
        }

        ListNode(int val, ListNode next) {
            this.val = val;
            this.next = next;
        }
    }
}
|
from bidso.utils import read_tsv
import plotly.graph_objects as go
from numpy import sign
from ..names import name
from ..read import load
from .surf import AXIS
def plot_brain_regions(parameters, ieeg_file, region_type):
    """Plot the pial surface colored by anatomical parcellation, with the
    electrodes overlaid as labeled markers.

    region_type can be one of:
       'aparc.a2009s',
       'aparc.DKTatlas',
       'BA_exvivo',
       'BA_exvivo.thresh',

    Returns a plotly Figure containing one Mesh3d (brain) and one Scatter3d
    (electrodes), with the camera placed on the side where most electrodes
    sit.
    """
    # Electrode table: one row per channel, with x/y/z and a column named
    # after each parcellation scheme giving the region the contact falls in.
    brainregions_file = name(parameters, 'brainregions', ieeg_file)
    electrodes = read_tsv(brainregions_file)
    pial = load('pial', parameters, ieeg_file)
    annot = load(region_type, parameters, ieeg_file)
    # Per-electrode marker color and hover label ("<chan> = <region>").
    colors = []
    labels = []
    for elec in electrodes:
        region = elec[region_type]
        labels.append(f'{elec["chan"]} = {region}')
        colors.append(annot['regions']['colors'][region])
    # to normalize plotly
    n_regions = len(annot['regions']['names'])
    # +1 when the majority of electrodes have x > 0, -1 otherwise; used to put
    # the camera on the implanted side (presumably x > 0 is the right
    # hemisphere in this coordinate system — confirm against the surfaces).
    right_or_left = sign((electrodes['x'] > 0).sum() / electrodes.shape[0] - .5)
    traces = [
        go.Mesh3d(
            x=pial.vert[:, 0],
            y=pial.vert[:, 1],
            z=pial.vert[:, 2],
            i=pial.tri[:, 0],
            j=pial.tri[:, 1],
            k=pial.tri[:, 2],
            # Region index scaled to [0, 1] so the colorscale spans all regions.
            intensity=annot['regions']['values'] / n_regions,
            colorscale=annot['regions']['colorscale'],
            hoverinfo='skip',
            showscale=False,
            flatshading=False,
            lighting=dict(
                ambient=0.18,
                diffuse=1,
                fresnel=0.1,
                specular=1,
                roughness=0.1,
                ),
            lightposition=dict(
                x=0,
                y=0,
                z=-1,
                ),
            ),
        go.Scatter3d(
            x=electrodes['x'],
            y=electrodes['y'],
            z=electrodes['z'],
            text=labels,
            mode='markers',
            hoverinfo='text',
            marker=dict(
                size=5,
                color=colors,
                ),
            )
        ]
    fig = go.Figure(
        data=traces,
        layout=go.Layout(
            scene=dict(
                xaxis=AXIS,
                yaxis=AXIS,
                zaxis=AXIS,
                camera=dict(
                    eye=dict(
                        x=right_or_left,
                        y=0,
                        z=0.5,
                        ),
                    # Orthographic projection avoids perspective distortion.
                    projection=dict(
                        type='orthographic',
                        ),
                    ),
                ),
            ),
        )
    return fig
|
module.exports = ['Femelle', 'Mâle'];
|
<reponame>SalvadorP/UdaciCards<filename>components/DeckDetails.js
import React, { Component } from "react";
import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
import { connect } from "react-redux";
import { StackNavigator } from "react-navigation";
import { lightGrey } from "../utils/colors";
import GenericButton from "./GenericButton";
import styles from "./Styles.js";
import { AsyncStorage } from "react-native";
import { getAllDecks, getDeck } from "../utils/api";
const DECKS_STORAGE_KEY = "udacicards.decks";
class DeckDetails extends Component {
startQuiz = () => {
const { decks } = this.props;
const title = this.props.navigation.state.params.title;
var cards = [];
if (decks[title] !== undefined) {
cards = decks[title].cards;
}
if (cards.length > 0) {
return (
<GenericButton
btnStyle={"orangeBtn"}
btnText={"Start Quiz"}
onPress={() =>
this.props.navigation.navigate("Quiz", { title: title })
}
textStyle={"orangeBtnText"}
/>
);
}
};
render() {
const { decks } = this.props;
const deckTitle = this.props.navigation.state.params.title;
cardsNumber = 0;
if (decks[deckTitle] !== undefined) {
cardsNumber = decks[deckTitle].cards.length;
}
return (
<View style={stylesCustom.container}>
<View style={styles.textContainer}>
<Text style={styles.deckTitle}>{deckTitle}</Text>
<Text style={styles.cardsNumber}>{cardsNumber + " cards"}</Text>
</View>
<GenericButton
btnStyle={"blueBtn"}
btnText={"Add Card"}
onPress={() =>
this.props.navigation.navigate("CreateCard", { title: deckTitle })
}
textStyle={"blueBtnText"}
/>
{this.startQuiz()}
</View>
);
}
}
// Local layout: fill the screen, stack children from the top, centered
// horizontally, on the app's light grey background.
const stylesCustom = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: "flex-start",
    alignItems: "center",
    backgroundColor: lightGrey
  }
});

// The entire Redux state is the decks map; expose it as `props.decks`.
function mapStateToProps(decks) {
  return {
    decks
  };
}

export default connect(mapStateToProps)(DeckDetails);
|
<filename>src/app/video-player/controls/captured-frame-list/operation-dialog/operation-dialog.component.ts
import {AfterViewInit, Component, ElementRef, ViewChild} from '@angular/core';
import {IMAGE_PROPERTY_NAME} from '../../../core/video-capture.service';
import {UIDialogRef} from 'altair-ui';
import {PersistStorage} from '../../../../user-service';
import {Capture} from '../../../core/settings';
import download from 'downloadjs';
// Result codes the dialog closes with; the opener switches on these.
export const RESULT_TWITTER = 'twitter';
export const RESULT_DOWNLOAD = 'download';
export const RESULT_TRASH = 'trash';

/**
 * Dialog offering operations on a captured video frame: share to Twitter,
 * download as a PNG, or discard it. Closes with `{result, remove}` where
 * `result` is one of the RESULT_* codes and `remove` tells the caller
 * whether to drop the frame from the captured list.
 *
 * The caller must assign `image` before the dialog's view initializes; the
 * element is attached to the DOM in ngAfterViewInit.
 */
@Component({
    selector: 'captured-image-operation-dialog',
    templateUrl: './operation-dialog.html',
    styleUrls: ['./operation-dialog.less']
})
export class CapturedImageOperationDialog implements AfterViewInit {
    private _autoRemove: boolean;
    // The captured frame; carries capture metadata under IMAGE_PROPERTY_NAME.
    image: HTMLImageElement;

    // "Remove after action" preference, persisted across sessions.
    set autoRemove(v: boolean) {
        this._autoRemove = v;
        this._persistStorage.setItem(Capture.AUTO_REMOVE, v + '');
    }

    get autoRemove(): boolean {
        return this._autoRemove;
    }

    @ViewChild('imageWrapper', {static: false}) imageWrapper: ElementRef;

    constructor(private _dialogRef: UIDialogRef<CapturedImageOperationDialog>,
                private _persistStorage: PersistStorage) {
        // Stored as a string; defaults to 'true' when never set.
        let savedAutoRemove = this._persistStorage.getItem(Capture.AUTO_REMOVE, 'true');
        this._autoRemove = savedAutoRemove === 'true';
    }

    shareToTwitter(event: Event) {
        event.preventDefault();
        event.stopPropagation();
        this._dialogRef.close({result: RESULT_TWITTER, remove: this.autoRemove});
    }

    download(event: Event) {
        event.preventDefault();
        event.stopPropagation();
        let dataURI = this.image.src;
        // Capture metadata was stashed on the image element itself.
        let {bangumi_name, episode_no, currentPlayTime} = this.image[IMAGE_PROPERTY_NAME];
        let filename = `${bangumi_name}_${episode_no}_${Math.round(currentPlayTime)}.png`;
        download(dataURI, filename, 'application/octet-stream');
        this._dialogRef.close({result: RESULT_DOWNLOAD, remove: this.autoRemove});
    }

    trash(event: Event) {
        event.preventDefault();
        event.stopPropagation();
        // Trashing always removes, regardless of the autoRemove preference.
        this._dialogRef.close({result: RESULT_TRASH, remove: true});
    }

    ngAfterViewInit(): void {
        let imageWrapperElement = this.imageWrapper.nativeElement as HTMLElement;
        imageWrapperElement.appendChild(this.image);
    }
}
|
#!/bin/bash
# -----------------------------------------------------------------------------
#
# Copyright (C) 2021 CERN & University of Surrey for the benefit of the
# BioDynaMo collaboration. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# See the LICENSE file distributed with this work for details.
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# -----------------------------------------------------------------------------
if [[ $# -lt 2 ]]; then
  echo "Wrong number of arguments.
Description:
  Run a script inside a docker container
Usage:
  run-inside-docker.sh CONTAINER_OS SCRIPT [SCRIPT_ARGUMENTS]
Arguments:
  CONTAINER_OS OS id of the container
  SCRIPT absolute path to script that should be executed inside the container
  or relative path to BDM_PROJECT_DIR.
  NB: In both cases the script must be inside BDM_PROJECT_DIR
  SCRIPT_ARGUMENTS arguments that are passed to the script inside the docker
  container (optional)
"
  exit 1
fi
set -e
# save arguments in variables
BDM_OS=$1
shift
BDM_SCRIPT=$1
shift
BDM_SCRIPT_ARGUMENTS=$@
# repository root (this script lives in a subdirectory of it)
BDM_PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/.."
# include util functions
. $BDM_PROJECT_DIR/util/installation/common/util.sh
# check if BDM_OS is valid
CheckOsSupported $BDM_PROJECT_DIR/util/installation $BDM_OS
# check if BDM_OS supports docker
BDM_PATH_TO_DOCKERFILE=$BDM_PROJECT_DIR/util/installation/$BDM_OS
if [ ! -f "${BDM_PATH_TO_DOCKERFILE}/Dockerfile" ]; then
  echo "Could not find a Docker file in ${BDM_PATH_TO_DOCKERFILE}"
  echo "${BDM_OS} does not support Docker at the moment."
  exit 1
fi
BDM_CONTAINER=bdmdev-${BDM_OS}
# enables GUI apps
# (allow local root clients to use the X server; ignore failures on headless machines)
xhost +local:root &>/dev/null || true
EchoNewStep "Stop and remove any previously created $BDM_CONTAINER container..."
sudo docker stop $BDM_CONTAINER || true
sudo docker rm $BDM_CONTAINER || true
# create image from Dockerfile
echo ""
EchoNewStep "Build docker image..."
BDM_IMAGE=$BDM_CONTAINER
# map the host user's uid/gid into the image so files created in the mounted
# volume keep the right ownership
sudo docker build \
  --network=host \
  --build-arg HOST_UID=$(id -u `whoami`) \
  --build-arg HOST_GID=$(id -g `whoami`) \
  -t $BDM_IMAGE $BDM_PATH_TO_DOCKERFILE
echo ""
EchoNewStep "Start docker container..."
BDM_PROJECT_DIR_ABS=$(GetAbsolutePath $BDM_PROJECT_DIR)
# check if working directory is inside BDM_PROJECT_DIR
if [[ "$PWD" != $(readlink -e $BDM_PROJECT_DIR_ABS)* ]]; then
  EchoError "ERROR: working directory must be inside ${BDM_PROJECT_DIR_ABS}"
  echo "Current working directory: $PWD"
  echo "Change your working directory and run the script again."
  exit 1
fi
# Flatten container to avoid openMPI bug inside a docker container
# https://stackoverflow.com/questions/46138549/docker-openmpi-and-unexpected-end-of-proc-mounts-line
# (export the container's filesystem to a tar and re-import it as a single-layer image)
sudo docker run --name $BDM_CONTAINER $BDM_IMAGE /bin/bash
TMP_CONTAINER_TAR=$(mktemp)
sudo docker export $BDM_CONTAINER > $TMP_CONTAINER_TAR
cat $TMP_CONTAINER_TAR | sudo docker import - ${BDM_IMAGE}:flattened
rm $TMP_CONTAINER_TAR
sudo docker stop $BDM_CONTAINER || true
sudo docker rm $BDM_CONTAINER || true
BDM_FORWARD_ENV=""
# BDM_LOCAL_LFS is defined add the environment variable and volume
if [ $BDM_LOCAL_LFS ]; then
  BDM_FORWARD_ENV="$BDM_FORWARD_ENV --env BDM_LOCAL_LFS=$BDM_LOCAL_LFS"
  BDM_LOCAL_LFS_VOLUME="--volume $BDM_LOCAL_LFS:$BDM_LOCAL_LFS"
fi
# flattening the image somehow resets the default user specified in the
# Dockerfile to root. Therefore, we have to add the --user option here
sudo docker run \
  --name $BDM_CONTAINER \
  --user testuser \
  --net=host \
  --cap-add=SYS_PTRACE \
  --security-opt seccomp=unconfined \
  --env="DISPLAY" \
  $BDM_FORWARD_ENV \
  --volume $BDM_PROJECT_DIR_ABS:$BDM_PROJECT_DIR_ABS \
  --volume /var/run/docker.sock:/var/run/docker.sock \
  $BDM_LOCAL_LFS_VOLUME \
  --workdir $PWD \
  -dit \
  ${BDM_IMAGE}:flattened \
  /bin/bash
# execute script
# avoid exit if $BDM_SCRIPT returns non zero exit code;
# returning exit code is done manually afterwards
set +e
BDM_SCRIPT_ABS=$(GetAbsolutePath $BDM_SCRIPT)
echo ""
EchoNewStep "Execute ${BDM_SCRIPT}..."
sudo docker exec \
  -ti \
  $BDM_CONTAINER \
  $BDM_SCRIPT_ABS $BDM_SCRIPT_ARGUMENTS
RETURN_VAL=$?
echo ""
EchoNewStep "Finished"
echo "$BDM_SCRIPT return code was: $RETURN_VAL"
echo "The container '$BDM_CONTAINER' is still running."
echo "You can connect to it using 'sudo docker exec -ti $BDM_CONTAINER /bin/bash'"
exit $RETURN_VAL
|
import { Task } from '@prisma/client';
import { inject, injectable } from 'tsyringe';
import { GetTasksBySubjectIdDTO } from '../dtos/GetTasksBySubjecId.dto';
import { ITaskRepository } from '../repositories/ITaskRepository';
/**
 * Application service that returns every task belonging to a subject.
 * The repository implementation is resolved by tsyringe via the
 * 'PrismaTaskRepository' token.
 */
@injectable()
export class GetTasksBySubjectIdService {
  constructor(
    @inject('PrismaTaskRepository')
    private taskRepository: ITaskRepository
  ) {}

  /** Looks up all tasks linked to the subject identified by `id`. */
  async handle({ id }: GetTasksBySubjectIdDTO): Promise<Task[]> {
    const tasks = await this.taskRepository.getTasksBySubjectId(id);
    return tasks;
  }
}
|
<filename>test/net/networking_engine_unit.js
/**
* @license
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
describe('NetworkingEngine', /** @suppress {accessControls} */ function() {
var networkingEngine;
var resolveScheme;
var rejectScheme;
var requestType;
var Util;
var originalGetLocationProtocol;
var fakeProtocol;
beforeAll(function() {
Util = shaka.test.Util;
requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
originalGetLocationProtocol =
shaka.net.NetworkingEngine.getLocationProtocol_;
shaka.net.NetworkingEngine.getLocationProtocol_ = function() {
return fakeProtocol;
};
});
beforeEach(function() {
networkingEngine = new shaka.net.NetworkingEngine();
resolveScheme = jasmine.createSpy('resolve scheme').and.callFake(
function() {
return Promise.resolve({
uri: '', data: new ArrayBuffer(5), headers: {}
});
});
rejectScheme = jasmine.createSpy('reject scheme')
.and.callFake(function() { return Promise.reject(); });
shaka.net.NetworkingEngine.registerScheme('resolve', resolveScheme);
shaka.net.NetworkingEngine.registerScheme('reject', rejectScheme);
});
afterEach(function() {
shaka.net.NetworkingEngine.unregisterScheme('resolve');
shaka.net.NetworkingEngine.unregisterScheme('reject');
});
afterAll(function() {
shaka.net.NetworkingEngine.getLocationProtocol_ =
originalGetLocationProtocol;
});
describe('retry', function() {
it('will retry', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 2,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
rejectScheme.and.callFake(function() {
if (rejectScheme.calls.count() == 1)
return Promise.reject();
else
return Promise.resolve({
uri: '', data: new ArrayBuffer(0), headers: {}
});
});
networkingEngine.request(requestType, request)
.catch(fail)
.then(function() {
expect(rejectScheme.calls.count()).toBe(2);
done();
});
});
it('will retry twice', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 3,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
rejectScheme.and.callFake(function() {
if (rejectScheme.calls.count() < 3)
return Promise.reject();
else
return Promise.resolve({
uri: '', data: new ArrayBuffer(0), headers: {}
});
});
networkingEngine.request(requestType, request)
.catch(fail)
.then(function() {
expect(rejectScheme.calls.count()).toBe(3);
done();
});
});
it('will fail overall', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 3,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
networkingEngine.request(requestType, request)
.then(fail)
.catch(function() { expect(rejectScheme.calls.count()).toBe(3); })
.then(done);
});
describe('backoff', function() {
var baseDelay = 200;
var origSetTimeout;
var setTimeoutSpy;
var realRandom;
beforeAll(function() {
origSetTimeout = shaka.net.NetworkingEngine.setTimeout_;
setTimeoutSpy = jasmine.createSpy('setTimeout');
setTimeoutSpy.and.callFake(origSetTimeout);
shaka.net.NetworkingEngine.setTimeout_ = setTimeoutSpy;
realRandom = Math.random;
Math.random = function() { return 0.75; };
});
afterAll(function() {
Math.random = realRandom;
shaka.net.NetworkingEngine.setTimeout_ = origSetTimeout;
});
beforeEach(function() {
setTimeoutSpy.calls.reset();
});
it('uses baseDelay', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 2,
baseDelay: baseDelay,
fuzzFactor: 0,
backoffFactor: 2,
timeout: 0
});
networkingEngine.request(requestType, request)
.then(fail)
.catch(function() {
expect(setTimeoutSpy.calls.count()).toBe(1);
expect(setTimeoutSpy)
.toHaveBeenCalledWith(jasmine.any(Function), baseDelay);
})
.then(done);
});
it('uses backoffFactor', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 3,
baseDelay: baseDelay,
fuzzFactor: 0,
backoffFactor: 2,
timeout: 0
});
networkingEngine.request(requestType, request)
.then(fail)
.catch(function() {
expect(setTimeoutSpy.calls.count()).toBe(2);
expect(setTimeoutSpy)
.toHaveBeenCalledWith(jasmine.any(Function), baseDelay);
expect(setTimeoutSpy)
.toHaveBeenCalledWith(jasmine.any(Function), baseDelay * 2);
})
.then(done);
});
it('uses fuzzFactor', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 2,
baseDelay: baseDelay,
fuzzFactor: 1,
backoffFactor: 1,
timeout: 0
});
networkingEngine.request(requestType, request)
.then(fail)
.catch(function() {
// (rand * 2.0) - 1.0 = (0.75 * 2.0) - 1.0 = 0.5
// 0.5 * fuzzFactor = 0.5 * 1 = 0.5
// delay * (1 + 0.5) = baseDelay * (1 + 0.5)
expect(setTimeoutSpy.calls.count()).toBe(1);
expect(setTimeoutSpy)
.toHaveBeenCalledWith(jasmine.any(Function), baseDelay * 1.5);
})
.then(done);
});
});
it('uses multiple URIs', function(done) {
var request = createRequest('', {
maxAttempts: 3,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
request.uris = ['reject://foo', 'resolve://foo'];
networkingEngine.request(requestType, request)
.catch(fail)
.then(function() {
expect(rejectScheme.calls.count()).toBe(1);
expect(resolveScheme.calls.count()).toBe(1);
done();
});
});
});
describe('request', function() {
it('uses registered schemes', function(done) {
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function() {
expect(resolveScheme).toHaveBeenCalled();
done();
});
});
it('can unregister scheme', function(done) {
shaka.net.NetworkingEngine.unregisterScheme('resolve');
networkingEngine.request(requestType, createRequest('resolve://foo'))
.then(fail)
.catch(function() { expect(resolveScheme).not.toHaveBeenCalled(); })
.then(done);
});
it('rejects if scheme does not exist', function(done) {
networkingEngine.request(requestType, createRequest('foo://foo'))
.then(fail)
.catch(function() { expect(resolveScheme).not.toHaveBeenCalled(); })
.then(done);
});
it('returns the response object', function(done) {
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function(response) {
expect(response).toBeTruthy();
expect(response.data).toBeTruthy();
expect(response.data.byteLength).toBe(5);
expect(response.headers).toBeTruthy();
done();
});
});
it('passes correct arguments to plugin', function(done) {
var request = createRequest('resolve://foo');
request.method = 'POST';
resolveScheme.and.callFake(function(uri, request) {
expect(uri).toBe(request.uris[0]);
expect(request).toEqual(request);
return Promise.resolve();
});
networkingEngine.request(requestType, request).catch(fail).then(done);
});
it('infers a scheme for // URIs', function(done) {
fakeProtocol = 'resolve:';
networkingEngine.request(requestType, createRequest('//foo'))
.catch(fail)
.then(function() {
expect(resolveScheme).toHaveBeenCalled();
expect(resolveScheme.calls.argsFor(0)[0]).toBe('resolve://foo');
done();
});
});
});
describe('request filter', function() {
var filter;
beforeEach(function() {
filter = jasmine.createSpy('request filter');
networkingEngine.registerRequestFilter(filter);
});
afterEach(function() {
networkingEngine.unregisterRequestFilter(filter);
});
it('can be called', function(done) {
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function() {
expect(filter).toHaveBeenCalled();
done();
});
});
it('called on failure', function(done) {
networkingEngine.request(requestType, createRequest('reject://foo'))
.then(fail)
.catch(function() { expect(filter).toHaveBeenCalled(); })
.then(done);
});
it('is given correct arguments', function(done) {
var request = createRequest('resolve://foo');
networkingEngine.request(requestType, request)
.catch(fail)
.then(function() {
expect(filter.calls.argsFor(0)[0]).toBe(requestType);
expect(filter.calls.argsFor(0)[1]).toBe(request);
expect(filter.calls.argsFor(0)[1].uris[0]).toBe(request.uris[0]);
done();
});
});
it('can modify uris', function(done) {
filter.and.callFake(function(type, request) {
request.uris = ['resolve://foo'];
});
networkingEngine.request(requestType, createRequest('reject://foo'))
.catch(fail)
.then(function() {
expect(filter).toHaveBeenCalled();
done();
});
});
it('can modify allowCrossSiteCredentials', function(done) {
filter.and.callFake(function(type, request) {
request.allowCrossSiteCredentials = true;
});
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function() {
expect(filter).toHaveBeenCalled();
expect(resolveScheme).toHaveBeenCalled();
expect(resolveScheme.calls.argsFor(0)[1].allowCrossSiteCredentials)
.toBe(true);
done();
});
});
it('if throws will stop requests', function(done) {
var request = createRequest('resolve://foo', {
maxAttempts: 3,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
filter.and.throwError(new Error());
networkingEngine.request(requestType, request)
.then(fail)
.catch(function() {
expect(resolveScheme).not.toHaveBeenCalled();
expect(filter.calls.count()).toBe(1);
})
.then(done);
});
});
describe('response filter', function() {
var filter;
beforeEach(function() {
filter = jasmine.createSpy('response filter');
networkingEngine.registerResponseFilter(filter);
resolveScheme.and.callFake(function(request) {
var response = {
uri: '', data: new ArrayBuffer(100), headers: {}
};
return Promise.resolve(response);
});
});
afterEach(function() {
networkingEngine.unregisterResponseFilter(filter);
});
it('can be called', function(done) {
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function() {
expect(filter).toHaveBeenCalled();
done();
});
});
it('not called on failure', function(done) {
networkingEngine.request(requestType, createRequest('reject://foo'))
.then(fail)
.catch(function() { expect(filter).not.toHaveBeenCalled(); })
.then(done);
});
it('is given correct arguments', function(done) {
var request = createRequest('resolve://foo');
networkingEngine.request(requestType, request)
.catch(fail)
.then(function() {
expect(filter.calls.argsFor(0)[0]).toBe(requestType);
expect(filter.calls.argsFor(0)[1]).toBeTruthy();
expect(filter.calls.argsFor(0)[1].data).toBeTruthy();
expect(filter.calls.argsFor(0)[1].headers).toBeTruthy();
done();
});
});
it('can modify data', function(done) {
filter.and.callFake(function(type, response) {
response.data = new ArrayBuffer(5);
});
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function(response) {
expect(filter).toHaveBeenCalled();
expect(response).toBeTruthy();
expect(response.data.byteLength).toBe(5);
done();
});
});
it('can modify headers', function(done) {
filter.and.callFake(function(type, response) {
expect(response.headers).toBeTruthy();
response.headers['DATE'] = 'CAT';
});
networkingEngine.request(requestType, createRequest('resolve://foo'))
.catch(fail)
.then(function(response) {
expect(filter).toHaveBeenCalled();
expect(response).toBeTruthy();
expect(response.headers['DATE']).toBe('CAT');
done();
});
});
it('if throws will stop requests', function(done) {
filter.and.throwError(new Error());
networkingEngine.request(requestType, createRequest('resolve://foo'))
.then(fail)
.catch(function() { expect(filter).toHaveBeenCalled(); })
.then(done);
});
it('if throws will retry', function(done) {
var request = createRequest('resolve://foo', {
maxAttempts: 2,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
filter.and.callFake(function() {
if (filter.calls.count() == 1) throw new Error();
});
networkingEngine.request(requestType, request)
.catch(fail)
.then(function() {
expect(resolveScheme.calls.count()).toBe(2);
expect(filter.calls.count()).toBe(2);
done();
});
});
});
describe('destroy', function() {
it('waits for all operations to complete', function(done) {
var request = createRequest('resolve://foo');
var p = new shaka.util.PublicPromise();
resolveScheme.and.returnValue(p);
var r1 = networkingEngine.request(requestType, request);
var r2 = networkingEngine.request(requestType, request);
Util.capturePromiseStatus(r1);
Util.capturePromiseStatus(r2);
expect(r1.status).toBe('pending');
expect(r2.status).toBe('pending');
var d = networkingEngine.destroy();
Util.capturePromiseStatus(d);
expect(d.status).toBe('pending');
Util.delay(0.1).then(function() {
expect(d.status).toBe('pending');
p.resolve();
return d;
}).then(function() {
return Util.delay(0.1);
}).then(function() {
expect(r1.status).toBe('resolved');
expect(r2.status).toBe('resolved');
expect(d.status).toBe('resolved');
}).catch(fail).then(done);
});
it('resolves even when a request fails', function(done) {
var request = createRequest('reject://foo');
var p = new shaka.util.PublicPromise();
rejectScheme.and.returnValue(p);
var r1 = networkingEngine.request(requestType, request);
var r2 = networkingEngine.request(requestType, request);
Util.capturePromiseStatus(r1);
Util.capturePromiseStatus(r2);
expect(r1.status).toBe('pending');
expect(r2.status).toBe('pending');
var d = networkingEngine.destroy();
Util.capturePromiseStatus(d);
expect(d.status).toBe('pending');
Util.delay(0.1).then(function() {
expect(d.status).toBe('pending');
p.reject();
return d;
}).then(function() {
return Util.delay(0.1);
}).then(function() {
expect(r1.status).toBe('rejected');
expect(r2.status).toBe('rejected');
expect(d.status).toBe('resolved');
}).catch(fail).then(done);
});
it('prevents new requests', function(done) {
var request = createRequest('resolve://foo');
var p = new shaka.util.PublicPromise();
resolveScheme.and.returnValue(p);
var r1 = networkingEngine.request(requestType, request);
Util.capturePromiseStatus(r1);
expect(r1.status).toBe('pending');
// The request has already been made.
expect(resolveScheme.calls.count()).toBe(1);
var d = networkingEngine.destroy();
Util.capturePromiseStatus(d);
expect(d.status).toBe('pending');
var r2 = networkingEngine.request(requestType, request);
Util.capturePromiseStatus(r2);
expect(r2.status).toBe('pending');
// A new request has not been made.
expect(resolveScheme.calls.count()).toBe(1);
Util.delay(0.1).then(function() {
expect(r1.status).toBe('pending');
expect(r2.status).toBe('rejected');
expect(d.status).toBe('pending');
p.resolve();
return d;
}).then(function() {
return Util.delay(0.1);
}).then(function() {
expect(r1.status).toBe('resolved');
expect(r2.status).toBe('rejected');
expect(d.status).toBe('resolved');
expect(resolveScheme.calls.count()).toBe(1);
}).catch(fail).then(done);
});
it('does not allow further retries', function(done) {
var request = createRequest('reject://foo', {
maxAttempts: 3,
baseDelay: 0,
backoffFactor: 0,
fuzzFactor: 0,
timeout: 0
});
var p1 = new shaka.util.PublicPromise();
var p2 = new shaka.util.PublicPromise();
rejectScheme.and.callFake(function() {
return (rejectScheme.calls.count() == 1) ? p1 : p2;
});
var r1 = networkingEngine.request(requestType, request);
Util.capturePromiseStatus(r1);
expect(r1.status).toBe('pending');
expect(rejectScheme.calls.count()).toBe(1);
var d = networkingEngine.destroy();
Util.capturePromiseStatus(d);
expect(d.status).toBe('pending');
Util.delay(0.1).then(function() {
expect(r1.status).toBe('pending');
expect(d.status).toBe('pending');
expect(rejectScheme.calls.count()).toBe(1);
// Reject the initial request.
p1.reject();
// Resolve any retry, but since we have already been destroyed, this
// promise should not be used.
p2.resolve();
return d;
}).then(function() {
return Util.delay(0.1);
}).then(function() {
expect(d.status).toBe('resolved');
// The request was never retried.
expect(r1.status).toBe('rejected');
expect(rejectScheme.calls.count()).toBe(1);
}).catch(fail).then(done);
});
});
/**
* @param {string} uri
* @param {shakaExtern.RetryParameters=} opt_retryParameters
* @return {shakaExtern.Request}
*/
function createRequest(uri, opt_retryParameters) {
var retryParameters = opt_retryParameters ||
shaka.net.NetworkingEngine.defaultRetryParameters();
return shaka.net.NetworkingEngine.makeRequest([uri], retryParameters);
}
});
|
<gh_stars>0
const Post = require("../models/Post");
const APIFeatures = require("../utils/apiFeatures");
const ErrorResponse = require("../utils/errorResponse");
const asyncHandler = require("../middleware/async");
const cloudinary = require("../utils/cloudinary");
// Create new post => /api/v1/admin/post/new
exports.addPost = asyncHandler(async (req, res, next) => {
const result = await cloudinary.uploader.upload(req.file.path, {
folder: "post",
resource_type: "auto",
});
let post = new Post({
title: req.body.title,
description: req.body.description,
tags: req.body.tags,
postcategoryId: req.body.postcategoryId,
userId: req.user.id,
// imageUrl: result.secure_url,
// cloudinary_id: result.public_id,
imageUrl: {
public_id: result.public_id,
url: result.secure_url,
},
});
// Save user
await post.save();
res.status(200).json({ success: true, data: post });
});
// Get all posts => /api/v1/posts?keyword=apple
exports.getPosts = asyncHandler(async (req, res, next) => {
const resPerPage = 30;
const postCount = await Post.countDocuments();
const apiFeatures = new APIFeatures(
Post.find({})
.populate("userId", "name avatar")
.populate("postcategoryId", "title")
.sort("-createdAt "),
req.query
)
.search()
.filter();
let posts = await apiFeatures.query;
let filteredPostsCount = posts.length;
apiFeatures.pagination(resPerPage);
posts = await apiFeatures.query.clone();
res.status(200).json({
success: true,
postCount,
resPerPage,
filteredPostsCount,
data: posts,
});
});
// Get single post details => /api/v1/post/:id
exports.getPost = asyncHandler(async (req, res, next) => {
const post = await Post.findById(req.params.id)
.populate("userId")
.populate("postcategoryId", "title")
.populate("tags");
if (!post) {
return next(new ErrorResponse("post not found", 404));
}
res.status(200).json({
success: true,
post,
});
});
// Get post categories => /api/v1/post/:categoryId
exports.getCategories = asyncHandler(async (req, res, next) => {
const categories = await Post.find({
postcategoryId: req.params.postcategoryId,
})
.populate("userId")
.populate("postcategoryId");
if (!categories) {
return next(new ErrorResponse("category not found", 404));
}
res.status(200).json({
success: true,
categories,
});
});
// Get single post details => /api/v1/post/:categoryId
exports.getRelated = asyncHandler(async (req, res, next) => {
const categories = await Post.find({
postcategoryId: req.params.postcategoryId,
})
.populate("userId")
.populate("postcategoryId");
if (!categories) {
return next(new ErrorResponse("category not found", 404));
}
res.status(200).json({
success: true,
categories,
});
});
// Get user post => /api/v1/post/:userId
exports.getUserPosts = asyncHandler(async (req, res, next) => {
const userPosts = await Post.find({ userId: req.params.userId })
.populate("userId")
.populate("categoryId");
if (!userPosts) {
return next(new ErrorResponse("userPosts not found", 404));
}
res.status(200).json({
success: true,
userPosts,
});
});
// Get single post details => /api/v1/post/:categoryId
exports.getTags = asyncHandler(async (req, res, next) => {
const tags = await Post.find({ tags: req.params.tagsId })
.populate("userId")
.populate("categoryId");
if (!tags) {
return next(new ErrorResponse("category not found", 404));
}
res.status(200).json({
success: true,
tags,
});
});
// Update Product => /api/v1/admin/product/:id
// Update Product => /api/v1/admin/product/:id
exports.updatePost = asyncHandler(async (req, res, next) => {
let post = await Post.findById(req.params.id);
await cloudinary.uploader.destroy(post.cloudinary_id);
// Upload image to cloudinary
let result;
if (req.file) {
result = await cloudinary.uploader.upload(req.file.path);
}
const data = {
title: req.body.title,
description: req.body.description,
tags: req.body.tags,
categoryId: req.body.categoryId,
imageUrl: result.secure_url,
cloudinary_id: result.public_id,
};
post = await Post.findByIdAndUpdate(req.params.id, data, { new: true });
res.status(200).json({
success: true,
post,
});
});
// Delete post => /api/v1/admin/post/:id
exports.deletePost = asyncHandler(async (req, res, next) => {
const post = await Post.findById(req.params.id);
await cloudinary.uploader.destroy(post.imageUrl.public_id);
if (!post) {
return next(new ErrorResponse("post not found", 404));
}
await post.remove();
res.status(200).json({
success: true,
message: "post is deleted.",
});
});
|
-- Roles and permissions for Digital banking users
-- One row per (user, role, permission); same result as the original
-- comma-join form, rewritten with explicit ANSI joins.
SELECT u.user_id,
       u.first_name || ' ' || u.middle_name || ' ' || u.last_name AS employee_name,
       r.role_name,
       r.role_desc,
       p.permission_name
  FROM usr_mgmt_user u
  JOIN usr_mgmt_user_role_mapping rm ON rm.user_id = u.user_id
  JOIN usr_mgmt_role r               ON rm.role_id = r.role_id
  JOIN usr_mgmt_role_permission rp   ON rp.role_id = r.role_id
  JOIN usr_mgmt_permission p         ON rp.permission_id = p.permission_id
 ORDER BY u.first_name;
---- list of roles and permissions (explicit ANSI joins)
SELECT r.role_name,
       r.role_desc,
       p.permission_name
  FROM usr_mgmt_role r
  JOIN usr_mgmt_role_permission rp ON rp.role_id = r.role_id
  JOIN usr_mgmt_permission p       ON rp.permission_id = p.permission_id
 ORDER BY r.role_id;
----- All roles
SELECT r.role_name
  FROM usr_mgmt_role r;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.