text stringlengths 1 1.05M |
|---|
docker build -t codotype-plugin-dev . |
<gh_stars>0
/*****
License
--------------
Copyright © 2017 Bill & Melinda Gates Foundation
The Mojaloop files are made available by the Bill & Melinda Gates Foundation under the Apache License, Version 2.0 (the "License") and you may not use these files except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, the Mojaloop files are distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Contributors
--------------
This is the official list (alphabetical ordering) of the Mojaloop project contributors for this file.
Names of the original copyright holders (individuals or organizations)
should be listed with a '*' in the first column. People who have
contributed from an organization can be listed under the organization
that actually holds the copyright for their contributions (see the
Gates Foundation organization for an example). Those individuals should have
their names indented and be marked with a '-'. Email address can be added
optionally within square brackets <email>.
* Gates Foundation
- Name Surname <<EMAIL>>
* Crosslake
- <NAME> <<EMAIL>>
--------------
******/
"use strict"
import express from "express";
import {ConsoleLogger, ILogger} from "@mojaloop/logging-bc-client-lib";
import {AppPrivileges} from "@mojaloop/security-bc-public-types-lib";
import {AuthorizationAggregate} from "../domain/authorization_agg";
import {IAMAuthorizationAdapter, IAuthorizationRepository} from "../domain/interfaces";
import {FileAuthorizationRepo} from "../infrastructure/file_authorization_repo";
import {
CannotCreateDuplicateAppPrivilegesError, CannotOverrideAppPrivilegesError, CouldNotStoreAppPrivilegesError,
InvalidAppPrivilegesError
} from "../domain/errors";
import {AllPrivilegesResp} from "../domain/types";
import {ExpressRoutes} from "./routes";
// Module-level singletons: logger and Express app are created eagerly;
// the aggregate/repo/routes are wired up later inside start().
const logger: ILogger = new ConsoleLogger();
const app = express();
let authorizationAggregate: AuthorizationAggregate;
//let iamAuthNAdapter:IAMAuthorizationAdapter;
let authNRepo: IAuthorizationRepository;
let routes: ExpressRoutes;
// Registers the body-parsing middleware; must run before the routers are mounted.
function setupExpress() {
app.use(express.json()); // for parsing application/json
app.use(express.urlencoded({extended: true})); // for parsing application/x-www-form-urlencoded
}
// Mounts the application routers plus a catch-all 404 handler.
// Must be called after the routers have been constructed (see start()).
function setupRoutes() {
    app.use("/", routes.MainRouter);
    app.use("/appPrivileges", routes.PrivilegesRouter);
    app.use("/platformRoles", routes.RolesRouter);
    app.use((req, res) => {
        // catch all for unmatched paths.
        // res.send(404) is deprecated since Express 4 (it would try to treat 404
        // as a body); sendStatus sets the HTTP status code and a default body.
        res.sendStatus(404);
    });
}
// Wires up the repository, aggregate and routes, then starts the HTTP server.
// The listen port can be overridden via the SVC_PORT env var (defaults to the
// previous hard-coded 3000, so existing deployments are unaffected).
async function start():Promise<void>{
    authNRepo = new FileAuthorizationRepo("./dist/iamTempStorageFile", logger);
    await authNRepo.init();
    authorizationAggregate = new AuthorizationAggregate(authNRepo, logger);
    routes = new ExpressRoutes(authorizationAggregate, logger);
    setupExpress();
    setupRoutes();
    const port = parseInt(process.env.SVC_PORT ?? "3000", 10);
    // Use the injected logger (consistent with the rest of this file) instead
    // of console.log; the previously-unused `server` local is dropped.
    app.listen(port, () => logger.info(`🚀 Server ready at: http://localhost:${port}`));
}
// Shared SIGINT/SIGTERM handler: logs the signal and terminates the process.
// NOTE(review): nothing asynchronous is awaited before exit — confirm no
// cleanup (repo flush, server close) is expected here.
async function _handle_int_and_term_signals(signal: NodeJS.Signals): Promise<void> {
logger.info(`Service - ${signal} received - cleaning up...`);
process.exit();
}
//catches ctrl+c event
process.on("SIGINT", _handle_int_and_term_signals.bind(this));
//catches program termination event
process.on("SIGTERM", _handle_int_and_term_signals.bind(this));
//do something when app is closing
process.on('exit', () => {
    logger.info("Microservice - exiting...");
});
// start() is async: handle the returned promise so a startup failure is
// logged and exits non-zero instead of becoming an unhandled rejection.
start().catch((err) => {
    logger.error(err);
    process.exit(1);
});
|
import { h, render, Fragment } from "preact";
import { useState, useEffect } from "preact/hooks";
import { auth, useDatabase, update } from "./firebase.js";
import World from "./World.js";
import { hexesInRadius } from "./hexes.js";
// Root Preact component: signs in anonymously with Firebase and renders the
// Game once authenticated, an error screen if sign-in fails, or a
// "Connecting…" placeholder while auth state is unknown.
const App = () => {
const [user, setUser] = useState(null);
const [error, setError] = useState(null);
useEffect(() => {
// Fire anonymous sign-in once on mount; failures are stored for display.
auth.signInAnonymously().catch(function(error) {
setError(error);
console.error(error);
});
// Keep local user state in sync with Firebase auth state changes.
auth.onAuthStateChanged(user => {
if (user) {
setUser(user);
console.log("Signed in as", user.uid, user);
} else {
setUser(null);
console.log("Signed out");
}
});
}, []);
if (error) {
return (
<Fragment>
<h1>Authentication error</h1> <pre>{JSON.stringify(error.message, null, 2)}</pre>
</Fragment>
);
}
return user ? <Game user={user} /> : "Connecting…";
};
render(<App />, document.body);
// Game container: reads the player's region from the database and, on first
// connection (fields loaded but not yet numbers), writes an initial spawn at
// region (0,0) with the surrounding regions marked visible.
const Game = ({ user }) => {
const { uid } = user;
const regionX = useDatabase(`players/${uid}/public/regionX`);
const regionY = useDatabase(`players/${uid}/public/regionY`);
// undefined = value not loaded yet; loaded-but-not-a-number = never spawned.
const hasConnected = regionX !== undefined && regionY !== undefined;
const hasSpawned = typeof regionX === "number" && typeof regionY === "number";
useEffect(() => {
if (hasConnected && !hasSpawned) {
// Spawn coordinates: origin region, origin tile.
const regionX = 0;
const regionY = 0;
const x = 0;
const y = 0;
const visibleRegions = hexesInRadius([regionX, regionY], 1);
// Build a nested {x: {y: true}} visibility map.
// NOTE(review): the loop's destructured [x, y] shadow the spawn x/y above;
// the update() payload below uses the outer (0,0) values — confirm intended.
const canSee = {};
for (const [x, y] of visibleRegions) {
canSee[x] = canSee[x] || {};
canSee[x][y] = true;
}
update({
[`players/${uid}/public`]: {
regionX,
regionY,
x,
y,
canSee,
},
[`players/${uid}/private`]: null,
[`regions/${regionX}/${regionY}/players/${uid}`]: true,
});
}
}, [regionX, regionY]);
return hasConnected && hasSpawned ? <World uid={uid} regionX={regionX} regionY={regionY} /> : null;
};
|
import { ILottery, createLotteryPicker, ILotteryNextOf } from './create-lottery-picker'
import { INextOf } from '../manipulators/next-item-factory'
import { shuffleArray } from './shuffle-array'
/**
 * Wraps a lottery picker so each item is returned at most once per "round".
 * When the underlying picker signals a completed pass, the draw marker flips
 * (implicitly resetting every item to "undrawn") and the pool is reshuffled.
 *
 * Interface note: the pointless `extends Object` constraint was dropped
 * (widening, backward compatible) and the boxed `Boolean` type replaced with
 * the primitive `boolean` per TypeScript convention.
 */
export function createUniqueLotteryPicker<T>(lottery: ILottery<T>) : INextOf<T> {
    // Maps item -> marker value of the round in which it was last drawn.
    const map = new Map<T, boolean>()
    let drawSymbol = true
    const nextItem = createLotteryPicker(lottery, (results) => {
        // Full pass complete: flip the marker and reshuffle for the next round.
        drawSymbol = !drawSymbol
        shuffleArray(results)
    })
    return () => {
        // Keep drawing until we hit an item not yet returned this round.
        while (true) {
            const item = nextItem()
            if (map.get(item) !== drawSymbol) {
                map.set(item, drawSymbol)
                return item
            }
        }
    }
}
|
<reponame>mastoppink/antrianjs
// Express + Socket.IO entry point for the "antrian" (queue) application.
var express = require('express');
var app = express();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var routes = require('./routes');
var favicon = require('serve-favicon');
var path = require('path');
var model = require('./db/db.js');
var port = "80";
app.set('view engine', 'pug');
app.set('views', './views')
app.locals.pretty = true;
app.disable('x-powered-by');
io.on('connection', function(socket){
// "sebarkan" (broadcast): relay data.content to every other client on the
// channel named by data.for.
socket.on('sebarkan', function(data){
socket.broadcast.emit(data.for, data.content);
});
// NOTE(review): the handlers below are empty stubs — confirm whether they
// are intentionally unimplemented or dead registrations.
socket.on('tambahantrian', function(data){
});
socket.on('tambahjenis', function(data){
});
socket.on('tambahpanggil', function(data){
});
socket.on('editsetting', function(data){
});
socket.on('hapuspanggil', function(data){
});
socket.on('hapusjenis', function(data){
});
});
app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')))
app.use(express.static(__dirname + '/public'));
app.use('/bower_components', express.static(__dirname + '/bower_components'));
app.get('/', routes.index);
app.get('/partials/:name', routes.partials);
// Catch-all: serve the SPA index for any other path.
app.get('*', routes.index);
http.listen(port, function(){
console.log("Express server listening on port %d in %s mode", this.address().port, app.settings.env);
});
#
# Copyright 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Register the cm_e7 userdebug build variant with the Android 'lunch' menu.
add_lunch_combo cm_e7-userdebug
|
// Appends every non-url parameter to `url` as "key=value&", in property
// order. Note the original contract is preserved exactly: values are NOT
// URL-encoded and a trailing "&" is left on the result.
const buildRequest = ({ url, ...params }) => {
  let query = url
  for (const key of Object.keys(params)) {
    query = `${query}${key}=${params[key]}&`
  }
  return query
}
export { buildRequest }
|
#!/bin/bash
# SLURM batch job: runs a single-core PE-my.py sequence-tagging experiment
# with the "maxtanh" activation and the hyperparameters given on the command line.
#SBATCH -J Act_maxtanh_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Args: activation, seed/dim, optimizer, depth, dropout, lr, init, extra rate
# (presumed from the script's positional usage — TODO confirm against PE-my.py).
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py maxtanh 413 Nadam 4 0.2437437421212035 0.0023969628402831307 glorot_uniform 0.3
|
# Kubernetes ConfigMap carrying database connection settings.
apiVersion: v1
kind: ConfigMap
metadata:
name: db-configmap
data:
DBUSER: root
# NOTE(review): a plaintext credential in a ConfigMap — this should normally
# live in a Secret; confirm before deploying outside dev.
DBPASSWORD: 123456
// Priority lookup table: taskPriorityConfiguration.spawn[<role>][<task name>]
// maps to a numeric priority consumed by the spawning/scheduling code.
// The "// Primary" annotations mark each role's main task; presumably lower
// numbers mean higher priority — TODO confirm against the consumer.
const taskPriorityConfiguration = {
"spawn":{
"weak_transferer":{
"centralTransfer":2,
},
"transferer":{
"pureTransfer":0, // Primary
"remoteHarvest":106,
"powerHarvest":107,
"remoteTransfer":106,
"remotePickUper":5,
},
"harvester":{ // Primary
"remoteHarvest":6,
"localHarvest":1,
},
"upgrader":{ // Primary
"pureUpgrader":2
},
"worker":{ // Primary
"pureWorker":5
},
"repairer":{ // Primary
"pureRepairer":4,
"remoteRepairer":9,
},
"defender":{ // Primary
"Defend":3
},
"defender_observed":{
"Defend_observed":5,
},
"defender_reserved":{
"Defend_reserved":5,
},
"attacker":{ // Primary
"Attack":7,
"powerHarvest":6,
},
"healer":{
"Attack":107,
"powerHarvest":106,
},
"claimer":{ // Primary
"Claim":7,
},
"traveler":{ // Primary
"Travel":8,
},
"attacker_invader_low":{
"AttackInvaderLow":5,
}
}
}
module.exports = taskPriorityConfiguration
<gh_stars>1-10
// Copyright (c) 2021 Palantir Technologies Inc. All rights reserved.
// Use of this source code is governed by the Apache License, Version 2.0
// that can be found in the LICENSE file.
package gotypes
import (
"go/types"
"github.com/pkg/errors"
"golang.org/x/tools/go/packages"
)
// FindType searches through all definitions in the package for the named type specified by typeName.
func FindType(pkg *packages.Package, typeName string) (types.Object, error) {
	for _, def := range pkg.TypesInfo.Defs {
		if def == nil {
			continue
		}
		// Only *types.TypeName definitions (named types) are candidates.
		tn, ok := def.(*types.TypeName)
		if ok && tn.Name() == typeName {
			return tn, nil
		}
	}
	return nil, errors.Errorf("package %s: did not find type %s", pkg.ID, typeName)
}
|
<reponame>maciej-plonka/tournament-app
import {Player, PrismaClient, Tournament} from "@prisma/client";
import {GetServerSideProps, NextPage} from "next";
import {FormEvent, useCallback, useMemo, useState} from "react";
import axios from "axios";
import {canGenerateAvailableTeamSizes, generateAvailableTeamSizes} from "../../shared/tournament/availableTeamSizes";
import {useInput} from "../../hooks/useInput";
import {usePlayerMultiSelect} from "../../hooks/usePlayerMultiSelect";
import {MultiSelect} from "@/components/MultiSelect";
import {useTeamSizeRadioInput} from "../../hooks/useRadioInput";
import {RadioInput} from "@/components/RadioInput";
import {Input} from "@/components/Input";
import {ApiResponse} from "../../shared/apiResponse";
import {createRepository} from "../../server/repository";
// Server-side: load every player so the new-tournament form can offer them.
// NOTE(review): a fresh PrismaClient is constructed per request and never
// disconnected — consider a shared client to avoid connection buildup; confirm.
export const getServerSideProps: GetServerSideProps<NewTournamentProps> = async () => {
const repository = createRepository(new PrismaClient());
const availablePlayers = await repository.getAllPlayers()
return {
props: {
availablePlayers
}
}
}
// Validators return an error message string when invalid, or a falsy value
// when the field is valid (consumed by the form hooks below).
const titleValidators = [
(title: string) => !title.length && 'Title should not be empty'
]
const playersValidators = [
(players: readonly Player[]) => !canGenerateAvailableTeamSizes(players.length) && "Cannot split players into proper teams"
]
const teamSizeValidators = [
(teamSize: number) => teamSize == 0 && "Please select valid team size"
]
// Form-state hook for the new-tournament page: tracks title, selected players
// and team size, derives validation errors, and posts the payload to the
// create API on submit. Returns prop bundles for the form and its inputs.
function useNewTournament(availablePlayers: ReadonlyArray<Player>) {
// "changed" gates submission so an untouched form cannot be posted.
const [changed, setChanged] = useState(false)
const [title, titleProps, titleErrors] = useInput('', titleValidators);
const [players, playersProps, playersErrors] = usePlayerMultiSelect([], availablePlayers, playersValidators)
const availableTeamSizes = useMemo(() => generateAvailableTeamSizes(players.length), [players]);
const [teamSize, teamSizeProps, teamSizeErrors] = useTeamSizeRadioInput(0, availableTeamSizes, teamSizeValidators)
const hasErrors = useMemo(
() => (teamSizeErrors.length + playersErrors.length + titleErrors.length) > 0,
[titleErrors, playersErrors, teamSizeErrors]
)
const payload = useMemo(() => ({title, players, teamSize}), [title, players, teamSize])
const handleSubmit = useCallback(async (event: FormEvent) => {
event.preventDefault()
// Only submit when something changed and every field validates.
if (changed && !hasErrors)
try {
const response = await axios.post('/api/tournament/create', payload);
const data = response.data as ApiResponse<Tournament>
switch (data.type) {
case "success": {
// Navigate to the freshly created tournament.
window.location.href = `/tournament/${data.payload?.id}`
break;
}
case "error": {
alert(data.message)
break;
}
}
} catch (error) {
alert(error)
}
}, [changed, hasErrors, payload]);
const formProps = useMemo(() => ({
onSubmit: handleSubmit,
onChange: () => setChanged(true)
}), [handleSubmit]);
const canSubmit = useMemo(() => changed && !hasErrors, [hasErrors, changed])
return {
formProps,
titleProps,
playersProps,
teamSizeProps,
canSubmit,
}
}
// Props delivered by getServerSideProps: the full player roster to pick from.
interface NewTournamentProps {
availablePlayers: ReadonlyArray<Player>
}
// Page component: renders the new-tournament form (title, player multi-select,
// team size) with all state and submission handled by useNewTournament.
const NewTournament: NextPage<NewTournamentProps> = ({availablePlayers}) => {
const {titleProps, formProps, playersProps, teamSizeProps, canSubmit,} = useNewTournament(availablePlayers);
return (
<div className="container mx-auto py-4">
<div className="shadow-md rounded-xl p-4">
<h1 className="text-3xl text-gray-700">New tournament</h1>
<form className="mt-4 flex flex-col" {...formProps}>
<div className="w-full sm:w-1/2 lg:1/4 mb-4 flex flex-col">
<Input
id="title"
label="Title"
{...titleProps} />
</div>
<div className="w-full sm:w-1/2 lg:1/4 mb-4">
<MultiSelect
id="players"
label="Players"
{...playersProps} />
</div>
<div className="w-full sm:w-1/2 lg:1/4 mb-4">
<RadioInput
id="teamSize"
label="Players per team"
{...teamSizeProps} />
</div>
<button
className="block mx-auto sm:mx-0 w-36 px-3 py-1.5 rounded bg-green-500 disabled:bg-gray-300 text-white"
disabled={!canSubmit}
type="submit">
Create
</button>
</form>
</div>
</div>
)
}
export default NewTournament
|
#!/bin/bash
#Determine the OS:
osname=$(uname -s)
GotInfo=0
##################
# Interactively collect Surround SCM connection settings into the globals
# scm_un / scm_pw / scm_address / scm_port, applying defaults on empty input.
# NOTE(review): the password is echoed while typed (no 'read -s') — confirm acceptable.
GetSCMInfo() {
printf "Enter Surround username [Administrator]: "
read -r scm_un
scm_un=${scm_un:-Administrator}
printf "Enter password [ ]: "
read -r scm_pw
printf "Enter server address [localhost]: "
read -r scm_address
scm_address=${scm_address:-localhost}
printf "Enter port [4900]: "
read -r scm_port
scm_port=${scm_port:-4900}
# Used in case the user skips adding files
GotInfo=1
}
##################
# List every mainline branch on the configured Surround server
# using the credentials gathered by GetSCMInfo.
PrintMainlines() {
echo ""
echo "Mainlines currently on this server:"
echo "-----------------------------------"
sscm lsmainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
}
##################
# Append "Version $2" to every file directly inside directory $1.
#   $1 - directory whose files to edit
#   $2 - version string to append
# Quoting "$1" and "$f" (the original expanded them unquoted via $FILES)
# makes this safe for paths containing spaces.
EditFiles() {
for f in "$1"/*
do
echo "Editing $f"
echo "Version $2" >> "$f"
done
}
# Drive the full Surround SCM exercise: set up working directories, create
# baseline/snapshot/workspace branches, perform checkouts/checkins/labels/
# renames with changelists, and optionally repeat for another mainline.
# Fixes vs. original: two echoed commands now match what is actually executed
# (the MPS/archive checkout and the Production_051203 rm), and all user input
# uses 'read -r' so backslashes are not mangled.
main() {
#get mainlines
if [ $GotInfo -eq 0 ]
then
GetSCMInfo
fi
mainline=$(sscm lsmainline -y$scm_un:$scm_pw -z$scm_address:$scm_port | head -n 1)
PrintMainlines
printf "Enter name of mainline to add files to [$mainline]: "
read -r scm_mainline
scm_mainline=${scm_mainline:-$mainline}
if [ "$osname" = "Darwin" ]
then
#Create Mac working directories
mkdir /Users/seapine/WDs
mkdir /Users/seapine/WDs/$scm_mainline-Mainline-$scm_un
wd_ml="/Users/seapine/WDs/$scm_mainline-Mainline-$scm_un"
mkdir /Users/seapine/WDs/$scm_mainline-Baseline-$scm_un
wd_bl="/Users/seapine/WDs/$scm_mainline-Baseline-$scm_un"
else
#Create Linux working directories
mkdir /home/seapine/WDs
mkdir /home/seapine/WDs/$scm_mainline-Mainline-$scm_un
wd_ml="/home/seapine/WDs/$scm_mainline-Mainline-$scm_un"
mkdir /home/seapine/WDs/$scm_mainline-Baseline-$scm_un
wd_bl="/home/seapine/WDs/$scm_mainline-Baseline-$scm_un"
fi
#Names of branches
Baseline="Baseline-$scm_mainline-$scm_un"
Snapshot="Snapshot-$scm_mainline-$scm_un"
Workspace="Workspace-$scm_mainline-$scm_un"
#Name of paths for branchs
path_bl="$scm_mainline/Agents"
path_ws="$scm_mainline/WFM"
#Set working directory of mainline
echo "sscm workdir $wd_ml $scm_mainline -b$scm_mainline -r -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm workdir $wd_ml $scm_mainline -b$scm_mainline -r -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Get files to working directory
echo "sscm get '*' -b$scm_mainline -p$scm_mainline -q -r -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm get "*" -b$scm_mainline -p$scm_mainline -q -r -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Create Baseline Branch from Agents repo
echo "sscm mkbranch $Baseline $path_bl -b$scm_mainline -c- -sbaseline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm mkbranch $Baseline $path_bl -b$scm_mainline -c- -sbaseline -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Set working directory of Baseline
echo "sscm workdir $wd_bl $path_bl -b$Baseline -r -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm workdir $wd_bl $path_bl -b$Baseline -r -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Get files to working directory
echo "sscm get '*' -b$Baseline -p$path_bl -q -r -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm get "*" -b$Baseline -p$path_bl -q -r -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Remove and destroy UserImport recursively in baseline branch
echo "sscm rm UserImport -b$Baseline -c- -d -f -p$path_bl -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm rm UserImport -b$Baseline -c- -d -f -p$path_bl -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Rebase Baseline
echo "sscm rebase $Baseline -p$path_bl -c- -s -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm rebase $Baseline -p$path_bl -c- -s -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Check out zz_iModelPrototype recursively on Mainline
echo "sscm checkout zz_iModelPrototype -b$scm_mainline -p$scm_mainline -c- -f -r -q -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm checkout zz_iModelPrototype -b$scm_mainline -p$scm_mainline -c- -f -r -q -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Check in zz_iModelPrototype recursively on Mainline and update version number
echo "sscm checkin zz_iModelPrototype -b$scm_mainline -p$scm_mainline -c- -q -r -u -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm checkin zz_iModelPrototype -b$scm_mainline -p$scm_mainline -c- -q -r -u -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Label WEB-INF on Mainline with label: Label_WEB-INF
echo "sscm label '*' -b$scm_mainline -p$scm_mainline/WEB-INF -c- -lLabel_WEB-INF -r -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm label "*" -b$scm_mainline -p$scm_mainline/WEB-INF -c- -lLabel_WEB-INF -r -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Check out MixedAuth on Baseline
echo "sscm checkout MixedAuth -b$Baseline -p$path_bl -c- -f -q -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm checkout MixedAuth -b$Baseline -p$path_bl -c- -f -q -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Edit files in MixedAuth with version 2
WD_MA="$wd_bl/MixedAuth"
EditFiles $WD_MA 2
#Check in ^^
echo "sscm checkin MixedAuth -b$Baseline -p$path_bl -c- -f -q -u -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm checkin MixedAuth -b$Baseline -p$path_bl -c- -f -q -u -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Promote Baseline branch
echo "sscm promote $Baseline -c- -p$path_bl -s -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm promote $Baseline -c- -p$path_bl -s -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Remove directory /TMS/Conversion on Mainline recursively
echo "sscm rm Conversion -b$scm_mainline -c- -f -p$scm_mainline/TMS -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm rm Conversion -b$scm_mainline -c- -f -p$scm_mainline/TMS -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Check out MPS/archive on Mainline
#FIX: the echoed command previously said: checkout '*' -p$scm_mainline/MPS/archive
#which did not match the command actually run below.
echo "sscm checkout archive -b$scm_mainline -p$scm_mainline/MPS -c- -f -r -q -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm checkout archive -b$scm_mainline -p$scm_mainline/MPS -c- -f -r -q -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Edit files in archive with version 2
WD_A=$wd_ml/MPS/archive
chmod 777 $WD_A/*
EditFiles $WD_A 2
#Check in ^^ with changelist "$scm_un:$scm_pw: date_time"
Name_CL="$scm_un:$scm_pw:$(date +%D_%T)"
echo "sscm checkin archive -b$scm_mainline -p$scm_mainline/MPS -c- -f -r -q -x$Name_CL -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm checkin archive -b$scm_mainline -p$scm_mainline/MPS -c- -f -r -q -x$Name_CL -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Get CLID from user
echo "sscm lschangelist -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm lschangelist -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
printf "\nSurround insists on using changelist IDs to commit a changelist...\n"
printf "Please enter the changelist ID: "
read -r CLID
echo "Thank you"
#Commit changelist ^^
echo "sscm commitchangelist $CLID -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm commitchangelist $CLID -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Sleep 2 second for changelist
echo "sleep 2"
sleep 2
#Remove directory ITSamples/VBSamples/ajax on Mainline recursively with changelist
Name_CL="$scm_un:$scm_pw:$(date +%D_%T)"
echo "sscm rm ajax -b$scm_mainline -c- -f -p$scm_mainline/ITSamples/VBSamples -q -x$Name_CL -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm rm ajax -b$scm_mainline -c- -f -p$scm_mainline/ITSamples/VBSamples -q -x$Name_CL -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Get CLID from user
echo "sscm lschangelist -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm lschangelist -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
printf "\nPlease enter the changelist ID: "
read -r CLID
echo "Thank you"
#Commit changelist ^^
echo "sscm commitchangelist $CLID -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm commitchangelist $CLID -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Share iCert/Templates to ICC4 on Mainline
echo "sscm share $scm_mainline/iCert/Templates -b$scm_mainline -c- -r -p$scm_mainline/ICC4 -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm share $scm_mainline/iCert/Templates -b$scm_mainline -c- -r -p$scm_mainline/ICC4 -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Share iCert/Templates to ICC3_GUI_3.6.X on Mainline
echo "sscm share $scm_mainline/iCert/Templates -b$scm_mainline -c- -r -p$scm_mainline/ICC3_GUI_3.6.X -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm share $scm_mainline/iCert/Templates -b$scm_mainline -c- -r -p$scm_mainline/ICC3_GUI_3.6.X -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Break share in ICC3_GUI_3.6.X on Mainline
#echo "sscm breakshare '*' -b$scm_mainline -c- -p$scm_mainline/ICC3_GUI_3.6.X/Templates -y$scm_un:$scm_pw -z$scm_address:$scm_port"
#sscm breakshare "*" -b$scm_mainline -c- -r -p$scm_mainline/ICC3_GUI_3.6.X/Templates -y$scm_un:$scm_pw -z$scm_address:$scm_port
#sleep 2 seconds for changelist
echo "sleep 2"
sleep 2
#Rename DMX/ttutil.rb to ttutil_renamed.rb with changelist on Mainline
Name_CL="$scm_un:$scm_pw:$(date +%D_%T)"
echo "sscm rename ttutil.rb ttutil_renamed.rb -b$scm_mainline -p$scm_mainline/DMX -c- -x$Name_CL -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm rename ttutil.rb ttutil_renamed.rb -b$scm_mainline -p$scm_mainline/DMX -c- -x$Name_CL -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Get CLID from user
echo "sscm lschangelist -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm lschangelist -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
printf "\nPlease enter the changelist ID: "
read -r CLID
echo "Thank you"
#Commit changelist
echo "sscm commitchangelist $CLID -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm commitchangelist $CLID -p$scm_mainline -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Create Snapshot branch of the whole Mainline
echo "sscm mkbranch $Snapshot $scm_mainline -b$scm_mainline -c- -ssnapshot -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm mkbranch $Snapshot $scm_mainline -b$scm_mainline -c- -ssnapshot -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Create Workspace branch of WFM on Mainline
echo "sscm mkbranch $Workspace $scm_mainline/WFM -b$scm_mainline -c- -sworkspace -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm mkbranch $Workspace $scm_mainline/WFM -b$scm_mainline -c- -sworkspace -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Remove directory Production_051203 on Workspace branch
#FIX: the echoed command previously included -r but the executed command does
#not recurse; the echo now reflects the actual command.
echo "sscm rm Production_051203 -b$Workspace -c- -f -p$path_ws -y$scm_un:$scm_pw -z$scm_address:$scm_port"
sscm rm Production_051203 -b$Workspace -c- -f -p$path_ws -y$scm_un:$scm_pw -z$scm_address:$scm_port
#Repeat for additional mainlines
printf "Would you like to perform these actions to another mainline? [y/n]: "
read -r answer
if [ "$answer" = "y" ] || [ "$answer" = "Y" ]
then
echo "main"
main
else
exit 0
fi
}
#Run the main program at least once
main
|
RESTSERVER=localhost
# Normal operation: delete public IP "cbpublicip01" on connection aws-config01.
curl -X DELETE http://$RESTSERVER:1024/publicip/cbpublicip01?connection_name=aws-config01
|
import { Form } from "./form";
// Accounts UI module (revealing-module over jQuery): loads the rendered
// account list from the server and wires up the create-account form.
export var Account = (function () {
var entity = {};
// Fetch the server-rendered accounts list and inject it into .accounts-list.
entity.showAccounts = function() {
var container = $(".accounts-list");
$.ajax({
type: 'GET',
url: container.data("action"),
success: function(response) {
container.html(response);
},
});
}
// Submit the create-account form via AJAX, then refresh the list.
// NOTE(review): no error callback — on failure the button stays in the
// "waiting" state set by Form.waiting; confirm whether that is acceptable.
entity.addAccount = function() {
var btn = $(".btn-create-account");
var createAccount = $(".frm-create-account");
Form.waiting(btn);
$.ajax({
type: 'POST',
url: createAccount.attr("action"),
data: createAccount.serialize(),
success: function(response) {
Form.ready(btn);
entity.showAccounts();
},
});
return false;
}
// Intercept native form submission so addAccount handles it instead.
entity.attachEvents = function() {
$(".frm-create-account").submit(function() {
entity.addAccount();
return false;
});
}
// Entry point: initial list load + event wiring.
entity.run = function() {
entity.showAccounts();
entity.attachEvents();
}
return entity;
}());
#!/bin/bash
#
# File: collect_node_diag.sh
#
# Created: Wednesday, May 22 2019
# Modified: $Format:%cD$
# Hash: $Format:%h$
#
# This script collects diagnostic for individual node
##
# Print command-line help for this diagnostic-collection script.
function usage() {
echo "Usage: $0 -t <type> [options] [path]"
echo " ----- Required --------"
echo " -t type - valid choices are \"coss\", \"ddac\", \"dse\" "
echo " ----- Options --------"
echo " -c cqlsh_options - e.g \"-u user -p password\" etc. Ensure you enclose with \""
echo " -n nodetool_options - options to pass to nodetool. Syntax the same as \"-c\""
echo " -d dsetool_options - options to pass to dsetool. Syntax the same as \"-c\""
echo " -p pid - PID of DSE or DDAC/Cassandra process"
echo " -f file_name - name of resulting file"
echo " -k keystore_ssl_info - collect keystore and truststore information"
echo " -i insights - collect only data for DSE Insights"
echo " -I insights_dir - directory to find the insights .gz files"
echo " -o output_dir - where to put generated files. Default: /var/tmp"
echo " -m collection_mode - light, normal, extended. Default: normal"
echo " -v - verbose output"
echo " -z - don't execute commands that require sudo"
echo " -P path - top directory of COSS, DDAC or DSE installation (for tarball installs)"
echo " -e timeout - e.g. \"-e 600\" allow for a longer timeout on operations"
}
#echo "Got args $*"
# ----------
# Setup vars
# ----------
# Globals populated by option parsing and later by detect_install/set_paths.
VERBOSE=""
NT_OPTS=""
COLLECT_SSL=""
CQLSH_OPTS=""
DT_OPTS=""
PID=""
RES_FILE=""
INSIGHTS_MODE=""
INSIGHTS_DIR=""
IS_COSS=""
IS_DSE=""
IS_TARBALL=""
IS_PACKAGE=""
OUTPUT_DIR="/var/tmp"
NODE_ADDR=""
CONN_ADDR=""
CONN_PORT=9042
ROOT_DIR=""
DATA_DIR=""
CONF_DIR=""
DSE_CONF_DIR=""
LOG_DIR=""
TMP_DIR=""
OLDWD="$(pwd)"
HOST_OS="$(uname -s)"
JCMD="$JAVA_HOME/bin/jcmd"
DEFAULT_INSIGHTS_DIR="/var/lib/cassandra/insights_data/insights"
DEFAULT_MCAC_DIR="/var/lib/cassandra/mcac_data/insights"
MODE="normal"
NOSUDO=""
JMX_OPTS=""
# NOTE(review): ROOT_DIR is initialised twice (also above) — harmless but redundant.
ROOT_DIR=""
TIMEOUT="120"
# ---------------
# Parse arguments
# ---------------
while getopts ":hzivke:c:n:p:f:d:o:t:I:m:P:" opt; do
case $opt in
n) NT_OPTS="$OPTARG"
;;
c) CQLSH_OPTS="$OPTARG"
;;
p) PID="$OPTARG"
;;
f) RES_FILE="$OPTARG"
;;
d) DT_OPTS="$OPTARG"
;;
o) OUTPUT_DIR="$OPTARG"
;;
i) INSIGHTS_MODE="true"
;;
I) INSIGHTS_DIR="$OPTARG"
;;
k) COLLECT_SSL="true"
;;
z) NOSUDO="true"
;;
m) MODE="$OPTARG"
if [ "$MODE" != "normal" ] && [ "$MODE" != "extended" ] && [ "$MODE" != "light" ]; then
echo "Incorrect collection mode: $MODE"
usage
exit 1
fi
;;
t) TYPE=$OPTARG
;;
v) VERBOSE=true
;;
P) ROOT_DIR="$OPTARG"
;;
e) TIMEOUT="$OPTARG"
;;
h) usage
exit 0
;;
*) echo "Unknown flag '$opt'"
usage
exit 1
;;
esac
done
shift "$((OPTIND -1))"
mkdir -p "${OUTPUT_DIR}"
[ -n "${ROOT_DIR}" ] && echo "Using ${ROOT_DIR} as root dir for DSE/DDAC/C*"
# settings overridable via environment variables
if [ "$MODE" = "extended" ]; then
IOSTAT_LEN="${IOSTAT_LEN:-30}"
else
IOSTAT_LEN="${IOSTAT_LEN:-5}"
fi
# Prefix long-running commands with GNU timeout when it is available.
MAYBE_RUN_WITH_TIMEOUT=""
if [ -n "$(command -v timeout)" ]; then
MAYBE_RUN_WITH_TIMEOUT="timeout --foreground $TIMEOUT"
fi
# Emit a UTC-timestamped debug line, but only when verbose mode (-v) is on.
function debug {
if [ -z "$VERBOSE" ]; then
return 0
fi
DT="$(date -u '+%Y-%m-%dT%H:%M:%SZ')"
echo "[${DT}]: $1"
}
# Determine the node's client-facing address/port (CONN_ADDR/CONN_PORT) and
# inter-node address (NODE_ADDR) from cassandra.yaml, falling back to the
# configured interface's address or finally the hostname.
function get_node_ip {
CONN_ADDR="$(grep -E '^(native_transport_broadcast_address|broadcast_rpc_address): ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)[ ]*$|\1|'|head -n 1|tr -d "'")"
if [ -z "$CONN_ADDR" ]; then
CONN_ADDR="$(grep -E '^(native_transport_address|rpc_address): ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)[ ]*$|\1|'|head -n 1|tr -d "'")"
fi
if [ -z "$CONN_ADDR" ]; then
# No explicit address: resolve the configured client interface instead.
IFACE="$(grep -E '^(native_transport_interface|rpc_interface): ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)[ ]*$|\1|'|head -n 1|tr -d "'")"
if [ -n "$IFACE" ]; then
if [ "$HOST_OS" = "Linux" ]; then
CONN_ADDR="$(ifconfig "$IFACE"|grep 'inet addr:'|sed -e 's|^.*inet addr:\([^ ]*\) .*[ ]*$|\1|')"
else
CONN_ADDR="$(ipconfig getifaddr "$IFACE")"
fi
fi
fi
# extract listen address
NODE_ADDR="$(grep -e '^broadcast_address: ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)[ ]*$|\1|'|tr -d "'")"
if [ -z "$NODE_ADDR" ]; then
IFACE="$(grep -E '^listen_interface: ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)[ ]*$|\1|'|tr -d "'")"
if [ -n "$IFACE" ]; then
if [ "$HOST_OS" = "Linux" ]; then
NODE_ADDR="$(ifconfig "$IFACE"|grep 'inet addr:'|sed -e 's|^.*inet addr:\([^ ]*\) .*[ ]*$|\1|')"
else
NODE_ADDR="$(ipconfig getifaddr "$IFACE")"
fi
fi
if [ -z "$NODE_ADDR" ]; then
NODE_ADDR="$(grep -e '^listen_address: ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)[ ]*$|\1|'|tr -d "'")"
if [ -z "$NODE_ADDR" ] || [ "$NODE_ADDR" = "127.0.0.1" ] || [ "$NODE_ADDR" = "localhost" ]; then
# echo "Can't detect node's address from cassandra.yaml, or it's set to localhost. Trying to use the 'hostname'"
if [ "$HOST_OS" = "Linux" ]; then
NODE_ADDR="$(hostname -i)"
else
NODE_ADDR="$(hostname)"
fi
fi
fi
fi
debug "Native (RPC) address=$CONN_ADDR, Listen address=$NODE_ADDR"
# Fall back to the listen address for client connections if nothing else found.
if [ -z "$CONN_ADDR" ]; then
CONN_ADDR="$NODE_ADDR"
fi
if [ -z "$CONN_ADDR" ]; then
echo "Can't detect node's address..."
exit 1
fi
TSTR="$(grep -e '^native_transport_port: ' "$CONF_DIR/cassandra.yaml" |sed -e 's|^[^:]*:[ ]*\([^ ]*\)$|\1|'|tr -d "'")"
if [ -n "$TSTR" ]; then
CONN_PORT="$TSTR"
fi
debug "NODE_ADDR=$NODE_ADDR CONN_ADDR=$CONN_ADDR CONN_PORT=$CONN_PORT"
}
# Derive TMP_DIR, LOG_DIR, CONF_DIR, DSE_CONF_DIR and BIN_DIR from the install
# flavour flags set by detect_install, validating that each directory exists.
# Fix vs. original: "$TMP_DIR" is quoted in mkdir so paths with spaces work.
function set_paths {
# tmp and output paths
if [ -d "$OUTPUT_DIR" ]; then
TMP_DIR="$OUTPUT_DIR/diag.$$"
else
TMP_DIR="/var/tmp/diag.$$"
fi
mkdir -p "$TMP_DIR"
# log paths
if [ -z "$LOG_DIR" ] && [ -n "$IS_PACKAGE" ]; then
LOG_DIR=/var/log/cassandra
elif [ -z "$LOG_DIR" ] && [ -n "$IS_TARBALL" ]; then
LOG_DIR=$ROOT_DIR/logs
fi
# config paths
if [ -z "$CONF_DIR" ]; then
if [ -n "$IS_TARBALL" ] && [ -n "$IS_COSS" ]; then
CONF_DIR="$ROOT_DIR/conf"
elif [ -n "$IS_TARBALL" ] && [ -n "$IS_DSE" ]; then
CONF_DIR="$ROOT_DIR/resources/cassandra/conf"
DSE_CONF_DIR="$ROOT_DIR/resources/dse/conf"
# DSE package
elif [ -n "$IS_PACKAGE" ] && [ -n "$IS_DSE" ]; then
CONF_DIR=/etc/dse/cassandra
DSE_CONF_DIR=/etc/dse/
# COSS package
elif [ -n "$IS_PACKAGE" ] && [ -n "$IS_COSS" ]; then
CONF_DIR=/etc/cassandra
fi
fi
# binary paths
# NOTE(review): /opt/cassandra/bin is assumed for package installs — confirm
# it matches the target distributions.
if [ -z "$BIN_DIR" ]; then
if [ -n "$IS_TARBALL" ]; then
BIN_DIR="$ROOT_DIR/bin"
elif [ -n "$IS_PACKAGE" ]; then
BIN_DIR=/opt/cassandra/bin
fi
fi
debug "CONF_DIR=${CONF_DIR}"
debug "DSE_CONF_DIR=${DSE_CONF_DIR}"
debug "BIN_DIR=${BIN_DIR}"
debug "LOG_DIR=${LOG_DIR}"
debug "TMP_DIR=${TMP_DIR}"
[[ -d "$CONF_DIR" ]] || { echo "Missing CONF_DIR"; exit 1; }
[[ -z "${DSE_CONF_DIR}" || -d "$DSE_CONF_DIR" ]] || { echo "Missing DSE_CONF_DIR"; exit 1; }
[[ -d "$BIN_DIR" ]] || { echo "Missing BIN_DIR"; exit 1; }
[[ -d "$TMP_DIR" ]] || { echo "Missing TMP_DIR"; exit 1; }
}
# Determine the product (DDAC / COSS / DSE) and install flavor
# (package vs tarball) from $TYPE and the filesystem, setting the
# IS_TARBALL / IS_PACKAGE / IS_COSS / IS_DSE flags and ROOT_DIR.
# Exits via usage() when nothing recognizable is found.
function detect_install {
# DDAC Install
if [ "$TYPE" == "ddac" ]; then
if [ -d "$ROOT_DIR" ] && [ -d "$ROOT_DIR/conf" ]; then
IS_TARBALL="true"
IS_COSS="true" # structure of DDAC is the same as OSS
else
echo "DDAC install: no tarball directory found, or was specified."
usage
exit 1
fi
# COSS Install
elif [ "$TYPE" == "coss" ]; then
IS_COSS="true"
# COSS package install
if [ -z "$ROOT_DIR" ] && [ -d "/etc/cassandra" ]; then
IS_PACKAGE="true"
ROOT_DIR="/etc/cassandra"
debug "COSS install: package directories successfully found. Proceeding..."
# COSS tarball install
elif [ -d "$ROOT_DIR" ] && [ -d "$ROOT_DIR/conf" ]; then
IS_TARBALL="true"
debug "COSS install: tarball directories successfully found. Proceeding..."
else
echo "COSS install: no package or tarball directories found, or no tarball directory specified."
usage
exit 1
fi
# DSE install
elif [ "$TYPE" == "dse" ]; then
IS_DSE="true"
# DSE package install
debug "DSE install: Checking install type..."
if [ -z "$ROOT_DIR" ] && [ -d "/etc/dse" ] && [ -f "/etc/default/dse" ] && [ -d "/usr/share/dse/" ]; then
IS_PACKAGE="true"
ROOT_DIR="/etc/dse"
debug "DSE install: package directories successfully found. Proceeding..."
# DSE tarball install
elif [ -d "$ROOT_DIR" ] && [ -d "$ROOT_DIR/resources/cassandra/conf" ] && [ -d "$ROOT_DIR/resources/dse/conf" ]; then
IS_TARBALL="true"
debug "DSE install: tarball directories successfully found. Proceeding..."
else
echo "DSE install: no package or tarball directories found, or no tarball directory specified."
usage
exit 1
fi
else
# No install type selected
usage
exit 1
fi
# Select user (defaults to current user for tarball, "cassandra" for package)
if [ -z "$CASS_USER" ]; then
if [ -n "$IS_PACKAGE" ]; then
CASS_USER="cassandra"
else
CASS_USER=$USER
fi
fi
}
function get_pid {
    # Find the PID of the running Cassandra/DSE JVM (unless the caller already
    # supplied one) and capture its full command line for later parsing.
    local pattern
    if [ -z "$PID" ]; then
        if [ -n "$IS_COSS" ] ; then
            pattern="org.apache.cassandra.service.CassandraDaemon"
        else
            pattern="com.datastax.bdp.DseModule"
        fi
        # field 2 of `ps -aef` output is the process id
        PID="$(ps -aef | grep "$pattern" | grep java | awk '{print $2}')"
    fi
    if [ -n "$PID" ]; then
        if [ -n "$IS_DSE" ]; then
            pattern="com.datastax.bdp.DseModule"
        else
            pattern="CassandraDaemon"
        fi
        ps -aef | grep "$PID" | grep "$pattern" > "$DATA_DIR/java_cmdline"
    fi
}
# try to detect if we're running in the cloud, and then collect more cloud-specific information
# Detect whether we're on AWS / GCE / Azure / Oracle and, if so, fetch
# instance metadata into os-metrics/cloud_info. Detection is tried in
# cheapest-first order: hypervisor uuid, dmidecode BIOS string, DMI
# product uuid, and finally the cloud metadata HTTP endpoints (2s timeout).
function collect_cloud_info() {
CLOUD="none"
if [ -f /sys/hypervisor/uuid ]; then
# Xen-based EC2 instances expose a uuid starting with "ec2"
if [[ "$(cat /sys/hypervisor/uuid)" =~ ec2.* ]]; then
CLOUD="AWS"
fi
fi
# dmidecode needs root, so skip it when sudo is disallowed
if [ "$CLOUD" = "none" ] && [ -n "$(command -v dmidecode)" ] && [ -z "$NOSUDO" ]; then
BIOS_INFO="$(sudo dmidecode -s bios-version)"
if [[ "$BIOS_INFO" =~ .*amazon.* ]]; then
CLOUD="AWS"
elif [[ "$BIOS_INFO" =~ Google.* ]]; then
CLOUD="GCE"
elif [[ "$BIOS_INFO" =~ .*OVM.* ]]; then
CLOUD="Oracle"
fi
fi
# Nitro-based EC2 instances expose "EC2" in the DMI product uuid
if [ "$CLOUD" = "none" ] && [ -r /sys/devices/virtual/dmi/id/product_uuid ]; then
if [ "$(head -c 3 /sys/devices/virtual/dmi/id/product_uuid)" == "EC2" ]; then
CLOUD="AWS"
fi
fi
AZ_API_VERSION="2019-11-01"
# Last resort: probe the link-local metadata services directly
if [ "$CLOUD" = "none" ]; then
if curl -s -m 2 http://169.254.169.254/latest/dynamic/instance-identity/document 2>&1 |grep '"availabilityZone"' > /dev/null ; then
CLOUD="AWS"
elif curl -s -m 2 -H Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/zone > /dev/null 2>&1 ; then
CLOUD="GCE"
elif curl -s -m 2 -H Metadata:true "http://169.254.169.254/metadata/instance?api-version=$AZ_API_VERSION" 2>&1 |grep "azEnvironment" > /dev/null ; then
CLOUD="Azure"
fi
fi
debug "detected cloud provider: $CLOUD"
echo "cloud provider: $CLOUD" > "$DATA_DIR/os-metrics/cloud_info"
if [ "$CLOUD" = "AWS" ]; then
curl -s http://169.254.169.254/latest/dynamic/instance-identity/document > "$DATA_DIR/os-metrics/cloud_aws"
{
echo "instance type: $(curl -s http://169.254.169.254/latest/meta-data/instance-type)"
echo "availability zone: $(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone)"
echo "public hostname: $(curl -s http://169.254.169.254/latest/meta-data/public-hostname)"
echo "public IP: $(curl -s http://169.254.169.254/latest/meta-data/public-ipv4)"
echo "private hostname: $(curl -s http://169.254.169.254/latest/meta-data/hostname)"
echo "private IP: $(curl -s http://169.254.169.254/latest/meta-data/local-ipv4)"
} >> "$DATA_DIR/os-metrics/cloud_info"
fi
if [ "$CLOUD" = "GCE" ]; then
{
# the sed strips the "projects/.../machineTypes/" prefix, keeping the last path element
echo "instance type: $(curl -s -H Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/machine-type|sed -e 's|^.*/\([^/]*\)$|\1|')"
echo "availability zone: $(curl -s -H Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/zone|sed -e 's|^.*/\([^/]*\)$|\1|')"
# echo "public hostname: "
echo "public IP: $(curl -s -H Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/access-configs/0/external-ip)"
echo "private hostname: $(curl -s -H Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/hostname)"
echo "private IP: $(curl -s -H Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/ip)"
} >> "$DATA_DIR/os-metrics/cloud_info"
fi
if [ "$CLOUD" = "Azure" ]; then
FNAME=$DATA_DIR/os-metrics/cloud_azure
curl -s -H Metadata:true "http://169.254.169.254/metadata/instance?api-version=$AZ_API_VERSION" > $FNAME
{
# crude JSON field extraction via sed from the saved metadata document
echo "instance type: $(sed -e 's|^.*"vmSize":"\([^"]*\)".*$|\1|' $FNAME)"
echo "availability zone: $(sed -e 's|^.*"zone":"\([^"]*\)".*$|\1|' $FNAME)"
echo "public IP: $(sed -e 's|^.*"publicIpAddress":"\([^"]*\)".*$|\1|' $FNAME)"
echo "private IP: $(sed -e 's|^.*"privateIpAddress":"\([^"]*\)".*$|\1|' $FNAME)"
} >> "$DATA_DIR/os-metrics/cloud_info"
fi
}
# Collects OS info
# Collects OS-level information (kernel, memory, disks, network, NTP/chrony,
# JVM properties, data/commitlog layout) into $DATA_DIR. Most sub-collections
# are best-effort: missing tools produce an advisory message instead of failing.
function collect_system_info() {
debug "Collecting OS level info..."
if [ "$HOST_OS" = "Linux" ]; then
if [ -f /etc/lsb-release ]; then
cp /etc/lsb-release "$DATA_DIR/os-metrics/"
fi
if [ -f /etc/os-release ]; then
cp /etc/os-release "$DATA_DIR/"
fi
if [ -f /etc/redhat-release ]; then
cp /etc/redhat-release "$DATA_DIR/"
fi
if [ -f /etc/debian_version ]; then
cp /etc/debian_version "$DATA_DIR/"
fi
if [ -n "$PID" ]; then
cat "/proc/$PID/limits" > "$DATA_DIR/process_limits" 2>&1
fi
cat /sys/kernel/mm/transparent_hugepage/enabled > "$DATA_DIR/os-metrics/hugepage_enabled" 2>&1
cat /sys/kernel/mm/transparent_hugepage/defrag > "$DATA_DIR/os-metrics/hugepage_defrag" 2>&1
if [ -n "$(command -v blockdev)" ]; then
if [ -z "$NOSUDO" ]; then
sudo blockdev --report 2>&1 |tee > "$DATA_DIR/os-metrics/blockdev_report"
fi
else
echo "Please install 'blockdev' to collect data about devices"
fi
if [ -n "$(command -v dmidecode)" ] && [ -z "$NOSUDO" ]; then
sudo dmidecode |tee > "$DATA_DIR/os-metrics/dmidecode"
fi
free > "$DATA_DIR/os-metrics/free" 2>&1
if [ -n "$(command -v iostat)" ]; then
if [ "$MODE" != "light" ]; then
iostat -ymxt 1 "$IOSTAT_LEN" > "$DATA_DIR/os-metrics/iostat" 2>&1
fi
else
echo "Please install 'iostat' to collect data about I/O activity"
fi
# NOTE(review): output file names use "wmstat" (sic); kept as-is because
# downstream tooling may expect the existing names — confirm before renaming.
if [ -n "$(command -v vmstat)" ]; then
vmstat -w -t -s > "$DATA_DIR/os-metrics/wmstat-stat" 2>&1
if [ "$MODE" != "light" ]; then
vmstat -w -t -a 1 "$IOSTAT_LEN" > "$DATA_DIR/os-metrics/wmstat-mem" 2>&1
vmstat -w -t -d 1 "$IOSTAT_LEN" > "$DATA_DIR/os-metrics/wmstat-disk" 2>&1
fi
else
echo "Please install 'vmstat' to collect data about Linux"
fi
if [ -n "$(command -v lscpu)" ]; then
lscpu > "$DATA_DIR/os-metrics/lscpu" 2>&1
fi
ps auxww > "$DATA_DIR/os-metrics/ps-aux.txt" 2>&1
cat /proc/cpuinfo > "$DATA_DIR/os-metrics/cpuinfo" 2>&1
cat /proc/meminfo > "$DATA_DIR/os-metrics/meminfo" 2>&1
cat /proc/interrupts > "$DATA_DIR/os-metrics/interrupts" 2>&1
cat /proc/version > "$DATA_DIR/os-metrics/version_proc" 2>&1
if [ -n "$(command -v numactl)" ]; then
# BUG FIX: was "numactl -show"; the long option is "--show" (matching
# "--hardware" below)
numactl --show > "$DATA_DIR/os-metrics/numactl" 2>&1
numactl --hardware > "$DATA_DIR/os-metrics/numactl_hardware" 2>&1
else
echo "Please install 'numactl' to collect data about NUMA subsystem"
fi
# collect information about CPU frequency, etc.
if [ -d /sys/devices/system/cpu/cpu0/cpufreq/ ]; then
mkdir -p "$DATA_DIR/os-metrics/cpus/"
for i in /sys/devices/system/cpu/cpu[0-9]*; do
CPUN="$(basename "$i")"
for file in $i/cpufreq/*; do
echo "$(basename "$file"): $(cat "$file" 2>/dev/null)" >> "$DATA_DIR/os-metrics/cpus/$CPUN"
done
done
fi
if [ -f /proc/sys/vm/zone_reclaim_mode ]; then
cat /proc/sys/vm/zone_reclaim_mode > "$DATA_DIR/os-metrics/zone_reclaim_mode"
fi
if [ -f /etc/fstab ]; then
cp /etc/fstab "$DATA_DIR/os-metrics/fstab"
fi
if [ -f /etc/security/limits.conf ]; then
cp /etc/security/limits.conf "$DATA_DIR/os-metrics/limits.conf"
fi
if [ -d /etc/security/limits.d/ ]; then
mkdir -p "$DATA_DIR/os-metrics/limits.d/"
cp -r /etc/security/limits.d/* "$DATA_DIR/os-metrics/limits.d/"
fi
if [ -n "$(command -v lsblk)" ]; then
lsblk > "$DATA_DIR/os-metrics/lsblk" 2>&1
lsblk -oname,kname,fstype,mountpoint,label,ra,model,size,rota > "$DATA_DIR/os-metrics/lsblk_custom" 2>&1
fi
if [ -n "$(command -v sar)" ]; then
sar -B > "$DATA_DIR/os-metrics/sar" 2>&1
fi
if [ -n "$(command -v lspci)" ]; then
lspci> "$DATA_DIR/os-metrics/lspci" 2>&1
fi
if [ -n "$(command -v ss)" ]; then
ss -at > "$DATA_DIR/os-metrics/ss" 2>&1
fi
uptime > "$DATA_DIR/os-metrics/uptime" 2>&1
if [ -n "$(command -v pvdisplay)" ] && [ -z "$NOSUDO" ]; then
sudo pvdisplay 2>&1|tee > "$DATA_DIR/os-metrics/pvdisplay"
fi
if [ -n "$(command -v vgdisplay)" ] && [ -z "$NOSUDO" ]; then
sudo vgdisplay 2>&1|tee > "$DATA_DIR/os-metrics/vgdisplay"
fi
if [ -n "$(command -v lvdisplay)" ] && [ -z "$NOSUDO" ]; then
sudo lvdisplay -a 2>&1|tee > "$DATA_DIR/os-metrics/lvdisplay"
fi
if [ -n "$(command -v lvs)" ] && [ -z "$NOSUDO" ]; then
sudo lvs -a 2>&1|tee > "$DATA_DIR/os-metrics/lvs"
fi
# per-disk SMART health and queue settings
for i in /sys/block/*; do
DSK="$(basename "$i")"
# BUG FIX: "loop*" was an unanchored regex (matches any name containing
# "loo"); anchor it to skip exactly the loopN devices
if [[ "$DSK" =~ ^loop ]]; then
continue
fi
mkdir -p "$DATA_DIR/os-metrics/disks/"
if [ -n "$(command -v smartctl)" ] && [ -b "/dev/$DSK" ] && [ -z "$NOSUDO" ]; then
# BUG FIX: was `smartctl -H -i "$DM"` — $DM is not set at this point
# (it is only assigned much later, in the disk-config section below),
# so smartctl was queried with an empty/stale device argument.
sudo smartctl -H -i "/dev/$DSK" 2>&1|tee > "$DATA_DIR/os-metrics/disks/smartctl-$DSK"
fi
for file in $i/queue/*; do
if [ -f "$file" ]; then
echo "$(basename "$file"): $(cat "$file" 2>/dev/null)" >> "$DATA_DIR/os-metrics/disks/$DSK"
fi
done
done
if [ -d /sys/devices/system/clocksource/clocksource0/ ]; then
echo "available: $(cat /sys/devices/system/clocksource/clocksource0/available_clocksource)" > "$DATA_DIR/os-metrics/clocksource"
echo "current: $(cat /sys/devices/system/clocksource/clocksource0/current_clocksource)" >> "$DATA_DIR/os-metrics/clocksource"
fi
if [ -n "$(command -v dmesg)" ]; then
dmesg -T > "$DATA_DIR/os-metrics/dmesg"
fi
if [ -n "$(command -v ifconfig)" ]; then
ifconfig > "$DATA_DIR/os-metrics/ifconfig"
if [ -n "$(command -v ethtool)" ]; then
mkdir -p "$DATA_DIR/os-metrics/ethtool/"
for i in $(ifconfig |grep -e '^[a-z]'|cut -f 1 -d ' '); do
ethtool -i "$i" > "$DATA_DIR/os-metrics/ethtool/$i" 2>&1
done
fi
fi
if [ -n "$(command -v netstat)" ]; then
if [ -z "$NOSUDO" ]; then
sudo netstat -laputen 2>&1|tee > "$DATA_DIR/os-metrics/netstat"
fi
else
echo "Please install 'netstat' to collect data about network connections"
fi
if [ -n "$(command -v netstat)" ]; then
netstat --statistics > "$DATA_DIR/os-metrics/netstat-stats" 2>&1
fi
fi
df -k > "$DATA_DIR/os-metrics/df" 2>&1
sysctl -a > "$DATA_DIR/os-metrics/sysctl" 2>&1
# Collect uname info (for Linux)
debug "Collecting uname info..."
if [ "$HOST_OS" = "Linux" ]; then
{
echo "kernel_name: $(uname -s)"
echo "node_name: $(uname -n)"
echo "kernel_release: $(uname -r)"
echo "kernel_version: $(uname -v)"
echo "machine_type: $(uname -m)"
echo "processor_type: $(uname -p)"
echo "platform_type: $(uname -i)"
echo "os_type: $(uname -o)"
} > "$DATA_DIR/os-info.txt" 2>&1
# Collect uname info (for MacOS)
elif [ "$HOST_OS" = "Darwin" ]; then
{
echo "hardware_name: $(uname -m)"
echo "node_name: $(uname -n)"
echo "processor_type: $(uname -p)"
echo "os_release: $(uname -r)"
echo "os_version: $(uname -v)"
echo "os_name: $(uname -s)"
} > "$DATA_DIR/os-info.txt" 2>&1
else
echo "os type $HOST_OS not catered for or detected" > "$DATA_DIR/os-info.txt" 2>&1
fi
# Collect NTP info (for Linux)
debug "Collecting ntp info..."
if [ "$HOST_OS" = "Linux" ]; then
if [ -n "$(command -v ntptime)" ]; then
ntptime > "$DATA_DIR/ntp/ntptime" 2>&1
fi
if [ -n "$(command -v ntpstat)" ]; then
ntpstat > "$DATA_DIR/ntp/ntpstat" 2>&1
fi
if [ -n "$(command -v ntpq)" ]; then
ntpq -p > "$DATA_DIR/os-metrics/ntpq_p" 2>&1
fi
fi
# Collect Chrony info (for Linux)
debug "Collecting Chrony info..."
if [ "$HOST_OS" = "Linux" ]; then
if [ -n "$(command -v chronyc)" ]; then
mkdir -p "$DATA_DIR"/os-metrics/chrony
chronyc tracking > "$DATA_DIR/os-metrics/chrony/tracking" 2>&1
chronyc sources -v > "$DATA_DIR/os-metrics/chrony/sources" 2>&1
chronyc sourcestats -v > "$DATA_DIR/os-metrics/chrony/sourcestats" 2>&1
fi
fi
# Collect TOP info (for Linux): one snapshot of the CPU summary line
debug "Collecting top info..."
if [ "$HOST_OS" = "Linux" ]; then
top -n1 -b | \
grep "Cpu" | \
cut -d\: -f2 | \
awk '{
user=$1;
systm=$3;
nice=$5;
idle=$7;
iowait=$9;
steal=$15
}
END {
print "user: "user "\nnice: "nice "\nsystem: "systm "\niowait: "iowait "\nsteal: "steal "\nidle: "idle
}' > "$DATA_DIR/os-metrics/cpu.txt" 2>&1
fi
# Collect FREE info (for Linux): break the Mem/Swap rows into labelled lines
debug "Collecting free info..."
if [ "$HOST_OS" = "Linux" ]; then
free -m | \
grep -E "Mem|Swap" | \
awk '{
type=$1;
total=$2;
used=$3;
free=$4;
shared=$5;
buffcache=$6;
avail=$7;
if (type=="Mem:"){
print "mem total: "total"\nmem used: "used"\nmem free: "free"\nmem shared: "shared"\nmem buff/cache: "buffcache"\nmem available: "avail
} else {
print "swap total: "total"\nswap used: "used"\nswap free: "free}}' > "$DATA_DIR/os-metrics/memory.txt" 2>&1
fi
# Collect JVM system info (for Linux)
debug "Collecting jvm system info..."
java -version > "$DATA_DIR/java_version.txt" 2>&1
if [ -n "$PID" ] && [ "$HOST_OS" = "Linux" ] && [ -n "$JAVA_HOME" ] && [ "$MODE" != "light" ]; then
# TODO: think how to do it without sudo?
if [ -n "$IS_PACKAGE" ]; then
sudo -u "$CASS_USER" "$JCMD" "$PID" VM.system_properties 2>&1| tee > "$DATA_DIR/java_system_properties.txt"
sudo -u "$CASS_USER" "$JCMD" "$PID" VM.command_line 2>&1 |tee > "$DATA_DIR/java_command_line.txt"
else
"$JCMD" "$PID" VM.system_properties > "$DATA_DIR/java_system_properties.txt" 2>&1
"$JCMD" "$PID" VM.command_line > "$DATA_DIR/java_command_line.txt" 2>&1
fi
fi
# Collect Data DIR info
debug "Collecting disk info..."
# TODO: rewrite this to be not dependent on OS, plus check both java_command_line.txt & java_cmdline
if [ "$HOST_OS" = "Linux" ]; then
# Try to read the data and commitlog directories from config file.
# The multiple sed statements strip out leading / trailing lines
# and concatenate on the same line where multiple directories are
# configured to allow Nibbler to read it as a csv line
DATA_CONF=$(sed -n '/^data_file_directories:/,/^[^- ]/{//!p;};/^data_file_directories:/d' "$CONF_DIR/cassandra.yaml" | grep -e "^[ ]*-" | sed -e "s/^.*- *//" | tr $'\n' ',' | sed -e "s/.$/\n/")
COMMITLOG_CONF=$(grep -e "^commitlog_directory:" "$CONF_DIR/cassandra.yaml" |sed -e 's|^commitlog_directory:[ ]*\(.*\)[ ]*$|\1|')
# Checks the data and commitlog variables are set. If not then
# read the JVM variable cassandra.storagedir and append paths as
# necessary.
if [ -n "$DATA_CONF" ]; then
echo "data: $DATA_CONF" > "$DATA_DIR/os-metrics/disk_config.txt" 2>&1
elif [ -f "$DATA_DIR/java_command_line.txt" ]; then
DATA_CONF=$(tr " " "\n" < "$DATA_DIR/java_command_line.txt" | grep "cassandra.storagedir" | awk -F "=" '{print $2"/data"}')
echo "data: $DATA_CONF" > "$DATA_DIR/os-metrics/disk_config.txt" 2>&1
fi
if [ -n "$COMMITLOG_CONF" ]; then
echo "commitlog: $COMMITLOG_CONF" >> "$DATA_DIR/os-metrics/disk_config.txt" 2>&1
elif [ -f "$DATA_DIR/java_command_line.txt" ]; then
COMMITLOG_CONF=$(tr " " "\n" < "$DATA_DIR/java_command_line.txt" | grep "cassandra.storagedir" | awk -F "=" '{print $2"/commitlog"}')
echo "commitlog: $COMMITLOG_CONF" >> "$DATA_DIR/os-metrics/disk_config.txt" 2>&1
fi
# Since the data dir might have multiple items we need to check
# each one using df to verify the physical device
#for DEVICE in $(cat "$CONF_DIR/cassandra.yaml" | sed -n "/^data_file_directories:/,/^$/p" | grep -E "^.*-" | awk '{print $2}')
for DEVICE in $(echo "$DATA_CONF" | awk '{gsub(/,/,"\n");print}')
do
DM="$(df -h "$DEVICE" | grep -v "Filesystem" | awk '{print $1}')"
if [ -z "$DATA_MOUNT" ]; then
DATA_MOUNT="$DM"
else
DATA_MOUNT="$DATA_MOUNT,$DM"
fi
done
COMMITLOG_MOUNT=$(df -h "$COMMITLOG_CONF" | grep -v "Filesystem" | awk '{print $1}')
echo "data: $DATA_MOUNT" > "$DATA_DIR/os-metrics/disk_device.txt" 2>&1
echo "commitlog: $COMMITLOG_MOUNT" >> "$DATA_DIR/os-metrics/disk_device.txt" 2>&1
fi
}
# Collects data from nodes
# Full diagnostic collection for one node: verifies cqlsh connectivity,
# then copies configs, nodetool/dsetool output, schema dumps, OS metrics,
# cloud metadata, logs, JMX dumps, and DSE Search core details into $DATA_DIR.
function collect_data {
echo "Collecting data from node $NODE_ADDR..."
# Fail fast if cqlsh can't reach the node (auth/SSL options go via -c)
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/cqlsh $CQLSH_OPTS -e 'describe cluster;' "$CONN_ADDR" "$CONN_PORT" > /dev/null 2>&1
RES=$?
if [ "$RES" -ne 0 ]; then
echo "Can't execute cqlsh command, exit code: $RES. If you're have cluster with authentication,"
echo "please pass the option -c with user name/password and other options, like:"
echo "-c '-u username -p password'"
echo "If you have SSL enabled for client connections, pass --ssl in -c"
exit 1
fi
# Copy the standard Cassandra config files that exist in this install
for i in cassandra-rackdc.properties cassandra.yaml cassandra-env.sh jvm.options logback-tools.xml logback.xml jvm-clients.options jvm-server.options jvm11-clients.options jvm11-server.options jvm8-clients.options jvm8-server.options; do
if [ -f "$CONF_DIR/$i" ] ; then
cp $CONF_DIR/$i "$DATA_DIR/conf/cassandra/"
fi
done
# collecting nodetool information
debug "Collecting nodetool output..."
for i in cfstats compactionhistory compactionstats describecluster getcompactionthroughput getstreamthroughput gossipinfo info netstats proxyhistograms ring status statusbinary tpstats version cfhistograms; do
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/nodetool $NT_OPTS $i > "$DATA_DIR/nodetool/$i" 2>&1
done
# JSON-format variants are only gathered in extended mode
if [ "$MODE" = "extended" ]; then
for i in tablestats tpstats ; do
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/nodetool $NT_OPTS -F json $i > "$DATA_DIR/nodetool/$i.json" 2>&1
done
fi
# collecting schema
debug "Collecting schema info..."
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/cqlsh $CQLSH_OPTS -e 'describe cluster;' "$CONN_ADDR" "$CONN_PORT" > "$DATA_DIR/driver/metadata" 2>&1
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/cqlsh $CQLSH_OPTS -e 'describe schema;' "$CONN_ADDR" "$CONN_PORT" > "$DATA_DIR/driver/schema" 2>&1
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/cqlsh $CQLSH_OPTS -e 'describe full schema;' "$CONN_ADDR" "$CONN_PORT" > "$DATA_DIR/driver/full-schema" 2>&1
# collecting process-related info
collect_system_info
# collection of cloud-related information
collect_cloud_info
# collect logs
# auto-detect log directory from the running JVM's -Dcassandra.logdir
if [ -f "$DATA_DIR/java_cmdline" ]; then
TLDIR="$(sed -e 's|^.*-Dcassandra.logdir=\([^ ]*\) .*$|\1|' < "$DATA_DIR/java_cmdline")"
if [ -n "$TLDIR" ] && [ -d "$TLDIR" ]; then
CASS_DSE_LOG_DIR=$TLDIR
fi
fi
# if not set, then default
if [ -z "$CASS_DSE_LOG_DIR" ]; then
CASS_DSE_LOG_DIR="$LOG_DIR"
fi
# current logs always; rotated logs only in extended mode
find "$CASS_DSE_LOG_DIR" -maxdepth 1 -name \*\.log -a -type f -exec cp {} "$DATA_DIR/logs/cassandra/" \;
if [ "$MODE" = "extended" ]; then
find "$CASS_DSE_LOG_DIR" -maxdepth 1 -name \*\.log\.\* -a -type f -exec cp {} "$DATA_DIR/logs/cassandra/" \;
fi
# GC log location comes from -Xloggc on the java command line
if [ -f "$DATA_DIR/java_cmdline" ]; then
GC_LOG="$(sed -e 's|^.* -Xloggc:\([^ ]*\) .*$|\1|' < "$DATA_DIR/java_cmdline")"
if [ -n "$GC_LOG" ] && [ -f "$GC_LOG" ]; then
cp "$GC_LOG" "$DATA_DIR/logs/cassandra/"
fi
fi
# Collect metrics from JMX for OSS C* and DDAC
# NOTE(review): assumes sjk-plus.jar is present in the invoking user's home
# directory — confirm it was staged there before relying on this dump.
if [ -n "$IS_COSS" ] ; then
if [ "$MODE" != "light" ]; then
$MAYBE_RUN_WITH_TIMEOUT java -jar ~/sjk-plus.jar mxdump $JMX_OPTS > "$DATA_DIR/jmx_dump.json" 2>&1
fi
fi
# The rest of DSE-specific things
if [ -n "$IS_DSE" ]; then
if [ -f "$DSE_CONF_DIR/dse.yaml" ]; then
cp "$DSE_CONF_DIR/dse.yaml" "$DATA_DIR/conf/dse/"
fi
if [ -f /etc/default/dse ]; then
cp /etc/default/dse "$DATA_DIR/conf/dse/"
fi
# TODO: decide, if we need to collect Tomcat logs
# if [ -f "$DATA_DIR/java_cmdline" ]; then
# # TOMCAT_DIR="`cat "$DATA_DIR/java_cmdline"|sed -e 's|^.*-Dtomcat.logs=\([^ ]*\) .*$|\1|'`"
# # if [ -n "$TOMCAT_DIR" -a -d "$TOMCAT_DIR" ]; then
# #
# # fi
# fi
if [ -f "$CASS_DSE_LOG_DIR/audit/dropped-events.log" ]; then
mkdir -p "$DATA_DIR/logs/cassandra/audit"
cp "$CASS_DSE_LOG_DIR/audit/dropped-events.log" "$DATA_DIR/logs/cassandra/audit"
fi
# Versions to determine if nodesync available (DSE 6+ only)
DSE_VERSION="$($BIN_DIR/dse -v)"
DSE_MAJOR_VERSION="$(echo $DSE_VERSION|sed -e 's|^\([0-9]\)\..*$|\1|')"
debug "Collecting DSE information..."
if [ "$MODE" != "light" ]; then
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/nodetool $NT_OPTS sjk mxdump > "$DATA_DIR/jmx_dump.json" 2>&1
fi
for i in status ring ; do
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS $i > "$DATA_DIR/dsetool/$i" 2>&1
done
if [ "$MODE" != "light" ]; then
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS insights_config --show_config > "$DATA_DIR/dsetool/insights_config" 2>&1
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS insights_filters --show_filters > "$DATA_DIR/dsetool/insights_filters" 2>&1
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS perf cqlslowlog recent_slowest_queries > "$DATA_DIR/dsetool/slowest_queries" 2>&1
# collect nodesync rate
if [ "$DSE_MAJOR_VERSION" -gt "5" ]; then
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/nodetool $NT_OPTS nodesyncservice getrate > "$DATA_DIR/nodetool/nodesyncrate" 2>&1
fi
fi
# collect DSE Search data: one sub-directory per Solr-backed index found in the schema
debug "Collecting DSE Search information..."
for core in $(grep -e 'CREATE CUSTOM INDEX.*Cql3SolrSecondaryIndex' "$DATA_DIR/driver/schema" 2>/dev/null |sed -e 's|^.* ON \([^ ]*\) (.*).*$|\1|'|tr -d '"' | uniq); do
debug "collecting data for DSE Search core $core"
mkdir -p "$DATA_DIR/solr/$core/"
# it's faster to execute cqlsh than dsetool, but it's internal info
$BIN_DIR/cqlsh $CQLSH_OPTS -e "select blobAsText(resource_value) from solr_admin.solr_resources where core_name = '$core' and resource_name ='solrconfig.xml.bak' ;" "$CONN_ADDR" "$CONN_PORT"|grep '<?xml version='|sed -e 's|^ *\(<?xml version=.*\)$|\1|'|sed -e "s|\\\n|\n|g" > "$DATA_DIR/solr/$core/solrconfig.xml" 2>&1
$BIN_DIR/cqlsh $CQLSH_OPTS -e "select blobAsText(resource_value) from solr_admin.solr_resources where core_name = '$core' and resource_name ='schema.xml.bak' ;" "$CONN_ADDR" "$CONN_PORT"|grep '<?xml version='|sed -e 's|^ *\(<?xml version=.*\)$|\1|'|sed -e "s|\\\n|\n|g" > "$DATA_DIR/solr/$core/schema.xml" 2>&1
if [ "$MODE" != "light" ]; then
#$BIN_DIR/dsetool $DT_OPTS get_core_config "$core" > "$DATA_DIR/solr/$core/config.xml" 2>&1
#$BIN_DIR/dsetool $DT_OPTS get_core_schema "$core" > "$DATA_DIR/solr/$core/schema.xml" 2>&1
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS list_core_properties "$core" > "$DATA_DIR/solr/$core/properties" 2>&1
fi
if [ "$MODE" = "extended" ]; then
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS core_indexing_status "$core" > "$DATA_DIR/solr/$core/status" 2>&1
$MAYBE_RUN_WITH_TIMEOUT $BIN_DIR/dsetool $DT_OPTS list_index_files "$core" > "$DATA_DIR/solr/$core/index_files" 2>&1
fi
done
# Record on-disk size of each Solr core (explicit solr_data_dir, or
# derived from the first data_file_directories entry)
if [ -d "$DATA_DIR/solr/" ]; then
SOLR_DATA_DIR=$(grep -E '^solr_data_dir: ' "$DSE_CONF_DIR/dse.yaml" 2>&1|sed -e 's|^solr_data_dir:[ ]*\(.*\)$|\1|')
# if it's not specified explicitly
if [ -z "$SOLR_DATA_DIR" ] && [ -n "$DATA_CONF" ]; then
debug "No Solr directory is specified in dse.yaml, detecting from DATA_CONF: $DATA_CONF"
SOLR_DATA_DIR="$(echo "$DATA_CONF"|sed -e 's|^\([^,]*\)\(,.*\)?$|\1|')/solr.data"
debug "SOLR_DATA_DIR is defined as: $SOLR_DATA_DIR"
fi
if [ -n "$SOLR_DATA_DIR" ] && [ -d "$SOLR_DATA_DIR" ]; then
cd "$SOLR_DATA_DIR" && du -s -- * 2>&1 > "$DATA_DIR/solr/cores-sizes.txt"
fi
fi
elif [ -n "$IS_COSS" ]; then
if [ -f /etc/default/cassandra ]; then
cp /etc/default/cassandra "$DATA_DIR/conf/cassandra/default"
fi
fi
}
# Locate the DSE Insights / MCAC data directory (explicit -I option, dse.yaml
# options, or the MCAC agent config) and stage its .gz bundles under a
# per-node directory for archiving. Exits when no insights data is found.
function collect_insights {
echo "Collecting insights data"
if [ -z "$INSIGHTS_DIR" ]; then
if [ -n "$IS_DSE" ]; then
INSIGHTS_DIR="$DEFAULT_INSIGHTS_DIR"
# TODO: naive attempt to parse options - need to do better
# -r keeps any backslashes in yaml lines intact
while read -r line; do
name=$line
case $name in
*"$INSIGHTS_OPTIONS"*|*"$INSIGHTS_DATA_DIR"*)
if [[ $name != \#* ]];
then
# BUG FIX: removed a leftover no-op awk invocation here that
# re-read dse.yaml on every matching line and printed nothing.
if [[ $name == data_dir* ]]; then
INS_DIR="$(echo $name |grep -i 'data_dir:' |sed -e 's|data_dir:[ ]*\([^ ]*\)$|\1|')"
if [ -n "$INS_DIR" ] && [ -d "$INS_DIR" ] && [ -d "$INS_DIR/insights" ]; then
# BUG FIX: the assigned path used to end with a trailing space
# ("…/insights "), so the -d check below always failed.
INSIGHTS_DIR="$INS_DIR/insights"
fi
break
fi
fi
esac
done < "$DSE_CONF_DIR/dse.yaml"
elif [ -n "$IS_COSS" ]; then
INSIGHTS_DIR="$DEFAULT_MCAC_DIR"
MCAC_HOME=""
# Find the MCAC agent home from the running JVM, jvm.options, or cassandra-env.sh
if [ -f "$DATA_DIR/java_cmdline" ]; then
MCAC_HOME=$(grep -E -- '-javaagent:[^ ]*/lib/datastax-mcac-agent[^ /]*.jar' "$DATA_DIR/java_cmdline"|sed -e 's|^.*-javaagent:\([^ ]*\)/lib/datastax-mcac-agent[^ /]*.jar.*$|\1|')
fi
if [ -z "$MCAC_HOME" ] && [ -f "$CONF_DIR/jvm.options" ]; then
MCAC_HOME=$(grep -v -h -E '^#' "$CONF_DIR/jvm.options" | grep -E -- '-javaagent:[^ ]*/datastax-mcac-agent[^ /]*.jar'|sed -e 's|^.*-javaagent:\([^ ]*\)/lib/datastax-mcac-agent[^ /]*.jar.*$|\1|')
fi
if [ -z "$MCAC_HOME" ] && [ -f "$CONF_DIR/cassandra-env.sh" ]; then
MCAC_HOME=$(grep -v -h -E '^[ ]*#' "$CONF_DIR/cassandra-env.sh" | grep -E -- '-javaagent:[^ ]*/datastax-mcac-agent[^ /]*.jar'|sed -e 's|^.*-javaagent:\([^ ]*\)/lib/datastax-mcac-agent[^ /]*.jar.*$|\1|')
fi
if [ -n "$MCAC_HOME" ] && [ -d "$MCAC_HOME" ]; then
if [ -f "$MCAC_HOME/config/metric-collector.yaml" ]; then
CASS_DATA_DIR=$(grep -e '^data_dir:' $MCAC_HOME/config/metric-collector.yaml|sed -e 's|^data_dir:[ ]*\(.*\)$|\1|')
if [ -n "$CASS_DATA_DIR" ]; then
INSIGHTS_DIR="$CASS_DATA_DIR/mcac_data/insights"
fi
fi
else
echo "No installation of Metric Collector for Apache Cassandra was detected"
fi
fi
fi
if [ ! -d "$INSIGHTS_DIR" ]; then
echo "Can't find find directory with insights data, or it doesn't exist! $INSIGHTS_DIR"
echo "Please pass directory name via -I option (see help)"
exit 1
fi
if [ -z "$RES_FILE" ]; then
RES_FILE=$OUTPUT_DIR/dse-insights-$NODE_ADDR.tar.gz
fi
# Only probe for the first few bundles; existence is all we need here
DFILES="$(ls -1 $INSIGHTS_DIR/*.gz 2>/dev/null |head -n 20)"
if [ -z "$DFILES" ]; then
echo "No Insights files in the specified directory"
exit 1
fi
# Prefer the host ID from nodetool for the archive directory name
NODE_ID="$($BIN_DIR/nodetool $NT_OPTS info|grep -E '^ID'|sed -e 's|^ID.*:[[:space:]]*\([0-9a-fA-F].*\)|\1|')"
# Node could be offline, so nodetool may not work
if [ -n "$NODE_ID" ]; then
NODE_ADDR="$NODE_ID"
fi
DATA_DIR="$TMP_DIR"/"$NODE_ADDR"
mkdir -p "$DATA_DIR"
# we should be careful when copying the data - list of files could be very long...
HAS_RSYNC="$(command -v rsync)"
if [ -n "$HAS_RSYNC" ]; then
rsync -r --include='*.gz' --exclude='*' "$INSIGHTS_DIR/" "$DATA_DIR/"
elif [ "$HOST_OS" = "Linux" ]; then
find "$INSIGHTS_DIR/" -maxdepth 1 -name '*.gz' -print0|xargs -0 cp -t "$DATA_DIR"
else
cp "$INSIGHTS_DIR"/*.gz "$DATA_DIR"
fi
}
function collect_ssl_info {
    # Dump certificate listings (keytool -list -v) for the client and server
    # keystores/truststores configured in cassandra.yaml, when encryption is
    # enabled. Output goes to $DATA_DIR/conf/security/.
    debug "Collecting SSL related information"
    is_client_ssl_enabled=$(find_yaml_sub_property client_encryption_options enabled)
    is_server_ssl_enabled=$(find_yaml_sub_property server_encryption_options internode_encryption)
    if [ -n "$is_client_ssl_enabled" ] && [ "$is_client_ssl_enabled" = true ]; then
        debug "collecting keystore and truststore for client_encryption_options"
        client_keystore=$(find_yaml_sub_property client_encryption_options keystore)
        client_keystore_pass=$(find_yaml_sub_property client_encryption_options keystore_password)
        client_truststore=$(find_yaml_sub_property client_encryption_options truststore)
        client_truststore_pass=$(find_yaml_sub_property client_encryption_options truststore_password)
        if [ -n "$client_keystore" ] && [ -n "$client_keystore_pass" ]; then
            keytool -list -v -keystore "$client_keystore" -storepass "$client_keystore_pass" > "$DATA_DIR/conf/security/client-keystore.txt" 2>&1
        fi
        if [ -n "$client_truststore" ] && [ -n "$client_truststore_pass" ]; then
            keytool -list -v -keystore "$client_truststore" -storepass "$client_truststore_pass" > "$DATA_DIR/conf/security/client-truststore.txt" 2>&1
        fi
    fi
    if [ -n "$is_server_ssl_enabled" ]; then
        # internode_encryption may be scoped to all / dc / rack
        if [ "$is_server_ssl_enabled" = "all" ] || [ "$is_server_ssl_enabled" = "dc" ] || [ "$is_server_ssl_enabled" = "rack" ]; then
            debug "collecting keystore and truststore for server_encryption_options"
            server_keystore=$(find_yaml_sub_property server_encryption_options keystore)
            server_keystore_pass=$(find_yaml_sub_property server_encryption_options keystore_password)
            server_truststore=$(find_yaml_sub_property server_encryption_options truststore)
            server_truststore_pass=$(find_yaml_sub_property server_encryption_options truststore_password)
            if [ -n "$server_keystore" ] && [ -n "$server_keystore_pass" ]; then
                keytool -list -v -keystore "$server_keystore" -storepass "$server_keystore_pass" > "$DATA_DIR/conf/security/server-keystore.txt" 2>&1
            fi
            # BUG FIX: this guard previously tested the *keystore* variables, so
            # the truststore dump ran (or was skipped) based on the wrong settings.
            if [ -n "$server_truststore" ] && [ -n "$server_truststore_pass" ]; then
                keytool -list -v -keystore "$server_truststore" -storepass "$server_truststore_pass" > "$DATA_DIR/conf/security/server-truststore.txt" 2>&1
            fi
        fi
    fi
}
# Two arguments:
# 1) yaml property to begin searching
# 2) yaml subproperty to find under 1
# tolower and stripping quotes could be removed from this function in future to make this more general purpose
# Currenly only supports property values with no spaces
function find_yaml_sub_property {
# Builds an awk one-liner as a string and evals it: find the top-level
# property $1 in cassandra.yaml, then scan its indented/commented sub-lines
# for $2 and print that value lower-cased with quotes stripped.
# NOTE(review): the string literally embeds a newline before the "| tr"
# continuation below; a pipeline cannot start a line under eval, so this
# looks like an extraction artifact — confirm against the upstream script
# (likely a single line ending "…cassandra.yaml\" | tr -d …").
awk_str="awk '/$1:/ {
getline;
while (\$0 ~ /^\s+|^#/) {
if (\$1 ~ /^$2:/) {
print tolower(\$2);
exit;
} else {
getline;
}
}
}' \"$CONF_DIR/cassandra.yaml\"
| tr -d \"\\\"'\""
# eval so the embedded quoting/expansion in awk_str is re-parsed as a command
eval $awk_str
}
function create_directories {
    # Build the staging tree under $DATA_DIR; extra subtrees are added for
    # DSE installs and for SSL collection.
    local sub
    # Common for COSS / DDAC & DSE
    for sub in logs/cassandra nodetool conf/cassandra driver os-metrics ntp; do
        mkdir -p "$DATA_DIR/$sub"
    done
    if [ -n "$IS_DSE" ]; then
        for sub in logs/tomcat dsetool conf/dse; do
            mkdir -p "$DATA_DIR/$sub"
        done
    fi
    if [ -n "$COLLECT_SSL" ]; then
        mkdir -p "$DATA_DIR/conf/security"
    fi
}
function create_archive {
    # Default archive name when the caller didn't supply one explicitly.
    if [ -z "$RES_FILE" ]; then
        RES_FILE="$OUTPUT_DIR/diag-$NODE_ADDR.tar.gz"
    fi
    echo "Creating archive file $RES_FILE"
    # -C strips TMP_DIR, so the tarball's sole top-level entry is the node address
    tar -czf "$RES_FILE" -C "$TMP_DIR" "$NODE_ADDR"
}
# Remove the per-run scratch directory created by set_paths.
function cleanup {
debug "Removing temp directory $TMP_DIR"
rm -rf "$TMP_DIR"
}
function adjust_nodetool_params {
    # Work out the JMX host/port the node actually listens on (parsed from the
    # captured java command line when available) and extend NT_OPTS/JMX_OPTS.
    local jmx_port=7199
    local jmx_host=127.0.0.1
    local tmp=""
    # Get the JMX user/password from the NT_OPTS and put them in a format that sjk will understand
    # NOTE(review): with `sed -n .../p` chained like this, output is produced only
    # when BOTH -u and -pw are present; a lone -u is silently dropped — confirm intent.
    JMX_OPTS=`echo $NT_OPTS | sed -En "s/-u /--username /p" | sed -En "s/-pw /--password /p"`
    if [ -f "$DATA_DIR/java_cmdline" ]; then
        # a local-only JMX port takes precedence over the remote port
        tmp=$(grep 'cassandra.jmx.local.port=' "$DATA_DIR/java_cmdline"|sed -e 's|^.*-Dcassandra.jmx.local.port=\([^ ]*\).*$|\1|')
        if [ -n "$tmp" ]; then
            jmx_port="$tmp"
        else
            tmp=$(grep 'cassandra.jmx.remote.port=' "$DATA_DIR/java_cmdline"|sed -e 's|^.*-Dcassandra.jmx.remote.port=\([^ ]*\).*$|\1|')
            if [ -n "$tmp" ]; then
                jmx_port="$tmp"
            fi
            tmp=$(grep 'java.rmi.server.hostname=' "$DATA_DIR/java_cmdline"|sed -e 's|^.*-Djava.rmi.server.hostname=\([^ ]*\).*$|\1|')
            if [ -n "$tmp" ]; then
                jmx_host="$tmp"
            fi
        fi
    fi
    # Warn early if JMX is unreachable; nodetool calls would fail later anyway
    if [ -n "$(command -v nc)" ]; then
        if ! nc -z "$jmx_host" "$jmx_port" ; then
            echo "JMX isn't available at $jmx_host:$jmx_port"
        fi
    fi
    if [ "$jmx_port" != "7199" ]; then
        NT_OPTS="$NT_OPTS -p $jmx_port"
    fi
    # BUG FIX: was compared against the typo "127.0.01", so "-h 127.0.0.1"
    # was appended to NT_OPTS even for the default localhost case.
    if [ "$jmx_host" != "127.0.0.1" ]; then
        NT_OPTS="$NT_OPTS -h $jmx_host"
    fi
    JMX_OPTS="$JMX_OPTS -s $jmx_host:$jmx_port"
}
# Call functions in order
# Main driver: detect the install flavor, resolve paths, stage a per-node
# directory, then collect either Insights bundles or a full diagnostic
# snapshot, archive it, and clean up.
debug "Collection mode: $MODE"
detect_install
set_paths
get_node_ip
# everything for this node is staged under TMP_DIR/<node address>
DATA_DIR="$TMP_DIR/$NODE_ADDR"
create_directories
get_pid
adjust_nodetool_params
if [ -n "$INSIGHTS_MODE" ]; then
collect_insights
else
collect_data
fi
if [ -n "$COLLECT_SSL" ]; then
collect_ssl_info
fi
create_archive
cleanup
# return to the directory the user invoked the script from
cd "$OLDWD" || exit 1
|
// Initialize Firebase
// NOTE(review): assumes the global `firebase` object from the Firebase SDK
// <script> tag is loaded before this snippet runs — confirm load order.
var config = {
apiKey: "<KEY>", // placeholder — real API key injected/redacted elsewhere
authDomain: "atomlabs-b8da9.firebaseapp.com",
databaseURL: "https://atomlabs-b8da9.firebaseio.com",
projectId: "atomlabs-b8da9",
storageBucket: "atomlabs-b8da9.appspot.com",
messagingSenderId: "394511887133"
};
firebase.initializeApp(config);
#!/bin/bash
# Cluster-management helpers for an HBase/HDFS test deployment.
# Node and client host lists live under ../conf relative to the CWD.
hbase_nodes="`pwd`/../conf/hbase-nodes"
clients="`pwd`/../conf/clients"
# jps process names used to find/kill the various daemons
jps_names_hbase="HQuorumPeer HRegionServer HMaster DITBClient DITBServer HybridClient HybridServer"
jps_names_hdfs="SecondaryNameNode DataNode NameNode"
jps_names_client="DITBClient DITBServer PerfClient HybridClient HybridServer"
usr=`whoami`
# per-user data root; the commented alternative points at a shared disk
data_dir="/home/${usr}/data/"
#data_dir="/data/chfeng/data/"
HDP_VERSION=2.5.1
data_dir_hadoop="${data_dir}/hadoop-data/hadoop-${HDP_VERSION}"
data_dir_hadoop_name="${data_dir_hadoop}/name"
data_dir_hadoop_data="${data_dir_hadoop}/data"
data_dir_hadoop_tmp="${data_dir_hadoop}/tmp"
# NOTE(review): log dirs live under $HADOOP_HOME / $HBASE_HOME, not data_dir;
# both env vars must be set before the Clear* functions run.
data_dir_hadoop_log="${HADOOP_HOME}/logs"
HBASE_VERSION=1.2.1
data_dir_hbase_tmp="${data_dir}/hbase-data/hbase-${HBASE_VERSION}-tmp"
data_dir_hbase_lcindex="${data_dir}/hbase-data/lcindex"
data_dir_hbase_log="${HBASE_HOME}/logs"
data_dir_zk="${data_dir}/zk-data"
# Print the supported sub-commands. Fixed the "filesm" typo and documented the
# commands the dispatcher actually supports but the old usage text omitted
# (clearlog, copyhdfs, copyhbase, start, hb, kc, kcs).
fun_usage(){
  echo "use the following cmd:"
  echo "  c / clean: clean hbase and hdfs files, re-format hdfs"
  echo "  k / kill: kill hbase and hdfs"
  echo "  clearlog: remove hadoop and hbase log directories on all nodes"
  echo "  copyhdfs <dir>: copy hdfs logs from all nodes to <dir>"
  echo "  copyhbase <dir>: copy hbase logs from all nodes to <dir>"
  echo "  conf: copy configurations from current node"
  echo "  jar: copy client, server and common jars from current node"
  echo "  jps: jps"
  echo "  start: start hdfs then hbase"
  echo "  hb: kill hbase/clients and clear hbase data only"
  echo "  kc: kill all and clear hbase and hdfs data"
  echo "  kcs: kill all, clear data, then restart"
  echo "  redep: kill all, copy conf and start all"
}
# Remove `path` (arg 2) on `host` (arg 1) over ssh.
# Note: `path` is expanded locally into the remote command string, so globs
# like /tmp/hadoop-user* are expanded by the remote shell.
fun_RemoveDirIfExists(){
  if [ $# -lt 2 ]; then
    echo "need at least two parameter"
  else
    host=$1
    path=$2
    echo "remove dir ${path} on ${host}"
    ssh ${host} "rm -rf ${path}"
  fi
}
# DESTRUCTIVE: wipe HDFS data, name, tmp and log directories on every node,
# then re-format the namenode (creates an empty filesystem).
fun_ClearHDFS(){
  for host in `cat ${hbase_nodes}`; do
    fun_RemoveDirIfExists ${host} ${data_dir_hadoop_data}
    fun_RemoveDirIfExists ${host} ${data_dir_hadoop_name}
    fun_RemoveDirIfExists ${host} ${data_dir_hadoop_tmp}
    fun_RemoveDirIfExists ${host} ${data_dir_hadoop_log}
    # Remote glob: removes any leftover local hadoop scratch dirs in /tmp.
    fun_RemoveDirIfExists ${host} /tmp/hadoop-`whoami`*
  done
  # NOTE(review): `hadoop namenode` is the deprecated spelling; the current
  # form is `hdfs namenode -format -force` — confirm before changing.
  hadoop namenode -format -force
}
# Remove HBase temp data, LC-index data, logs, /tmp scratch dirs and
# ZooKeeper state on every cluster node.
fun_ClearHBaseTmp(){
  for host in `cat ${hbase_nodes}`; do
    fun_RemoveDirIfExists ${host} ${data_dir_hbase_tmp}
    fun_RemoveDirIfExists ${host} ${data_dir_hbase_lcindex}
    fun_RemoveDirIfExists ${host} ${data_dir_hbase_log}
    fun_RemoveDirIfExists ${host} /tmp/hbase-`whoami`*
    fun_RemoveDirIfExists ${host} ${data_dir_zk}
  done
}
# Wipe the Hadoop and HBase log directories on every cluster node.
fun_ClearLog(){
  for host in `cat ${hbase_nodes}`; do
    # Same two directories, same order as before — just driven by a loop.
    for logdir in ${data_dir_hadoop_log} ${data_dir_hbase_log}; do
      fun_RemoveDirIfExists ${host} ${logdir}
    done
  done
}
# Kill every HBase-related JVM on each cluster node.
# Changed to a kill-until-gone loop for consistency with fun_KillClients:
# a single `kill` pass can miss a process when jps reports several pids for
# one name (they all land in ${pid} on separate lines) or when the first
# signal does not take effect before the check.
fun_KillHBase(){
  for host in `cat ${hbase_nodes}`; do
    for name in `echo ${jps_names_hbase}`; do
      pid=`ssh ${host} "jps | grep ${name} | cut -d ' ' -f 1"`
      while [ "${pid}x" != "x" ]; do
        echo "kill ${pid} on ${host}"
        ssh ${host} "kill -9 ${pid}"
        pid=`ssh ${host} "jps | grep ${name} | cut -d ' ' -f 1"`
      done
    done
  done
  echo "kill hbase done"
}
# Kill client-side JVMs on every client host. Loops until jps no longer
# reports the process name, so restarts/multiple pids are also caught.
fun_KillClients(){
  for host in `cat ${clients}`; do
    for name in `echo ${jps_names_client}`; do
      pid=`ssh ${host} "jps | grep ${name} | cut -d ' ' -f 1"`
      while [ "${pid}x" != "x" ]; do
        echo "kill ${pid} on ${host}"
        ssh ${host} "kill -9 ${pid}"
        pid=`ssh ${host} "jps | grep ${name} | cut -d ' ' -f 1"`
      done
    done
  done
  echo "kill clients done"
}
# Kill every HDFS JVM on each cluster node.
# Unlike fun_KillClients this only issues one kill per process name; if the
# process does not die on the first attempt it will be left running.
fun_KillHDFS(){
  for host in `cat ${hbase_nodes}`; do
    for name in `echo ${jps_names_hdfs}`; do
      pid=`ssh ${host} "jps | grep ${name} | cut -d ' ' -f 1"`
      if [ "${pid}x" != "x" ]; then
        echo "kill ${pid} on ${host}"
        ssh ${host} "kill -9 ${pid}"
      fi
    done
  done
  echo "kill hdfs done"
}
# copy conf files
# Push the local HBase conf dir and Hadoop etc dir to every cluster node and
# then every client host (same order as before: hbase-nodes file first,
# clients file second — `cat` concatenates them in that order).
fun_Conf(){
  for host in `cat ${hbase_nodes} ${clients}`; do
    fun_ScpToRemote ${HBASE_HOME}/conf ${HBASE_HOME} ${host}
    fun_ScpToRemote ${HADOOP_HOME}/etc ${HADOOP_HOME} ${host}
  done
}
# copy jar files, only client.jar server.jar common.jar
# Same jars, same destinations as before; the two identical host loops are
# merged by concatenating both host lists (hbase nodes first, then clients).
fun_Jar(){
  lib_dir="${HBASE_HOME}/lib"
  for host in `cat ${hbase_nodes} ${clients}`; do
    for f in client server common; do
      fun_ScpToRemote ${lib_dir}/hbase-${f}-${HBASE_VERSION}.jar ${lib_dir} $host
    done
  done
}
# run jps
# Print the Java process list of every cluster node (quick health check).
fun_Jps(){
  for host in `cat ${hbase_nodes}`; do
    ssh ${host} jps
  done
}
# copy log files
# Pull the Hadoop log directory from every cluster node into local dir $1.
fun_CopyHDFSLog(){
  for host in `cat ${hbase_nodes}`; do
    fun_ScpFromRemote ${data_dir_hadoop_log} $1 ${host}
  done
}
# copy log files
# Pull the HBase log directory from every cluster node into local dir $1.
fun_CopyHBaseLog(){
  for host in `cat ${hbase_nodes}`; do
    fun_ScpFromRemote ${data_dir_hbase_log} $1 ${host}
  done
}
# basic scp function
# Copy local `src` (arg 1) into `dest` (arg 2) on remote `host` (arg 3).
fun_ScpToRemote(){
  if [ $# -ne 3 ]; then
    echo "need 3 parameters in scp"
    # Fixed: bare `exit` exited with status 0, hiding the usage error from
    # callers and wrappers; exit non-zero on bad usage.
    exit 1
  fi
  src=$1
  dest=$2
  host=$3
  echo "scp ${src} to ${dest} on ${host}"
  scp -rq ${src} ${host}:${dest}
}
# basic scp function
# Copy remote `src` (arg 1) from `host` (arg 3) into local `dest` (arg 2).
# NOTE(review): bare `exit` terminates with status 0 on bad usage — probably
# should be `exit 1`; confirm no caller relies on the current behaviour.
fun_ScpFromRemote(){
  if [ $# -ne 3 ]; then
    echo "need 3 parameters in scp, now see $@"
    exit
  fi
  src=$1
  dest=$2
  host=$3
  echo "scp ${src} at ${dest} to ${host}"
  scp -rq ${host}:${src} ${dest}
}
# No command given: show usage. Also bail out early when the node list file is
# missing, since every sub-command depends on it.
if [ $# = 0 ]; then
  fun_usage
  exit
elif [ ! -e "${hbase_nodes}" ]; then
  echo "hbase node file not exist: ${hbase_nodes}"
  exit
fi
cmd=$1
# Command dispatch: map each short command (see fun_usage) to the
# corresponding kill/clean/copy/start sequence. The "${cmd}x" pattern guards
# against empty expansions in old test syntax.
if [ "${cmd}x" = "kx" ] || [ "${cmd}x" = "killx" ]; then
  echo "clean all and reboot"
  fun_KillClients
  fun_KillHBase
  fun_KillHDFS
elif [ "${cmd}x" = "cx" ] || [ "${cmd}x" = "cleanx" ]; then
  echo "clean hbase and hdfs"
  fun_ClearHDFS
  fun_ClearHBaseTmp
elif [ "${cmd}x" = "clearlogx" ]; then
  echo "clear log"
  fun_ClearLog
elif [ "${cmd}x" = "copyhdfsx" ]; then
  echo "copy hdfs logs to local"
  fun_CopyHDFSLog $2
elif [ "${cmd}x" = "copyhbasex" ]; then
  echo "copy hbase logs to local"
  fun_CopyHBaseLog $2
elif [ "${cmd}x" = "confx" ]; then
  echo "copy configurations"
  fun_Conf
elif [ "${cmd}x" = "jarx" ]; then
  echo "copy jars"
  fun_Jar
elif [ "${cmd}x" = "hbx" ]; then
  # Reset HBase only: HDFS keeps running, /hbase is removed from HDFS.
  fun_KillClients
  fun_KillHBase
  hdfs dfs -rm -r /hbase
  fun_ClearHBaseTmp
elif [ "${cmd}x" = "kcx" ]; then
  # Kill everything and clear all data.
  fun_KillClients
  fun_KillHBase
  fun_KillHDFS
  fun_ClearHDFS
  fun_ClearHBaseTmp
elif [ "${cmd}x" = "kcsx" ]; then
  # Kill everything, clear all data, then restart the stack.
  fun_KillClients
  fun_KillHBase
  fun_KillHDFS
  fun_ClearHDFS
  fun_ClearHBaseTmp
  start-dfs.sh && sleep 10 && start-hbase.sh
elif [ "${cmd}x" = "startx" ]; then
  start-dfs.sh && sleep 10 && start-hbase.sh
elif [ "${cmd}x" = "jpsx" ]; then
  fun_Jps
elif [ "${cmd}x" = "redepx" ]; then
  # Full redeploy: kill, wipe, push conf, restart.
  fun_KillClients
  fun_KillHBase
  fun_KillHDFS
  fun_ClearHDFS
  fun_ClearHBaseTmp
  fun_Conf
  start-dfs.sh && sleep 10 && start-hbase.sh
else
  # Fixed typo in the error message: "unknow" -> "unknown".
  echo "unknown cmd ${cmd}, please follow the usage"
  fun_usage
fi
|
// Count how many times each target word appears in the sample sentence
// (case-insensitive, whole-word matches only).
let occurrences = {};
let words = ["hello", "world", "hi"];
let str = "Hello, world! Hi there!";
for (const word of words) {
  // Whole-word, case-insensitive search for this word.
  const found = str.match(new RegExp(`\\b${word}\\b`, 'gi'));
  if (found) {
    occurrences[word] = found.length;
  }
}
console.log(occurrences); // { hello: 1, world: 1, hi: 1 }
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package fr.calamus.common.tools;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/**
*
* @author haerwynn
*/
/**
 * Lazily-initialised date/time formatters shared across the code base:
 * the "pg" patterns match the PostgreSQL textual representation and the
 * "fr" patterns the French day-first convention.
 *
 * SimpleDateFormat is mutable and not thread-safe, and the previous
 * unsynchronised lazy initialisation was also racy. Each formatter is
 * therefore held in a ThreadLocal so every thread transparently gets (and
 * reuses) its own instance, while every public signature stays unchanged.
 *
 * @author haerwynn
 */
public class CommonDateFormats {

    // One SimpleDateFormat per thread for each pattern (never share an
    // instance across threads).
    private static final ThreadLocal<SimpleDateFormat> pgDateFormatter =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("yyyy-MM-dd");
                }
            };
    private static final ThreadLocal<SimpleDateFormat> pgTimestampFormatter =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                }
            };
    private static final ThreadLocal<SimpleDateFormat> pgTimestampSimplerFormatter =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("yyyy-MM-dd HH:mm");
                }
            };
    private static final ThreadLocal<SimpleDateFormat> frDateFormatter =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("dd/MM/yyyy");
                }
            };
    private static final ThreadLocal<SimpleDateFormat> frTimestampFormatter =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("dd/MM/yyyy HH:mm:ss", Locale.FRENCH);
                }
            };
    private static final ThreadLocal<SimpleDateFormat> frTimestampSimplerFormatter =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("dd/MM/yyyy HH:mm", Locale.FRENCH);
                }
            };

    /** @return this thread's "yyyy-MM-dd" (PostgreSQL date) formatter */
    public static SimpleDateFormat pgDateFormatter() {
        return pgDateFormatter.get();
    }

    /** @return this thread's "yyyy-MM-dd HH:mm:ss" (PostgreSQL timestamp) formatter */
    public static SimpleDateFormat pgTimestampFormatter() {
        return pgTimestampFormatter.get();
    }

    /** @return this thread's "yyyy-MM-dd HH:mm" (seconds-less PostgreSQL timestamp) formatter */
    public static SimpleDateFormat pgTimestampSimplerFormatter() {
        return pgTimestampSimplerFormatter.get();
    }

    /** @return this thread's "dd/MM/yyyy" (French date) formatter */
    public static SimpleDateFormat frDateFormatter() {
        return frDateFormatter.get();
    }

    /** @return this thread's "dd/MM/yyyy HH:mm:ss" (French timestamp) formatter */
    public static SimpleDateFormat frTimestampFormatter() {
        return frTimestampFormatter.get();
    }

    /** @return this thread's "dd/MM/yyyy HH:mm" (seconds-less French timestamp) formatter */
    public static SimpleDateFormat frTimestampSimplerFormatter() {
        return frTimestampSimplerFormatter.get();
    }

    /**
     * Parse a PostgreSQL-style date ("yyyy-MM-dd").
     *
     * @param s the text to parse, may be null
     * @return the parsed Date, or null when s is null or unparseable
     */
    public static Date pgParseDateOrNull(String s) {
        if (s == null) return null;
        try {
            return pgDateFormatter().parse(s);
        } catch (ParseException e) {
            return null;
        }
    }

    /**
     * Parse a French-style date ("dd/MM/yyyy").
     *
     * @param s the text to parse, may be null
     * @return the parsed Date, or null when s is null or unparseable
     */
    public static Date frParseDateOrNull(String s) {
        if (s == null) return null;
        try {
            return frDateFormatter().parse(s);
        } catch (ParseException e) {
            return null;
        }
    }
}
|
#!/bin/bash
# https://linuxize.com/post/how-to-nvidia-drivers-on-ubuntu-20-04/#installing-the-nvidia-drivers-using-the-command-line
# List GPUs and their recommended driver packages. Fixed: the subcommand is
# "devices" (plural); "device" is not a valid ubuntu-drivers subcommand.
ubuntu-drivers devices
# Find the "driver" with nvidia in the name
# e.g. for a MacBook 2013, the command is:
# sudo apt -y install nvidia-340
|
package com.sereno.vfv.Data.Annotation;
import com.sereno.vfv.Data.SubDataset;
/** Abstract base class for Drawable annotation component. Contains graphical information to render log annotation components. */
public abstract class DrawableAnnotationLogComponent
{
    /** The native C++ pointer of a std::shared_ptr<DrawableAnnotationLogComponent> linked to this java object */
    protected long m_ptr;

    /** The id of this object, inside the subdataset, as defined by the server */
    protected int m_id;

    /** The subdataset linked to this object */
    protected SubDataset m_sd;

    /** Constructor
     * @param ptr the native C++ std::shared_ptr<DrawableAnnotationLogComponent> ptr or derived
     * @param sd the subdataset owning this drawable annotation log component
     * @param id the ID as defined by the server */
    protected DrawableAnnotationLogComponent(long ptr, SubDataset sd, int id)
    {
        m_ptr = ptr;
        m_sd = sd;
        m_id = id;
    }

    /** Intentional no-op finalizer override.
     * NOTE(review): nothing here releases the native shared_ptr behind m_ptr —
     * presumably it is freed on the native side or by a subclass; confirm,
     * otherwise this leaks. Object.finalize is also deprecated in recent JDKs. */
    @Override
    public void finalize() {}

    /** Get the native C++ std::shared_ptr<DrawableAnnotationLogComponent> ptr
     * @return the native C++ ptr. Derived class might use derived types for the C++ ptr */
    public long getPtr()
    {
        return m_ptr;
    }

    /** Get the ID of this object as defined by the server
     * @return the ID of this object INSIDE the corresponding subdataset */
    public int getID() {return m_id;}

    /** Set whether or not this component should try to consider time values
     * @param t true if this component should try to consider time values, false otherwise */
    public void setEnableTime(boolean t)
    {
        nativeSetEnableTime(m_ptr, t);
    }

    /** Get whether or not this component should try to consider time values
     * @return true if yes, false otherwise */
    public boolean getEnableTime()
    {
        return nativeGetEnableTime(m_ptr);
    }

    /** Get whether or not the time component is used. This is different than "getEnableTime" as this function also considers the parent containers time settings.
     * (Naming note: kept as IsTimeUsed — PascalCase — for source compatibility with existing callers.)
     * @return true if yes, false otherwise */
    public boolean IsTimeUsed()
    {
        return nativeIsTimeUsed(m_ptr);
    }

    /** @return the SubDataset owning this component */
    public SubDataset getSubDataset() {return m_sd;}

    // Native counterparts implemented in the C++ library; ptr is the
    // shared_ptr handle stored in m_ptr.
    private static native void nativeSetEnableTime(long ptr, boolean t);
    private static native boolean nativeGetEnableTime(long ptr);
    private static native boolean nativeIsTimeUsed(long ptr);
}
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbSpectralAngleDataNodeFeatureFunction_hxx
#define otbSpectralAngleDataNodeFeatureFunction_hxx
#include "otbSpectralAngleDataNodeFeatureFunction.h"
namespace otb
{
/**
 * Constructor.
 * Initialises the half-width of the pixel band sampled around each segment
 * (m_Radius) and a default 4-band reference pixel.
 */
template<class TImage, class TCoordRep, class TPrecision>
SpectralAngleDataNodeFeatureFunction<TImage, TCoordRep, TPrecision>::SpectralAngleDataNodeFeatureFunction() :
  m_Radius(2)
{
  // Example reference values for QuickBird images (measured on a specific
  // image); callers are expected to override m_RefPixel for other sensors.
  m_RefPixel.SetSize(4);
  m_RefPixel.SetElement(0, 252.284);
  m_RefPixel.SetElement(1, 357.3);
  m_RefPixel.SetElement(2, 232.644);
  m_RefPixel.SetElement(3, 261.558);
}
/**
 * Standard "PrintSelf" method: prints the superclass state followed by the
 * reference pixel used for the spectral angle computation.
 */
template<class TImage, class TCoordRep, class TPrecision>
void SpectralAngleDataNodeFeatureFunction<TImage, TCoordRep, TPrecision>::PrintSelf(std::ostream& os,
                                                                                    itk::Indent indent) const
{
  Superclass::PrintSelf(os, indent);
  os << indent << "Reference Pixel: " << m_RefPixel << std::endl;
}
/**
 * Evaluate the mean spectral angle (w.r.t. m_RefPixel) over the pixels of a
 * band of width 2*m_Radius+1 around a line or polygon exterior ring.
 * Output: [ descriptor in [0,1], accumulated angle, visited pixel count ].
 * Change vs original: removed the declared-but-never-used local
 * `splitedLineIdNeigh` and cleaned up garbled comments; logic is unchanged.
 */
template<class TImage, class TCoordRep, class TPrecision>
typename SpectralAngleDataNodeFeatureFunction<TImage, TCoordRep, TPrecision>::OutputType SpectralAngleDataNodeFeatureFunction<
    TImage, TCoordRep, TPrecision>::Evaluate(const DataNodeType& node) const
{
  // TODO: rework with an iterator-based implementation (original author note
  // was unclear: "faire avce un ikk").
  // Select the vertex list to walk: lines use the line itself, polygons their
  // exterior ring; points (and anything else) are not supported.
  const typename ImageLineIteratorType::PathType* path;
  switch (node.GetNodeType())
    {
    case FEATURE_POINT:
      {
      itkExceptionMacro(<< "This DataNode type is not handle yet");
      break;
      }
    case FEATURE_LINE:
      {
      path = node.GetLine();
      break;
      }
    case FEATURE_POLYGON:
      {
      path = node.GetPolygonExteriorRing();
      break;
      }
    default:
      {
      itkExceptionMacro(<< "This DataNode type is not handle yet");
      break;
      }
    }

  // Split the line/polygon into two-vertex segments; for each segment also
  // build 2*m_Radius parallel copies shifted along the orthogonal direction,
  // so the spectral angle is averaged over a band of pixels.
  std::vector<std::pair<IndexType, IndexType> > splitedLineIdCentral;
  VertexListConstIteratorType it1 = path->GetVertexList()->Begin();
  VertexListConstIteratorType it2 = path->GetVertexList()->Begin();
  VertexListConstIteratorType itStop = path->GetVertexList()->End();
  ++it2;
  if (it2 == itStop)
    {
    itkExceptionMacro(<< "Invalid DataNode, must at least contain two points");
    }
  while (it1 != itStop && it2 != itStop)
    {
    IndexType id1, id2;
    id1[0] = static_cast<int> (it1.Value()[0]);
    id1[1] = static_cast<int> (it1.Value()[1]);
    id2[0] = static_cast<int> (it2.Value()[0]);
    id2[1] = static_cast<int> (it2.Value()[1]);

    // Compute the direction of the current segment.
    itk::Vector<double, 2> direction;
    direction[0] = it2.Value()[0] - it1.Value()[0];
    direction[1] = it2.Value()[1] - it1.Value()[1];
    direction.Normalize();

    // Compute the orthogonal direction of the current segment.
    itk::Vector<double, 2> orthogonalDirection;
    orthogonalDirection[0] = direction[1];
    orthogonalDirection[1] = -direction[0];

    splitedLineIdCentral.push_back(IndexPairType(id1, id2));
    for (unsigned int j = 1; j <= m_Radius; ++j)
      {
      // Shift both endpoints by j pixels on each side of the segment.
      IndexType shift11, shift12;
      shift11[0] = id1[0] - j * orthogonalDirection[0];
      shift11[1] = id1[1] - j * orthogonalDirection[1];
      shift12[0] = id1[0] + j * orthogonalDirection[0];
      shift12[1] = id1[1] + j * orthogonalDirection[1];
      IndexType shift21, shift22;
      shift21[0] = id2[0] - j * orthogonalDirection[0];
      shift21[1] = id2[1] - j * orthogonalDirection[1];
      shift22[0] = id2[0] + j * orthogonalDirection[0];
      shift22[1] = id2[1] + j * orthogonalDirection[1];
      splitedLineIdCentral.push_back(IndexPairType(shift11, shift21));
      splitedLineIdCentral.push_back(IndexPairType(shift12, shift22));
      }
    ++it1;
    ++it2;
    }

  // In the FEATURE_POLYGON case the first point appears twice (first and last
  // vertex), which would create a degenerate one-point segment: drop it.
  if (node.GetNodeType() == FEATURE_POLYGON)
    {
    splitedLineIdCentral.pop_back();
    }

  // Accumulate the spectral angle over every in-buffer pixel of every segment.
  double centralAccSpectralAngle = 0.;
  //double centralAccSpectralAngleSecondOrder = 0.;
  double centralNbVisitedPixel = 0.;

  for (unsigned int i = 0; i < splitedLineIdCentral.size(); ++i)
    {
    LineIteratorType lineIt(this->GetInputImage(), splitedLineIdCentral[i].first, splitedLineIdCentral[i].second);
    lineIt.GoToBegin();
    while (!lineIt.IsAtEnd())
      {
      if (this->IsInsideBuffer(lineIt.GetIndex()))
        {
        PixelType currPixel = this->GetInputImage()->GetPixel( lineIt.GetIndex() );
        double angle = m_SpectralAngleFunctor(currPixel, this->GetRefPixel());
        centralAccSpectralAngle += angle;
        //centralAccSpectralAngleSecondOrder += angle * angle;
        centralNbVisitedPixel += 1;
        }
      ++lineIt;
      }
    }

  OutputType output;
  double meanCentral = 0.;
  //double stddevCentral = 0.;
  if (centralNbVisitedPixel != 0.)
    {
    meanCentral = static_cast<double> (centralAccSpectralAngle) / centralNbVisitedPixel;
    //stddevCentral = std::sqrt( centralAccSpectralAngleSecondOrder/centralNbVisitedPixel - meanCentral*meanCentral );
    }

  if (meanCentral == 0.)
    {
    output.push_back(static_cast<PrecisionType> (0.));
    }
  else
    {
    // meanCentral is in [0, pi]; normalise to a descriptor in [0, 1].
    double descriptor = meanCentral / otb::CONST_PI;
    output.push_back(static_cast<PrecisionType>( descriptor ));
    }
  output.push_back(static_cast<PrecisionType> (centralAccSpectralAngle));
  output.push_back(static_cast<PrecisionType> (centralNbVisitedPixel));

  return output;
}
} // end namespace otb
#endif
|
var express = require('express');
var staticHandler = require('jcash');
var ejs = require('ejs');
var fs = require('fs');
var path = require('path');
var appRequire = require('app-require');
var config = appRequire.requireConfig();
var libs = appRequire.requireLib();
var mobile = libs.mobile;
var template = libs.template;
var Base = require('./routehandlers/base.js');
var handlers = [];
/**
 * Walk a constructor's util.inherits chain (via `super_`) to its root and
 * report whether that root is the shared Base route handler. Falsy input is
 * never a handler.
 */
var isHandler = function(Handler) {
  if (!Handler) {
    return false;
  }
  var root = Handler;
  while (root.super_) {
    root = root.super_;
  }
  return root === Base;
};
/**
 * Recursively scan `dir` for route handler modules and instantiate each one
 * with the express app, collecting instances in the module-level `handlers`
 * array. base.js itself is skipped; a module must export exactly one handler
 * that inherits (via util.inherits) from Base, otherwise it is logged and
 * ignored. A throwing module is logged and does not abort the scan.
 */
var scanHandlers = function(app, dir) {
  var dir = path.normalize(dir);
  var files = fs.readdirSync(dir);
  files.forEach(function(filename) {
    if (filename === 'base.js') {
      return;
    }
    var stat = fs.statSync(dir + '/'+ filename);
    if (stat.isDirectory()) {
      // Recurse into sub-directories of route handlers.
      return scanHandlers(app, dir + '/'+ filename);
    }
    try {
      var Handler = require(dir + '/' + filename);
      if (Object.keys(Handler).length !== 1) {
        throw new Error('Please have one handler in each file');
      }
      if (isHandler(Handler)) {
        handlers.push(new Handler(app));
      } else {
        console.log('Implementation is not a handle in filename', filename, 'They should inherits from Base. Use util.inherits');
      }
    } catch (e) {
      console.error(e);
    }
  });
};
/**
 * Configure the express app: view engine, middleware, and the static asset
 * (js/css/image) managers. `basedir` is the application root containing
 * app/views and public/.
 * Fixes: `imageManager` was assigned without a declaration, leaking an
 * implicit global; the unused `block` and `location` declarations are removed.
 */
module.exports.setupApp = function(app, basedir) {
  var jsManager, cssManager, imageManager;
  basedir = path.normalize(basedir);
  app.configure('development', 'production', function() {
    // EJS renders .html views out of app/views.
    app.engine('html', ejs.__express);
    app.set('view engine', 'html');
    app.set('views', basedir + '/app/views');
    app.use(express.favicon());
    app.use(express.bodyParser());
    app.use(mobile.detect());
    // jcash serves fingerprinted, in-memory js/css bundles ahead of the
    // plain static file middleware.
    staticHandler.globalSettings({
      active : true,
      inmemory : true,
      pathJs : basedir + '/public/js',
      pathCss: basedir + '/public/css',
      maxAgeCss : config.http.static.maxAge,
      maxAgeJs : config.http.static.maxAge
    });
    app.use(staticHandler.jcash());
    app.use(express.static(basedir + '/public', {
      maxAge : config.http.static.maxAge
    }));
  });
  app.configure('development', function() {
    // Verbose errors only in development.
    app.use(express.errorHandler({
      dumpExceptions : true,
      showStack : true
    }));
  });
  app.configure('production', function() {
    app.use(express.errorHandler());
    app.enable('view cache');
  });
  /* template helpers */
  template.addDynamicHelpers(app);
  staticHandler.addTemplateHelpers(app);
  jsManager = staticHandler.getJsManager();
  cssManager = staticHandler.getCssManager();
  jsManager.parseConfig(config.js);
  cssManager.parseConfig(config.css);
  // First call initialises the image manager with its options; postrun()
  // retrieves the same instance later.
  imageManager = staticHandler.getImageManager({path : basedir + '/public/img', hasGm : false});
};
/**
 * Register all route handlers found under ./routehandlers on the app, then
 * install a catch-all middleware that renders the 404 page via the first
 * discovered handler (all handlers share Base's error404Page).
 */
module.exports.bootstrap = function(appl) {
  scanHandlers(appl, __dirname + '/routehandlers');
  if (handlers.length) {
    appl.use(function(req, res) {
      handlers[0].error404Page(req, res);
    });
  }
};
/**
 * Post-startup warm-up: pre-render all managed js/css bundles and prime the
 * image manager's file cache. Call once after setupApp/bootstrap.
 */
module.exports.postrun = function() {
  var jsManager, cssManager, imageManager;
  jsManager = staticHandler.getJsManager();
  cssManager = staticHandler.getCssManager();
  imageManager = staticHandler.getImageManager();
  jsManager.preRenderAll();
  cssManager.preRenderAll();
  imageManager.fetchFiles();
};
<reponame>fdw/sqlfluff
"""AnyNumberOf and OneOf."""
from typing import List, Optional, Tuple
from sqlfluff.core.parser.helpers import trim_non_code_segments
from sqlfluff.core.parser.match_result import MatchResult
from sqlfluff.core.parser.match_wrapper import match_wrapper
from sqlfluff.core.parser.match_logging import parse_match_logging
from sqlfluff.core.parser.context import ParseContext
from sqlfluff.core.parser.segments import BaseSegment, allow_ephemeral
from sqlfluff.core.parser.grammar.base import (
BaseGrammar,
MatchableType,
cached_method_for_parse_context,
)
from sqlfluff.core.parser.grammar.sequence import Sequence, Bracketed
class AnyNumberOf(BaseGrammar):
    """A more configurable version of OneOf."""

    def __init__(self, *args, **kwargs):
        # Bounds on how many elements may match overall (max_times/min_times)
        # and per individual element (max_times_per_element).
        # min_times == 0 makes the whole grammar optional.
        self.max_times = kwargs.pop("max_times", None)
        self.min_times = kwargs.pop("min_times", 0)
        self.max_times_per_element = kwargs.pop("max_times_per_element", None)
        # Any patterns to _prevent_ a match.
        self.exclude = kwargs.pop("exclude", None)
        super().__init__(*args, **kwargs)

    @cached_method_for_parse_context
    def simple(self, parse_context: ParseContext) -> Optional[List[str]]:
        """Does this matcher support an uppercase hash matching route?

        AnyNumberOf does provide this, as long as *all* the elements *also* do.
        """
        simple_buff = [
            opt.simple(parse_context=parse_context) for opt in self._elements
        ]
        # One non-simple element disqualifies the whole grammar.
        if any(elem is None for elem in simple_buff):
            return None
        # Flatten the list
        return [inner for outer in simple_buff for inner in outer]

    def is_optional(self) -> bool:
        """Return whether this element is optional.

        This is mostly set in the init method, but also in this
        case, if min_times is zero then this is also optional.
        """
        return self.optional or self.min_times == 0

    @staticmethod
    def _first_non_whitespace(segments) -> Optional[str]:
        """Return the raw upper representation of the first valid non-whitespace segment in the iterable."""
        for segment in segments:
            if segment.raw_segments_upper:
                return segment.raw_segments_upper
        return None

    def _prune_options(
        self, segments: Tuple[BaseSegment, ...], parse_context: ParseContext
    ) -> Tuple[List[MatchableType], List[str]]:
        """Use the simple matchers to prune which options to match on."""
        available_options = []
        simple_opts = []
        prune_buff = []
        # Counters are only used for the debug log line at the end.
        non_simple = 0
        pruned_simple = 0
        matched_simple = 0
        # Find the first code element to match against.
        first_elem = self._first_non_whitespace(segments)
        for opt in self._elements:
            simple = opt.simple(parse_context=parse_context)
            if simple is None:
                # This element is not simple, we have to do a
                # full match with it...
                available_options.append(opt)
                non_simple += 1
                continue
            # Otherwise we have a simple option, so let's use
            # it for pruning.
            for simple_opt in simple:
                # Check it's not a whitespace option
                if not simple_opt.strip():  # pragma: no cover
                    raise NotImplementedError(
                        "_prune_options not supported for whitespace matching."
                    )
                # We want to know if the first meaningful element of the str_buff
                # matches the option.
                # match the FIRST non-whitespace element of the list.
                if first_elem != simple_opt:
                    # No match, carry on.
                    continue
                # If we get here, it's matched the FIRST element of the string buffer.
                available_options.append(opt)
                simple_opts.append(simple_opt)
                matched_simple += 1
                break
            else:
                # Ditch this option, the simple match has failed
                prune_buff.append(opt)
                pruned_simple += 1
                continue

        parse_match_logging(
            self.__class__.__name__,
            "match",
            "PRN",
            parse_context=parse_context,
            v_level=3,
            ns=non_simple,
            ps=pruned_simple,
            ms=matched_simple,
            pruned=prune_buff,
            opts=available_options or "ALL",
        )

        return available_options, simple_opts

    def _match_once(
        self, segments: Tuple[BaseSegment, ...], parse_context: ParseContext
    ) -> Tuple[MatchResult, Optional["MatchableType"]]:
        """Match the forward segments against the available elements once.

        This serves as the main body of OneOf, but also a building block
        for AnyNumberOf.
        """
        # For efficiency, we'll be pruning options if we can
        # based on their simpleness. this provides a short cut
        # to return earlier if we can.
        # `segments` may already be nested so we need to break out
        # the raw segments within it.
        available_options, _ = self._prune_options(
            segments, parse_context=parse_context
        )

        # If we've pruned all the options, return unmatched (with some logging).
        if not available_options:
            return MatchResult.from_unmatched(segments)

        with parse_context.deeper_match() as ctx:
            # Take the longest match across the surviving options.
            match, matched_option = self._longest_trimmed_match(
                segments,
                available_options,
                parse_context=ctx,
                trim_noncode=False,
            )

        return match, matched_option

    @match_wrapper()
    @allow_ephemeral
    def match(
        self, segments: Tuple[BaseSegment, ...], parse_context: ParseContext
    ) -> MatchResult:
        """Match against any of the elements a relevant number of times.

        If it matches multiple, it returns the longest, and if any are the same
        length it returns the first (unless we explicitly just match first).
        """
        # First if we have an *exclude* option, we should check that
        # which would prevent the rest of this grammar from matching.
        if self.exclude:
            with parse_context.deeper_match() as ctx:
                if self.exclude.match(segments, parse_context=ctx):
                    return MatchResult.from_unmatched(segments)

        # Match on each of the options
        matched_segments: MatchResult = MatchResult.from_empty()
        unmatched_segments: Tuple[BaseSegment, ...] = segments
        n_matches = 0
        # Keep track of the number of times each option has been matched.
        available_options, _ = self._prune_options(
            segments, parse_context=parse_context
        )
        available_option_counter = {str(o): 0 for o in available_options}

        while True:
            if self.max_times and n_matches >= self.max_times:
                # We've matched as many times as we can
                return MatchResult(
                    matched_segments.matched_segments, unmatched_segments
                )

            # Is there anything left to match?
            if len(unmatched_segments) == 0:
                # No...
                if n_matches >= self.min_times:
                    return MatchResult(
                        matched_segments.matched_segments, unmatched_segments
                    )
                else:  # pragma: no cover TODO?
                    # We didn't meet the hurdle
                    return MatchResult.from_unmatched(unmatched_segments)

            # If we've already matched once...
            if n_matches > 0 and self.allow_gaps:
                # Consume any non-code if there is any
                pre_seg, mid_seg, post_seg = trim_non_code_segments(unmatched_segments)
                unmatched_segments = mid_seg + post_seg
            else:
                pre_seg = ()  # empty tuple

            match, matched_option = self._match_once(
                unmatched_segments, parse_context=parse_context
            )

            # Increment counter for matched option.
            if matched_option and (str(matched_option) in available_option_counter):
                available_option_counter[str(matched_option)] += 1
                # Check if we have matched an option too many times.
                if (
                    self.max_times_per_element
                    and available_option_counter[str(matched_option)]
                    > self.max_times_per_element
                ):
                    # Return everything matched so far, leaving the
                    # over-matched element unconsumed.
                    return MatchResult(
                        matched_segments.matched_segments, unmatched_segments
                    )

            if match:
                matched_segments += pre_seg + match.matched_segments
                unmatched_segments = match.unmatched_segments
                n_matches += 1
            else:
                # If we get here, then we've not managed to match. And the next
                # unmatched segments are meaningful, i.e. they're not what we're
                # looking for.
                if n_matches >= self.min_times:
                    return MatchResult(
                        matched_segments.matched_segments, pre_seg + unmatched_segments
                    )
                else:
                    # We didn't meet the hurdle
                    return MatchResult.from_unmatched(unmatched_segments)
class OneOf(AnyNumberOf):
    """Match any of the elements given once.

    If it matches multiple, it returns the longest, and if any are the same
    length it returns the first (unless we explicitly just match first).
    """

    def __init__(self, *args, **kwargs):
        # Exactly one match required: AnyNumberOf bounded to [1, 1].
        super().__init__(*args, max_times=1, min_times=1, **kwargs)
class OptionallyBracketed(OneOf):
    """Hybrid of Bracketed and Sequence: allows brackets but they aren't required.

    NOTE: This class is greedy on brackets so if they *can* be claimed, then
    they will be.
    """

    def __init__(self, *args, **kwargs):
        # The bracketed alternative comes first so brackets win when present.
        super().__init__(
            Bracketed(*args),
            # In the case that there is only one argument, no sequence is required.
            args[0] if len(args) == 1 else Sequence(*args),
            **kwargs,
        )
class AnySetOf(AnyNumberOf):
    """Match any number of the elements but each element can only be matched once."""

    def __init__(self, *args, **kwargs):
        # "Set" semantics via the per-element cap of one match.
        super().__init__(*args, max_times_per_element=1, **kwargs)
|
<html>
<head>
<title>Compare Inputs</title>
</head>
<body>
<!-- Self-submitting comparison form. The submit button must carry a name
     attribute, otherwise $_POST['submit'] is never populated. -->
<form action="/action_page.php" method="post">
<label>Input 1: </label><br>
<input type="text" name="input1"><br><br>
<label>Input 2:</label><br>
<input type="text" name="input2"><br><br>
<!-- Fixed: added name="submit" — without it the isset($_POST['submit'])
     check below always failed and the comparison never ran. -->
<input type="submit" name="submit" value="Submit">
</form>
<?php
// Only compare once the form has actually been submitted with both fields.
if(isset($_POST['input1'])
    && isset($_POST['input2'])
    && isset($_POST['submit'])) {
    $input1 = $_POST['input1'];
    $input2 = $_POST['input2'];
    // Strict comparison avoids PHP type-juggling surprises (e.g. "0" == "00").
    if ($input1 === $input2) {
        echo 'The two inputs are equal';
    }
    else {
        echo 'The two inputs are not equal';
    }
}
?>
</body>
</html>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.garesst.gmost.recursos;
/**
*
* @author oscar
*/
public enum RecursoConector {
jdbc,odbc,otro
}
|
// Doxygen-generated navigation index for the Perlin class
// (droid.Runtime.Utilities.Sampling): each entry is
// [ member name, HTML page + anchor, child entries ].
// Do not edit by hand — regenerate with Doxygen instead.
var classdroid_1_1_runtime_1_1_utilities_1_1_sampling_1_1_perlin =
[
    [ "Perlin", "classdroid_1_1_runtime_1_1_utilities_1_1_sampling_1_1_perlin.html#a6a7ace126c8f493e7093ec7736acb797", null ],
    [ "Noise", "classdroid_1_1_runtime_1_1_utilities_1_1_sampling_1_1_perlin.html#a62726d332fc6a25a34b8fe1d929a3f29", null ],
    [ "Noise", "classdroid_1_1_runtime_1_1_utilities_1_1_sampling_1_1_perlin.html#a51cf775b2502c7875dd9c9a5aaf9217d", null ],
    [ "Noise", "classdroid_1_1_runtime_1_1_utilities_1_1_sampling_1_1_perlin.html#ad97f406611b0b4f8db2bfcd64ad62964", null ],
    [ "SetSeed", "classdroid_1_1_runtime_1_1_utilities_1_1_sampling_1_1_perlin.html#ad58ec9680d44aaa38a042a33962edff9", null ]
];
<reponame>gy20151015/umy-ui
import uButton from './src/button'

// Install hook so `Vue.use(uButton)` registers the component globally under
// its own `name`.
uButton.install = (Vue) => {
  Vue.component(uButton.name, uButton)
}

export default uButton
# Generated by Django 3.1.5 on 2021-01-31 22:32
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema changes for the dashboard `Rpc` model.

    dependencies = [
        ('dashboard', '0016_auto_20210131_1429'),
    ]

    operations = [
        migrations.AddField(
            model_name='rpc',
            name='timestamp',
            # NOTE(review): '0000-00-00 00:00:00' is not a parseable datetime;
            # Django raises a validation error when this default is applied and
            # MySQL strict mode rejects zero dates. Presumably a sentinel was
            # intended — confirm, and fix via a follow-up migration rather than
            # editing this one if it has already been applied.
            field=models.DateTimeField(default='0000-00-00 00:00:00'),
        ),
        migrations.AlterField(
            model_name='rpc',
            name='created_at',
            # auto_now_add: set once on insert, not editable afterwards.
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
|
<filename>src/epics/blog/ArticleList.js
import React from "react";
import PropTypes from "prop-types";
import { Link, graphql, StaticQuery } from "gatsby";
import Img from "gatsby-image";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import styled from "styled-components";
import { UpPose } from "../../components/pose/Poses";
import Head from "../../components/Head";
import ShareActions from "./ShareActions";
import color from "../../common/styles/color";
import MEDIA from "../../common/styles/media";
import IO from "../../components/pose/IO";
// Blog index page: statically queries every markdown blog post (newest first,
// filtered to templateKey "blog-post") and renders a card grid wrapped in a
// visibility-triggered pose animation, plus page <Head> metadata taken from
// `blogDetails` (title/description).
// NOTE(review): `<ShareActions>color={color.blackLight}</ShareActions>` passes
// "color=..." as child text rather than as a prop — presumably
// `<ShareActions color={color.blackLight} />` was intended; confirm before
// changing.
const Blog = ({ blogDetails }) => (
  <StaticQuery
    query={graphql`
      query BlogQuery {
        allMarkdownRemark(
          sort: { order: DESC, fields: [frontmatter___datePublished] }
          filter: { frontmatter: { templateKey: { eq: "blog-post" } } }
        ) {
          totalCount
          edges {
            node {
              id
              frontmatter {
                title
                category
                description
                datePublished(formatString: "MMMM Do, YYYY")
                image {
                  childImageSharp {
                    fluid(maxHeight: 400, quality: 90) {
                      ...GatsbyImageSharpFluid_withWebp_noBase64
                    }
                  }
                }
              }
              fields {
                slug
                readingTime {
                  minutes
                }
              }
            }
          }
        }
      }
    `}
    render={data => (
      <>
        <Head
          title={blogDetails.title}
          pageDescription={blogDetails.description}
        />
        <IO>
          {({ isVisible, hasBeenVisible }) => (
            <UpPose pose={isVisible || hasBeenVisible ? "visible" : "hidden"}>
              <Wrapper>
                <Title>
                  <strong>Studeo Blog</strong>
                </Title>
                <Subtitle>{blogDetails.title}</Subtitle>
                <CardGrid>
                  {data.allMarkdownRemark.edges.map(
                    ({ node: { id, frontmatter, fields } }, i) => (
                      <FlexBox key={id} className={"key" + i}>
                        {/* <Card className={styles.Card} to={fields.slug}> */}
                        <Card to={fields.slug}>
                          <Img
                            fluid={
                              frontmatter.image
                                ? frontmatter.image.childImageSharp.fluid
                                : {}
                            }
                            alt={frontmatter.title}
                          />
                          <h4>{frontmatter.category}</h4>
                          <h3>{frontmatter.title} </h3>
                          <p>{frontmatter.description}</p>
                          <span>
                            <h5>{frontmatter.datePublished}</h5>
                            {/* <h5 className={styles.Time}> */}
                            <h5>
                              <FontAwesomeIcon icon="book-open" />
                              {Math.round(fields.readingTime.minutes)} min
                            </h5>
                          </span>
                        </Card>
                      </FlexBox>
                    )
                  )}
                </CardGrid>
                <SocialWrapper>
                  <SocialText>Don’t miss a post</SocialText>
                  <ShareActions>color={color.blackLight}</ShareActions>
                </SocialWrapper>
              </Wrapper>
            </UpPose>
          )}
        </IO>
      </>
    )}
  />
);
// Runtime prop contract: blogDetails must carry the title/description used
// for <Head> and the subtitle.
Blog.propTypes = {
  blogDetails: PropTypes.object.isRequired
};

export default Blog;
// Page-level flex container: centers all blog content on a white background.
const Wrapper = styled.div`
  display: flex;
  flex-direction: column;
  background: #fff;
  align-items: center;
`;

// Large "Studeo Blog" headline at the top of the page.
const Title = styled.h1`
  letter-spacing: -1.4px;
  text-align: center;
  font-size: 50px;
  color: ${color.blackLight};
  margin: 60px 40px 0px 40px;
`;

// Uppercase, letter-spaced subheading showing the page title; tighter
// bottom margin on tablet widths.
const Subtitle = styled.h2`
  color: ${color.blackLight};
  margin: 10px 8px 60px 8px;
  font-size: 18px;
  letter-spacing: 6px;
  font-weight: 700;
  text-transform: uppercase;
  display: block;
  ${MEDIA.TABLET`
    margin: 10px 8px 10px 8px;
  `};
`;
// Clickable blog-post card (a styled Gatsby Link): rounded bordered column
// with image, category, title, description, and a date/reading-time footer.
// Fixes: `display: relative` / `display: absolute` were invalid CSS
// (relative/absolute are `position` values, not `display` values) and were
// silently dropped by the browser; corrected to `position:`.
const Card = styled(Link)`
  border: 1px solid ${color.greyLight};
  box-shadow: 0px 15px 92px 0px rgba(0, 37, 67, 0.06);
  transition: all 0.5s ease;
  display: flex;
  flex-direction: column;
  border-radius: 14px;
  overflow: hidden;
  flex-grow: 1;
  h3 {
    font-size: 28px;
    font-weight: 700;
    letter-spacing: -1px;
    padding: 0px 22px;
    line-height: 1.2;
  }
  h4 {
    position: relative;
    font-size: 14px;
    margin-bottom: 6px;
    margin: 22px 0px 8px 0px;
    font-weight: 700;
    color: ${color.black};
    letter-spacing: -0.3px;
    padding: 0px 23px;
  }
  p {
    flex-grow: 1;
    font-size: 15px;
    line-height: 1.4;
    padding: 0px 22px;
    margin-top: 6px;
    color: ${color.grey};
    letter-spacing: -0.2px;
  }
  span {
    display: flex;
    justify-content: space-between;
    padding: 14px 22px 22px 22px;
  }
  h5 {
    font-size: 13px;
    color: ${color.blackLight};
    letter-spacing: -0.2px;
    transition: 0.5s;
    svg {
      position: absolute;
      font-size: 12px;
      margin: 1px 4px;
    }
  }
  &:hover {
    transform: translate3D(0, -1px, 0) scale(1.02);
    box-shadow: 0px 15px 114px 0px rgba(0, 37, 67, 0.14);
    z-index: 1000;
    border: 1px solid #fff;
    .pink {
      color: ${color.grey};
    }
  }
`;
// Responsive card grid: auto-filling 300px-min columns; the first card
// (.key0) spans two columns on desktop and hides its description, reverting
// to a normal single-column card on tablet.
const CardGrid = styled.div`
  margin: 60px 0px 80px 0px;
  width: 100%;
  display: grid;
  max-width: 1100px;
  grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));
  grid-template-rows: repeat(auto-fill, minmax(260px, 420px));
  .key0 {
    grid-column-end: span 2;
    p {
      display: none;
    }
    ${MEDIA.TABLET`
      p {
        display: inline;
      }
    `};
  }
  grid-gap: 2em;
  ${MEDIA.LARGE`
    max-width: 700px;
  `};
  ${MEDIA.TABLET`
    max-width: 350px;
    width: 90%;
    grid-gap: 1.5em;
    .key0 {
      grid-column-end: auto;
    }
  `};
`;

// Grid cell wrapper letting the inner Card stretch to fill its track.
const FlexBox = styled.div`
  flex-grow: 1;
  display: flex;
  flex-direction: column;
`;

// "Don't miss a post" caption above the share buttons.
const SocialText = styled.h5`
  font-weight: 700;
  letter-spacing: -0.2px;
  color: ${color.blackLight};
`;

// Centered footer container for the social/share section.
const SocialWrapper = styled.div`
  margin-bottom: 80px;
  text-align: center;
`;
|
<reponame>ritaswc/wechat_app_template
// WeChat mini-program page: a simple star-rating picker.
var app = getApp()
Page({
  data: {
    // Selectable rating slots; 0 means "no rating yet".
    stars: [0, 1, 2, 3, 4, 5],
    img_url: 'http://appuat.huihuishenghuo.com/img/',
    normalSrc: 'http://appuat.huihuishenghuo.com/img/order/star.png',
    selectedSrc: 'http://appuat.huihuishenghuo.com/img/order/stars.png',
    key: 0,// currently selected rating
  },
  onLoad: function () {
  },
  // Tap handler: store the tapped star's index (from data-key) as the rating.
  selectStar: function (e) {
    this.setData({
      key: e.currentTarget.dataset.key
    })
  }
})
-- Sales records: one row per recorded sale snapshot.
CREATE TABLE Sales (
    RecordId INT NOT NULL AUTO_INCREMENT, -- surrogate key
    Date DATETIME NOT NULL,               -- when the sale was recorded
    Revenue DECIMAL(10,2) NOT NULL,       -- monetary total, 2 decimal places
    TotalQty INT NOT NULL,                -- units sold
    PRIMARY KEY (RecordId)
);
<reponame>liadmagen/Keep-Current-App<gh_stars>1-10
import { TestBed, inject } from '@angular/core/testing';
import { RecommendationListService } from './recommendation-list.service';
describe('RecommendationService', () => {
  // Fresh testing module (and therefore a fresh service instance) per spec.
  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [RecommendationListService]
    });
  });

  // Smoke test: the service can be constructed by the injector.
  it('should be created', inject(
    [RecommendationListService],
    (service: RecommendationListService) => {
      expect(service).toBeTruthy();
    }
  ));
});
|
<filename>include/re/lib/container/ring_array.hpp
#pragma once
#include <cassert>
#include <array>
#include <algorithm>
#include <iterator>
#include <stdexcept>
#include <type_traits>
#include <utility>
#include <gsl/span>
#include "indexed_iterator.hpp"
namespace re {

// Fixed-capacity circular buffer backed by std::array<T, Size>.
//
// `next` always points at the slot the next push_back() will overwrite,
// which is also the logical front() (oldest element) once the ring has
// wrapped. indexed_iterator presents elements in logical order.
//
// Fixes relative to the previous revision:
//  * Members are declared `c` before `next`: members initialize in
//    declaration order, so every constructor previously computed
//    `next` from `std::begin(c)` before `c` itself was constructed.
//  * size()/empty() are const-qualified.
//  * at() no longer tests `position < 0` (size_type is unsigned).
//  * const rend() returns const_reverse_iterator (was declared as the
//    mutable reverse_iterator while returning a const one).
//  * throw_out_of_range() is no longer constexpr (a constexpr function
//    that unconditionally throws is ill-formed, NDR); it is [[noreturn]].
template <typename T, std::size_t Size>
class ring_array {
public:
    using value_type = T;
    using reference = T&;
    using const_reference = T const&;
    using container_type = std::array<value_type, Size>;
    using size_type = typename container_type::size_type;
    using difference_type = typename container_type::difference_type;
    using container_iterator = typename container_type::iterator;
    using index_type = difference_type;
    using iterator = indexed_iterator<ring_array, false>;
    using const_iterator = indexed_iterator<ring_array, true>;
    using reverse_iterator = std::reverse_iterator<iterator>;
    using const_reverse_iterator = std::reverse_iterator<const_iterator>;

    constexpr ring_array()
        noexcept(std::is_nothrow_default_constructible<T>::value) :
        next(std::begin(c))
    {
    }

    constexpr ring_array(ring_array const& rhs)
        noexcept(std::is_nothrow_copy_constructible<T>::value) :
        c(rhs.c),
        next(std::begin(c) + std::distance(std::begin(rhs.c), rhs.next))
    {
    }

    constexpr ring_array(ring_array&& rhs)
        noexcept(std::is_nothrow_move_constructible<T>::value) :
        c(std::move(rhs.c)),
        // rhs.c's storage (and thus rhs.next) stays valid after the
        // element-wise move, so the offset can still be measured.
        next(std::begin(c) + std::distance(std::begin(rhs.c), rhs.next))
    {
    }

    constexpr ring_array(container_type const& rhs)
        noexcept(std::is_nothrow_copy_constructible<T>::value) :
        c(rhs),
        next(std::begin(c))
    {
    }

    constexpr ring_array(container_type&& rhs)
        noexcept(std::is_nothrow_move_constructible<T>::value) :
        c(std::move(rhs)),
        next(std::begin(c))
    {
    }

    constexpr ring_array& operator=(ring_array const& rhs)
        noexcept(std::is_nothrow_copy_assignable<T>::value) {
        c = rhs.c;
        next = std::begin(c) + std::distance(std::begin(rhs.c), rhs.next);
        return *this;
    }

    constexpr ring_array& operator=(ring_array&& rhs)
        noexcept(std::is_nothrow_move_assignable<T>::value) {
        // Capture the ring position before moving out of rhs.c.
        auto position = static_cast<size_type>(
            std::distance(std::begin(rhs.c), rhs.next)
        );
        c = std::move(rhs.c);
        next = std::begin(c) + position;
        return *this;
    }

    // Rotates storage so logical order equals physical order, then
    // exposes the underlying array.
    constexpr container_type& linearize()
    noexcept {
        std::rotate(std::begin(c), next, std::end(c));
        next = std::begin(c);
        return c;
    }

    // Capacity is the compile-time Size; the ring is always "full".
    constexpr size_type size() const noexcept {
        return Size;
    }

    constexpr bool empty() const noexcept {
        return Size == 0;
    }

    constexpr void fill(value_type const& value)
    noexcept(std::is_nothrow_copy_constructible<T>::value) {
        c.fill(value);
    }

    // Unchecked access by logical position (0 == oldest element).
    constexpr reference operator[](size_type position) noexcept {
        return c[position_to_index(position)];
    }

    constexpr const_reference operator[](size_type position)
    const noexcept {
        return c[position_to_index(position)];
    }

    // Bounds-checked access; throws std::out_of_range.
    constexpr reference at(size_type position) {
        if (position >= Size) {
            throw_out_of_range();
        }
        return c[position_to_index(position)];
    }

    constexpr const_reference at(size_type position) const {
        if (position >= Size) {
            throw_out_of_range();
        }
        return c[position_to_index(position)];
    }

    // Most recently written element.
    constexpr reference back()
    noexcept {
        return (next != std::begin(c)) ? *(next - 1) : c.front();
    }

    constexpr const_reference back()
    const noexcept {
        return (next != std::begin(c)) ? *(next - 1) : c.front();
    }

    // Oldest element (the one the next push_back overwrites).
    constexpr reference front()
    noexcept {
        return *next;
    }

    constexpr const_reference front()
    const noexcept {
        return *next;
    }

    constexpr iterator begin()
    noexcept {
        return iterator(this, 0u);
    }

    constexpr iterator end()
    noexcept {
        return iterator(this, Size);
    }

    constexpr const_iterator begin()
    const noexcept {
        return const_iterator(this, 0u);
    }

    constexpr const_iterator end()
    const noexcept {
        return const_iterator(this, Size);
    }

    constexpr const_iterator cbegin()
    const noexcept {
        return const_iterator(this, 0u);
    }

    constexpr const_iterator cend()
    const noexcept {
        return const_iterator(this, Size);
    }

    constexpr reverse_iterator rbegin()
    noexcept {
        return reverse_iterator(end());
    }

    constexpr reverse_iterator rend()
    noexcept {
        return reverse_iterator(begin());
    }

    constexpr const_reverse_iterator rbegin()
    const noexcept {
        return const_reverse_iterator(cend());
    }

    constexpr const_reverse_iterator rend()
    const noexcept {
        return const_reverse_iterator(cbegin());
    }

    constexpr const_reverse_iterator crbegin()
    const noexcept {
        return const_reverse_iterator(cend());
    }

    constexpr const_reverse_iterator crend()
    const noexcept {
        return const_reverse_iterator(cbegin());
    }

    // Overwrites the oldest element with a copy of value.
    constexpr void push_back(value_type const& value)
    noexcept(std::is_nothrow_copy_constructible<T>::value) {
        auto value_copy{value};
        push_back(std::move(value_copy));
    }

    constexpr void push_back(value_type&& value)
    noexcept(std::is_nothrow_move_assignable<T>::value) {
        *(next++) = std::move(value);
        if (next == std::end(c)) {
            next = std::begin(c);
        }
    }

    template <typename... From>
    constexpr void emplace_back(From&&... from)
    noexcept(std::is_nothrow_constructible<T, From...>::value
             && std::is_nothrow_move_assignable<T>::value) {
        *(next++) = T(std::forward<From>(from)...);
        if (next == std::end(c)) {
            next = std::begin(c);
        }
    }

    // Pushes [first, last); if the range is longer than Size only the
    // final Size elements survive.
    template <typename InputIterator>
    constexpr void append(InputIterator first, InputIterator last) {
        assert(std::distance(first, last) >= 0);
        while (first != last) {
            push_back(*(first++));
        }
    }

    template <std::ptrdiff_t N>
    constexpr void append(gsl::span<T const, N> data) {
        append(std::cbegin(data), std::cend(data));
    }

private:
    [[noreturn]] void throw_out_of_range()
    const {
        throw std::out_of_range("ring_array");
    }

    // Maps a logical position (0 == oldest) onto a physical array index.
    constexpr size_type position_to_index(size_type position)
    const noexcept {
        difference_type d = next - std::cbegin(c);
        assert(d < static_cast<difference_type>(Size) && d >= 0);
        size_type index = static_cast<size_type>(d) + position;
        return (index < Size) ? index : (index - Size);
    }

    // Declaration order matters: `c` must precede `next` because every
    // constructor's initializer list derives `next` from std::begin(c).
    container_type c;
    container_iterator next;
};
}
|
package com.it.zzb.niceweibo.activity;
import android.content.Context;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.Gravity;
import android.view.View;
import android.view.Window;
import com.it.zzb.niceweibo.R;
import com.it.zzb.niceweibo.bean.Status;
import java.util.ArrayList;
import uk.co.senab.photoview.PhotoViewAttacher;
/**
 * Full-screen image gallery activity: shows a swipeable ViewPager of photos
 * with a top bar displaying "current/total" and a "more options" popup.
 * Expects intent extras: "imagelist_url" (ArrayList of image URLs),
 * "image_position" (start index) and "status" (the originating Status).
 */
public class PictureActivity extends AppCompatActivity implements ViewPagerAdapter.OnSingleTagListener{

    private ImageDetailViewPager mViewPager;
    private ArrayList<String> mDatas;      // image URLs to display
    private int mPosition;                 // initially shown page index
    private int mImgNum;                   // total number of images
    private ViewPagerAdapter mAdapter;
    private Context mContext;
    private ImageDetailTopBar mImageDetailTopBar;

//    private PhotoViewAttacher.OnPhotoTapListener mPhotoTapListener = new PhotoViewAttacher.OnPhotoTapListener() {
//        @Override
//        public void onPhotoTap(View view, float v, float v1) {
//            finish();
//        }
//
//        @Override
//        public void onOutsidePhotoTap() {
//            finish();
//        }
//    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Must be requested before setContentView for a title-less window.
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.activity_picture);
        mContext = PictureActivity.this;
        mDatas = this.getIntent().getStringArrayListExtra("imagelist_url");
        mPosition = getIntent().getIntExtra("image_position", 0);
        mImgNum = mDatas.size();
        final Status status = (Status) getIntent().getSerializableExtra("status");
        mViewPager = (ImageDetailViewPager) findViewById(R.id.viewpagerId);
        mImageDetailTopBar = (ImageDetailTopBar) findViewById(R.id.imageTopBar);
        mAdapter = new ViewPagerAdapter(mDatas, this);
        mAdapter.setOnSingleTagListener(this);
        mViewPager.setAdapter(mAdapter);
        // Hide the "x/y" counter when there is only a single image.
        if (mImgNum == 1) {
            mImageDetailTopBar.setPageNumVisible(View.GONE);
        } else {
            mImageDetailTopBar.setPageNum((mPosition + 1) + "/" + mImgNum);
        }
        mViewPager.setCurrentItem(mPosition);
        mViewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            }

            @Override
            public void onPageSelected(int position) {
                // Refresh the "current/total" counter whenever the page changes.
                mImageDetailTopBar.setPageNum((position + 1) + "/" + mImgNum);
            }

            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });
        // "More" button: toggle a bottom-anchored popup for the current image.
        mImageDetailTopBar.setOnMoreOptionsListener(new ImageDetailTopBar.OnMoreOptionsListener() {
            @Override
            public void onClick(View view) {
                ImageOptionPopupWindow mPopupWindow = new ImageOptionPopupWindow(mDatas.get(mViewPager.getCurrentItem()), mContext);
                if (mPopupWindow.isShowing()) {
                    mPopupWindow.dismiss();
                } else {
                    mPopupWindow.showAtLocation(findViewById(R.id.activity_picture), Gravity.BOTTOM, 0, 0);
                }
            }
        });
    }

    /** Single-tap anywhere on a photo closes the gallery. */
    @Override
    public void onTag() {
        finish();
    }
}
|
/*!
* JavaScript to update page URL when a redirect is viewed, ensuring that the
* page is scrolled to the id when it's a redirect with fragment.
*
* This is loaded in the top queue, so avoid unnecessary dependencies
* like mediawiki.Title or mediawiki.Uri.
*/
( function ( mw, $ ) {
	var profile = $.client.profile(),
		canonical = mw.config.get( 'wgInternalRedirectTargetUrl' ),
		fragment = null,
		shouldChangeFragment, index;

	// Clear internal mw.config entries, so that no one tries to depend on them
	mw.config.set( 'wgInternalRedirectTargetUrl', null );

	// Extract the "#fragment" part (if any) of the redirect target URL.
	index = canonical.indexOf( '#' );
	if ( index !== -1 ) {
		fragment = canonical.slice( index );
	}

	// Never override the fragment if the user intended to look at a different section
	shouldChangeFragment = fragment && !location.hash;

	// Replace the whole URL if possible, otherwise just change the fragment
	if ( canonical && history.replaceState ) {
		if ( !shouldChangeFragment ) {
			// If the current page view has a fragment already, don't override it
			canonical = canonical.replace( /#.*$/, '' );
			canonical += location.hash;
		}
		// This will also cause the browser to scroll to given fragment
		history.replaceState( /*data=*/ history.state, /*title=*/ document.title, /*url=*/ canonical );
		// …except for IE 10 and 11. Prod it with a location.hash change.
		if ( shouldChangeFragment && profile.name === 'msie' && profile.versionNumber >= 10 ) {
			location.hash = fragment;
		}
	} else if ( shouldChangeFragment ) {
		// Fallback path for browsers without history.replaceState.
		if ( profile.layout === 'webkit' && profile.layoutVersion < 420 ) {
			// Released Safari w/ WebKit 418.9.1 messes up horribly
			// Nightlies of 420+ are ok
			return;
		}
		location.hash = fragment;
	}

	if ( shouldChangeFragment && profile.layout === 'gecko' ) {
		// Mozilla needs to wait until after load, otherwise the window doesn't
		// scroll. See <https://bugzilla.mozilla.org/show_bug.cgi?id=516293>.
		// There's no obvious way to detect this programmatically, so we use
		// version-testing. If Firefox fixes the bug, they'll jump twice, but
		// better twice than not at all, so make the fix hit future versions as
		// well.
		$( function () {
			if ( location.hash === fragment ) {
				location.hash = fragment;
			}
		} );
	}
}( mediaWiki, jQuery ) );
|
<gh_stars>1-10
/*
* Copyright 2022 Whilein
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package w.bot.type.user.name;
/**
 * Grammatical cases used when declining a user's name (Russian morphology).
 *
 * @author whilein
 */
public enum UserNameCase {

    NOM, // nominative (именительный)
    GEN, // genitive (родительный)
    DAT, // dative (дательный)
    ACC, // accusative (винительный)
    INS, // instrumental (творительный)
    ABL; // prepositional (предложный) — NOTE(review): the constant is named
         // ABL (ablative) but glossed as prepositional; confirm intent.

    // Cached values() array to avoid re-allocating on every call.
    public static final UserNameCase[] VALUES = values();
}
|
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author minchoba
* 백준 9954번: Cedric's Cypher
*
* @see https://www.acmicpc.net/problem/9954/
*
*/
/**
 * Caesar-style cipher decoder: each line's final character encodes the shift
 * (its offset from 'A'); the rest of the line is shifted back by that amount,
 * preserving case and passing non-letters through. Input ends at a line
 * containing only "#".
 */
public class Boj9954 {
	private static final String NEW_LINE = "\n";
	private static final String TERMINATE = "#";

	public static void main(String[] args) throws Exception{
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		StringBuilder sb = new StringBuilder();

		while(true) {
			String line = br.readLine();
			// Guard against EOF without the "#" sentinel: readLine() returns
			// null there, and the old code threw NullPointerException.
			if(line == null || line.equals(TERMINATE)) break;

			int length = line.length();
			int code = line.charAt(length - 1) - 'A';   // shift amount from the trailing key character
			line = line.substring(0, length - 1);

			for(char c: line.toCharArray()) {
				if((c < 'A' || c > 'Z') && (c < 'a' || c > 'z')) {
					// Non-letters are copied unchanged.
					sb.append(c);
				}
				else {
					int value = c - code;

					if('A' <= c && c <= 'Z') {
						// Wrap around within the uppercase alphabet.
						if(value < 65) sb.append((char)('Z' + (value - 64)));
						else sb.append((char) value);
					}
					else {
						// Wrap around within the lowercase alphabet.
						if(value < 97) sb.append((char)('z' + (value - 96)));
						else sb.append((char) value);
					}
				}
			}
			sb.append(NEW_LINE);
		}
		System.out.println(sb);
	}
}
|
#!/bin/bash
# Launch the cursivedata Django site under gunicorn.
cd /home/polarsite/cursivedata/www/
# Use the project's virtualenv interpreter and packages.
source venv/bin/activate
# exec replaces this shell so gunicorn receives signals directly
# (important under process supervisors); 2 workers on localhost:8000.
exec gunicorn www.wsgi:application -b localhost:8000 -w 2
|
<reponame>coder-blog/satellite<gh_stars>1-10
package com.kinstalk.satellite.socket.manager;
import com.kinstalk.satellite.domain.AgentModel;
import com.kinstalk.satellite.domain.packet.SocketPacket;
import com.kinstalk.satellite.domain.packet.TimerRunStartPacket;
import com.kinstalk.satellite.service.api.AgentService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.ContextLoader;
/**
* Created by digitZhang on 16/6/22.
*/
/**
 * Handles an incoming timer-run-start socket packet by marking the timer
 * as running for the given agent.
 */
public class TimerRunStartHandler {

    private Logger logger = LoggerFactory.getLogger(TimerRunStartHandler.class);

    /**
     * Processes the packet. Packets that are not {@link TimerRunStartPacket}
     * are logged and ignored; all other failures are logged via the generic
     * catch below.
     *
     * @param packet  the received packet (expected: TimerRunStartPacket)
     * @param agentId the agent the packet originated from
     */
    public void handle(SocketPacket packet, int agentId) {
        logger.debug("agentId:" + agentId);

        try {
            if (!(packet instanceof TimerRunStartPacket)) {
                logger.error("invalid packet type!" + packet);
                // Bail out here: previously execution fell through to the
                // cast below and threw ClassCastException on bad packets.
                return;
            }
            TimerRunStartPacket pkt = (TimerRunStartPacket) packet;

            ApplicationContext act = ContextLoader.getCurrentWebApplicationContext();
            AgentService agentService = (AgentService) act.getBean("agentService");
            agentService.updateRunStatus(pkt.getTimerId(), agentId, AgentModel.STATUS_RUN);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
package com.company.sales.api.models;
import java.math.BigDecimal;
import java.util.UUID;
/**
 * API model for a recurring subscription.
 */
public class Subscription {
    /** Unique identifier of this subscription. */
    public UUID id;
    /** Identifier of the owning customer. */
    public UUID customerId;
    /** Day of the month — presumably the billing day; confirm with callers. */
    public int dayOfMonth;
    /** Subscription price (BigDecimal to avoid floating-point money errors). */
    public BigDecimal price;
}
|
#!/usr/bin/sh
# Train MACPO on multi-agent MuJoCo Ant-v2 (2x4 agent split) with an RNN policy.
env="mujoco"
scenario="Ant-v2"
agent_conf="2x4"
agent_obsk=1
algo="macpo"
exp="rnn"
# Number of seeds to sweep (seq below runs 1..seed_max).
seed_max=1

echo "env is ${env}, scenario is ${scenario}, algo is ${algo}, exp is ${exp}, max seed is ${seed_max}"
for seed in `seq ${seed_max}`;
do
    echo "seed is ${seed}:"
    # NOTE(review): --seed is hard-coded to 50 below, so the loop variable
    # ${seed} only affects the echo, not the actual run — confirm intent.
    CUDA_VISIBLE_DEVICES=0 python train/train_mujoco.py --env_name ${env} --algorithm_name ${algo} --experiment_name ${exp} --scenario ${scenario} --agent_conf ${agent_conf} --agent_obsk ${agent_obsk} --lr 9e-5 --critic_lr 5e-3 --std_x_coef 1 --std_y_coef 5e-1 --seed 50 --n_training_threads 4 --n_rollout_threads 16 --num_mini_batch 40 --episode_length 1000 --num_env_steps 10000000 --ppo_epoch 1 --use_value_active_masks --add_center_xy --use_state_agent --kl_threshold 0.0065 --safety_bound 10 --safety_gamma 0.09 --line_search_fraction 0.5 --fraction_coef 0.27
done
|
#!/usr/bin/env bash
set -eu
# Print the package version declared in setup.py (version='X.Y.Z').
# \K discards everything matched so far, leaving only the version number.
# Fix: components may have more than one digit (e.g. 1.10.3), so use \d+
# instead of the old single-\d pattern which missed such versions.
echo $(grep -Po "version='\K\d+\.\d+\.\d+" setup.py)
// Once the DOM is ready, wire the change-color button to turn the
// page background green.
$(document).ready(function () {
  var applyGreenBackground = function () {
    $("body").css("background-color", "#00FF00");
  };
  $("#btnChangeColor").click(applyGreenBackground);
});
// Prints a 5x5 multiplication table to the console, one row per line.
// Every product — including the last in a row — is followed by a tab.
function printMultiplicationTable() {
  for (let rowIndex = 1; rowIndex <= 5; rowIndex++) {
    const cells = Array.from(
      { length: 5 },
      (_, k) => `${rowIndex * (k + 1)}\t`
    );
    console.log(cells.join(''));
  }
}

printMultiplicationTable();

// Output:
// 1    2    3    4    5
// 2    4    6    8    10
// 3    6    9    12   15
// 4    8    12   16   20
// 5    10   15   20   25
#!/bin/bash
# Recursively convert every .bmp under the current directory to a raw .bin
# via bmp2hex.py, then zip the binaries into a kfpkg (optionally merging it
# into a single flashable image when an argument is given).

# Walk the directory tree rooted at $1, invoking bmp2hex.py on each .bmp.
function ergodic(){
	for file in ` ls $1`
	do
		if [ -d $1"/"$file ]
		then
			ergodic $1"/"$file
		else
			local path=$1"/"$file    # full path of the file
			local name=$file         # bare file name
			if [ "${file##*.}" = "bmp" ]; then
				# Fix: pass the full $path, not the bare $name — the old
				# code passed $file, which fails for files found in
				# subdirectories since we never cd into them.
				python3 bmp2hex.py "$path" -kbin
			fi
		fi
	done
}

INIT_PATH="."
ergodic $INIT_PATH

# delete existing package so zip starts fresh
if [ -e pic.kfpkg ]
then
	rm pic.kfpkg
fi

# compress the flash manifest and bins into a kfpkg
zip pic.kfpkg flash-list.json 0.bin 1.bin 2.bin 3.bin 4.bin 5.bin 6.bin

if [ "$1" ] ;then
	echo "merge kfpkg"
	# merge kfpkg into one bin, then repack at flash offset 12582912
	python3 merge_kfpkg.py pic.kfpkg
	rm pic.kfpkg
	#                     addr      file    save_file
	python3 pack_kfpkg.py 12582912 pic.bin pic.kfpkg
fi

rm *.bin
|
<filename>algorand-spring-starter-demo/src/main/java/com/algorand/starter/demo/cryptopayment/model/Card.java<gh_stars>0
package com.algorand.starter.demo.cryptopayment.model;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@NoArgsConstructor
@AllArgsConstructor
public class Card extends PaymentInstrument {

    // Primary Account Number (the card number); ISO/IEC 7812 PANs are
    // 13–19 digits, matching the @Size constraint.
    // NOTE(review): stored as a plain String — confirm it is masked/encrypted
    // before persistence or logging.
    @NotNull
    @Size(min = 13, max = 19)
    private String primaryAccountNumber;

    public String getPrimaryAccountNumber() {
        return primaryAccountNumber;
    }

    public void setPrimaryAccountNumber(String primaryAccountNumber) {
        this.primaryAccountNumber = primaryAccountNumber;
    }
}
<filename>doc/html/search/groups_a.js<gh_stars>0
// Appears to be generated by Doxygen's HTML search index (doc/html/search);
// do not edit by hand — regenerate the docs instead.
var searchData=
[
  ['types_1264',['Types',['../group__types_group.html',1,'']]]
];
|
<reponame>yuetchn/VueNext-ElementPlus-Admin<filename>src/components/index.ts<gh_stars>1-10
/*
* @ModuleName: Global components
* @Author: <EMAIL>
* @LastEditTime: 2022-04-18 14:08:49
*/
import { App } from "vue";
import Dialog from "./Dialog/Dialog.vue";
import SvgIcon from "./SvgIcon/SvgIcon";
import Page from "./Page/Page.vue";
import Editor from "./Editor/Editor.vue";
import Chart from "./Charts/Chart";
import Table from "./Table/Table";
import MarkDown from "./MarkDown/MarkDown"
import QrCode from "./QrCode/QrCode"
import Link from "./Link/Link"
// After registering a component here, also declare its props in
// src > types > components > components.d.ts
// Registers every global "g-*" component on the given Vue app instance.
export default (app: App) => {
  app.component("g-dialog", Dialog);
  app.component("g-svg-icon", SvgIcon);
  app.component("g-page", Page);
  app.component("g-editor", Editor);
  app.component("g-chart", Chart);
  app.component("g-table", Table);
  // Fix: trailing semicolons were inconsistent across these calls.
  app.component("g-mark-down", MarkDown);
  app.component("g-qr-code", QrCode);
  app.component("g-link", Link);
};
|
import ApiMap from "@/apimap";
/**
 * Invokes an API function with the given payload, commits the response's
 * `data` to the store under `constant`, and resolves with the full response.
 * Rejections from the API call propagate unchanged.
 */
function _promise(apiFunction, payload, store, constant) {
  // The API call already returns a promise, so chain on it directly instead
  // of wrapping it in `new Promise` (explicit-constructor anti-pattern).
  return apiFunction(payload).then(resp => {
    store.commit(constant, resp.data);
    return resp;
  });
}

// No actions are exposed from this module yet.
export default {
};
<reponame>lsst-sqre/mobu<filename>src/mobu/monkey.py
"""The monkey."""
from __future__ import annotations
import logging
import sys
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING
import structlog
from aiohttp import ClientSession
from aiojobs import Scheduler
from .config import config
from .exceptions import SlackError
from .models.monkey import MonkeyData, MonkeyState
from .slack import SlackClient
if TYPE_CHECKING:
from typing import Optional, Type
from aiojobs._job import Job
from .business.base import Business
from .models.monkey import MonkeyConfig
from .models.user import AuthenticatedUser
__all__ = ["Monkey"]
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
class Monkey:
    """Runs one business and manages its log and configuration."""

    def __init__(
        self,
        monkey_config: MonkeyConfig,
        business_type: Type[Business],
        user: AuthenticatedUser,
        session: ClientSession,
    ):
        self.config = monkey_config
        self.name = monkey_config.name
        self.state = MonkeyState.IDLE
        self.user = user
        self.restart = monkey_config.restart
        self._session = session
        # Per-monkey log destination; exposed to callers via logfile().
        self._logfile = NamedTemporaryFile()
        self._job: Optional[Job] = None
        formatter = logging.Formatter(
            fmt="%(asctime)s %(message)s", datefmt=DATE_FORMAT
        )
        # Log to both the temp file and stdout with the same format.
        fileHandler = logging.FileHandler(self._logfile.name)
        fileHandler.setFormatter(formatter)
        streamHandler = logging.StreamHandler(stream=sys.stdout)
        streamHandler.setFormatter(formatter)
        logger = logging.getLogger(self.name)
        # Clear inherited handlers so repeated construction of a monkey with
        # the same name doesn't duplicate log lines.
        logger.handlers.clear()
        logger.setLevel(logging.INFO)
        logger.addHandler(fileHandler)
        logger.addHandler(streamHandler)
        logger.propagate = False
        logger.info(f"Starting new file logger {self._logfile.name}")
        self.log = structlog.wrap_logger(logger)
        self._slack = None
        # alert_hook may be unset or the literal string "None" — presumably
        # from config serialization; verify against config loading.
        if config.alert_hook and config.alert_hook != "None":
            self._slack = SlackClient(config.alert_hook, session, self.log)
        self.business = business_type(self.log, self.config.options, self.user)

    async def alert(self, e: Exception) -> None:
        """Send an exception to Slack unless shutting down or unconfigured."""
        if self.state in (MonkeyState.STOPPING, MonkeyState.FINISHED):
            state = self.state.name
            self.log.info(f"Not sending alert because state is {state}")
            return
        if not self._slack:
            self.log.info("Alert hook isn't set, so not sending to Slack")
            return
        if isinstance(e, SlackError):
            # SlackError carries its own structured alert payload.
            await self._slack.alert_from_exception(e)
        else:
            msg = f"Unexpected exception {type(e).__name__}: {str(e)}"
            await self._slack.alert(self.user.username, msg)

    def logfile(self) -> str:
        """Flush and return the path of this monkey's log file."""
        self._logfile.flush()
        return self._logfile.name

    async def start(self, scheduler: Scheduler) -> None:
        """Spawn the business loop as a background job on the scheduler."""
        self._job = await scheduler.spawn(self._runner())

    async def _runner(self) -> None:
        """Run the business, restarting on failure when configured to.

        State transitions here are order-sensitive: an exception while
        RUNNING moves the monkey to ERROR, while STOPPING (set externally
        by stop()) terminates the loop.
        """
        run = True
        while run:
            try:
                self.state = MonkeyState.RUNNING
                await self.business.run()
                run = False
            except Exception as e:
                self.log.exception(
                    "Exception thrown while doing monkey business"
                )
                await self.alert(e)
                # Only restart when configured and not externally stopped.
                run = self.restart and self.state == MonkeyState.RUNNING
                if self.state == MonkeyState.RUNNING:
                    self.state = MonkeyState.ERROR
                if run:
                    await self.business.error_idle()
                    if self.state == MonkeyState.STOPPING:
                        self.log.info("Shutting down monkey")
                        run = False
        else:
            self.log.info("Shutting down monkey")
            await self.business.close()
            self.state = MonkeyState.FINISHED

    async def stop(self) -> None:
        """Stop the business and wait for the background job to finish."""
        if self.state == MonkeyState.FINISHED:
            return
        elif self.state in (MonkeyState.RUNNING, MonkeyState.ERROR):
            self.state = MonkeyState.STOPPING
            await self.business.stop()
        if self._job:
            await self._job.wait()
        self.state = MonkeyState.FINISHED

    def dump(self) -> MonkeyData:
        """Serialize current monkey status for the status API."""
        return MonkeyData(
            name=self.name,
            business=self.business.dump(),
            restart=self.restart,
            state=self.state,
            user=self.user,
        )
|
#!/bin/bash
# Stop the service whose PID is recorded in the pidfile given as $1.
pidfile=$1
pwd=$(pwd)
# Service name is derived from the current directory's basename.
service=${pwd##*/}

if [ ! -f "$pidfile" ]
then
	echo "[$service] Service is not running - $pidfile"
	exit 0
fi

echo "[$service] Stopping Service - $pidfile. PID=$(cat $pidfile)"
# NOTE(review): SIGKILL (-9) gives the process no chance to shut down
# gracefully — confirm the service has no cleanup to perform on exit.
kill -9 $(cat $pidfile)
rm $pidfile
|
package io.opensphere.core.util.lang;
/**
 * Runtime exception that indicates an unintended branch of code has been
 * reached — typically used to wrap a checked exception from an API call
 * that cannot actually fail at the call site.
 */
public class ImpossibleException extends RuntimeException
{
    /**
     * Default serial version UID.
     */
    private static final long serialVersionUID = 1L;

    /**
     * Construct the exception.
     *
     * @param throwable The causing {@link Throwable}.
     */
    public ImpossibleException(Throwable throwable)
    {
        super(throwable);
    }
}
|
# Generate Python bindings for the gait recorder protobuf message definition.
protoc --proto_path=../../MessageDefinition/ --python_out=./ ../../MessageDefinition/gait_recorder_message.proto
#!/bin/sh
# Remove all accumulated Spinnaker front50 logs.
rm -rf /var/log/spinnaker/front50
|
<gh_stars>0
package de.arthurpicht.tail4j.modules.tail;
import de.arthurpicht.tail4j.Logger;
import de.arthurpicht.tail4j.Tail;
import de.arthurpicht.tail4j.helper.LogFileCreator;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.fail;
/**
 * Integration test for {@link Tail}: a background thread appends lines to a
 * temporary log file while the tail polls it, and the processor counts the
 * statements seen (101 expected in total).
 *
 * <NAME>, Düsseldorf, 16.05.18.
 */
public class TailTest {

    private static final String LOGFILE_NAME = "tail4Jtemp.log";

    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    @Test
    public void test1() {
        try {
            File tempFile = this.folder.newFile(LOGFILE_NAME);
            Logger.debug("tempFile: " + tempFile.getAbsolutePath());

            // Writer thread producing log lines concurrently with the tail.
            Thread logFileCreatorThread = this.startLogFileCreator(tempFile);

            CounterTestLogstatementProcessor counterTestLogstatementProcessor = new CounterTestLogstatementProcessor();
            Tail tail = new Tail(tempFile, 10, counterTestLogstatementProcessor);

            // NOTE(review): the loop continues while hasSuccess() is TRUE —
            // confirm that is the intended polarity (one might expect to poll
            // until success is reached, i.e. while !hasSuccess()).
            while(logFileCreatorThread.isAlive() && counterTestLogstatementProcessor.hasSuccess()) {
                tail.visit();
                Thread.sleep(500);
            }
            // Final visit to pick up any lines written after the last poll.
            tail.visit();

            if (!counterTestLogstatementProcessor.hasSuccess()) {
                fail("Could not process expected log statements.");
            }

            assertEquals("Could not process all expected log statements. Some are missing.", 101, counterTestLogstatementProcessor.getCounter());

        } catch (IOException | InterruptedException e) {
            Logger.info(e.getMessage());
            fail(e.getMessage());
        }
    }

    /** Starts the log-writing helper on its own thread and returns it. */
    private Thread startLogFileCreator(File tempFile) throws IOException {
        Thread thread = new Thread(new LogFileCreator(tempFile));
        thread.start();
        return thread;
    }
}
|
/**
 * Value object pairing a question list's display title with the JSON file
 * backing it. Both fields default to the empty string.
 */
class JsonModel {
  // Parameter properties declare and assign both public fields in one step.
  constructor(public title: string = '', public jsonPath: string = '') {}
}
/**
 * Resolves a question-list type key (e.g. 'ebc') to its display title and
 * backing JSON path. Unknown keys resolve to an empty JsonModel.
 */
export default class {
  private jsonModel: JsonModel;

  constructor(type: string) {
    this.jsonModel = this.getQuestionList(type);
  }

  private getQuestionList(type: string): JsonModel {
    // Lookup table instead of a switch; Map avoids prototype-key surprises.
    const catalog = new Map<string, JsonModel>([
      ['ebc', new JsonModel('EBC', 'ebc.json')],
      ['erp', new JsonModel('ERP', 'erp.json')],
      ['mkp', new JsonModel('MKP', 'mkp.json')],
      ['bulldozer', new JsonModel('推土機', 'bulldozer.json')],
    ]);
    return catalog.get(type) ?? new JsonModel();
  }

  public get title(): string {
    return this.jsonModel.title;
  }

  public get jsonPath(): string {
    return this.jsonModel.jsonPath;
  }
}
|
#!/usr/bin/bash
# Generate symbolic-regression datasets. For each variable-count variant we
# build Test/Val/Train splits with fixed, distinct seeds (2023/2022/2021).
# Test/Val use testPoints=1, one sample per equation, and no force threshold;
# Train uses testPoints=0 and force_threshold=1.
# # ------------------- 1-5 Var -------------------
python dataset.py \
	--config ./configs/dataset_1-5var.json \
	--folder ./datasets/1-5Var_v2/Test/ \
	--seed 2023 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 200

python dataset.py \
	--config ./configs/dataset_1-5var.json \
	--folder ./datasets/1-5Var_v2/Val/ \
	--seed 2022 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 200

# Note: Original settings (numSamples=10000,numSamplesEachEq=50) should produce
# 2,500,000 samples (10000*50*5) but due to bugs the actual number was 2,262,058.
# Now we make 2,000,000 samples to be affordable for 32GB memory.
python dataset.py \
	--config ./configs/dataset_1-5var.json \
	--folder ./datasets/1-5Var_v2/Train/ \
	--seed 2021 \
	--testPoints 0 \
	--numSamplesEachEq 20 \
	--force_threshold 1 \
	--numSamples 20000

# ------------------- 1 Var -------------------
python dataset.py \
	--config ./configs/dataset_1var.json \
	--folder ./datasets/1Var_v2/Test/ \
	--seed 2023 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 1000

python dataset.py \
	--config ./configs/dataset_1var.json \
	--folder ./datasets/1Var_v2/Val/ \
	--seed 2022 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 1000

# Train splits below omit --numSamples/--numSamplesEachEq and fall back to
# the defaults from the corresponding config file.
python dataset.py \
	--config ./configs/dataset_1var.json \
	--folder ./datasets/1Var_v2/Train/ \
	--seed 2021 \
	--testPoints 0 \
	--force_threshold 1

# ------------------- 2 Var -------------------
python dataset.py \
	--config ./configs/dataset_2var.json \
	--folder ./datasets/2Var_v2/Test/ \
	--seed 2023 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 500

python dataset.py \
	--config ./configs/dataset_2var.json \
	--folder ./datasets/2Var_v2/Val/ \
	--seed 2022 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 500

python dataset.py \
	--config ./configs/dataset_2var.json \
	--folder ./datasets/2Var_v2/Train/ \
	--seed 2021 \
	--testPoints 0 \
	--force_threshold 1

# ------------------- 3 Var -------------------
python dataset.py \
	--config ./configs/dataset_3var.json \
	--folder ./datasets/3Var_v2/Test/ \
	--seed 2023 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 333

python dataset.py \
	--config ./configs/dataset_3var.json \
	--folder ./datasets/3Var_v2/Val/ \
	--seed 2022 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 333

python dataset.py \
	--config ./configs/dataset_3var.json \
	--folder ./datasets/3Var_v2/Train/ \
	--seed 2021 \
	--testPoints 0 \
	--force_threshold 1

# ------------------- 1-9 Var -------------------
python dataset.py \
	--config ./configs/dataset_1-9var.json \
	--folder ./datasets/1-9Var_v2/Test/ \
	--seed 2023 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 111

python dataset.py \
	--config ./configs/dataset_1-9var.json \
	--folder ./datasets/1-9Var_v2/Val/ \
	--seed 2022 \
	--testPoints 1 \
	--numSamplesEachEq 1 \
	--force_threshold 0 \
	--numSamples 111

# Note: we make 2,250,000 samples.
python dataset.py \
	--config ./configs/dataset_1-9var.json \
	--folder ./datasets/1-9Var_v2/Train/ \
	--seed 2021 \
	--testPoints 0 \
	--numSamplesEachEq 25 \
	--force_threshold 1 \
	--numSamples 10000
|
#!/bin/bash
# CI entry point: either run PHP CodeSniffer (PHPCS=1) or run the CakePHP
# plugin test suite (optionally with Coveralls coverage reporting).
export PATH="$HOME/.composer/vendor/bin:$PATH"

if [ "$PHPCS" == 1 ]; then
	ARGS="-p --extensions=php --standard=CakePHP .";
	if [ -n "$PHPCS_IGNORE" ]; then
		ARGS="$ARGS --ignore='$PHPCS_IGNORE'"
	fi
	# PHPCS_ARGS, when set, replaces the defaults entirely.
	if [ -n "$PHPCS_ARGS" ]; then
		ARGS="$PHPCS_ARGS"
	fi
	eval "phpcs" $ARGS
	exit $?
fi

# Move to APP
if [ -d ../cakephp/app ]; then
	cd ../cakephp/app
fi

EXIT_CODE=0

if [ "$COVERALLS" == 1 ]; then
	# Coverage run: emit clover.xml for the Coveralls uploader.
	./Console/cake test $PLUGIN_NAME All$PLUGIN_NAME --stderr --coverage-clover build/logs/clover.xml
	EXIT_CODE="$?"
elif [ -z "$FOC_VALIDATE" ]; then
	./Console/cake test $PLUGIN_NAME All$PLUGIN_NAME --stderr
	EXIT_CODE="$?"
fi

# Normalize any test failure to exit status 1 for the CI runner.
if [ "$EXIT_CODE" -gt 0 ]; then
	exit 1
fi

exit 0
|
// Linear Search Algorithm
// Linear search: scan the array front to back for the target value.
// Returns the index of the first match, or false when the value is absent.
// (Callers must compare with === since index 0 is falsy.)
const linearSearch = (arr, targetValue) => {
    let index = 0;
    for (const element of arr) {
        if (element === targetValue) {
            return index;
        }
        index += 1;
    }
    return false;
}
// Demo: 7 is at index 3 of the array.
const arr = [6, 3, 9, 7];
const number = 7;
const index = linearSearch(arr, number);
console.log(index); // Output: 3
<reponame>blockchainhelppro/Telegram-API-Integration-
/// <reference path="../common/models.ts" />
/// <reference path="config.ts" />
/// <reference path="utils.ts" />
///<reference path="interfaces.ts"/>
import * as moment from "moment";
import Config = require("./config");
import Models = require("../common/models");
import Utils = require("./utils");
import Interfaces = require("./interfaces");
// Pairs a quote with the client order id it was submitted under.
class QuoteOrder {
    constructor(public quote: Models.Quote, public orderId: string) { }
}
// aggregator for quoting
// Aggregates quoting across both sides of the book: each request is routed
// to the ExchangeQuoter owning that side.
export class Quoter {
    private _bidQuoter: ExchangeQuoter;
    private _askQuoter: ExchangeQuoter;

    constructor(broker: Interfaces.IOrderBroker,
                exchBroker: Interfaces.IBroker) {
        this._bidQuoter = new ExchangeQuoter(broker, exchBroker, Models.Side.Bid);
        this._askQuoter = new ExchangeQuoter(broker, exchBroker, Models.Side.Ask);
    }

    // Resolve the quoter responsible for a side. Any side other than
    // Ask/Bid yields undefined, matching the original switch fall-through.
    private quoterFor = (side: Models.Side): ExchangeQuoter => {
        if (side === Models.Side.Ask) return this._askQuoter;
        if (side === Models.Side.Bid) return this._bidQuoter;
        return undefined;
    };

    public updateQuote = (q: Models.Timestamped<Models.Quote>, side: Models.Side): Models.QuoteSent => {
        const quoter = this.quoterFor(side);
        return quoter ? quoter.updateQuote(q) : undefined;
    };

    public cancelQuote = (s: Models.Timestamped<Models.Side>): Models.QuoteSent => {
        const quoter = this.quoterFor(s.data);
        return quoter ? quoter.cancelQuote(s.time) : undefined;
    };

    public quotesSent = (s: Models.Side) => {
        const quoter = this.quoterFor(s);
        return quoter ? quoter.quotesSent : undefined;
    };
}
// wraps a single broker to make orders behave like quotes
// wraps a single broker to make orders behave like quotes
export class ExchangeQuoter {
    // The one live quote-order for this side, or null when none is working.
    private _activeQuote: QuoteOrder = null;
    private _exchange: Models.Exchange;
    // Quote orders sent that have not yet reached a terminal state.
    public quotesSent: QuoteOrder[] = [];
    constructor(private _broker: Interfaces.IOrderBroker,
                private _exchBroker: Interfaces.IBroker,
                private _side: Models.Side) {
        this._exchange = _exchBroker.exchange();
        // Track order lifecycle so the active quote can be cleared when it dies.
        this._broker.OrderUpdate.on(this.handleOrderUpdate);
    }
    // Terminal statuses (cases intentionally fall through to shared handling):
    // drop the active quote if it is the order that ended, and prune the
    // order from the sent list.
    private handleOrderUpdate = (o: Models.OrderStatusReport) => {
        switch (o.orderStatus) {
            case Models.OrderStatus.Cancelled:
            case Models.OrderStatus.Complete:
            case Models.OrderStatus.Rejected:
                const bySide = this._activeQuote;
                if (bySide !== null && bySide.orderId === o.orderId) {
                    this._activeQuote = null;
                }
                this.quotesSent = this.quotesSent.filter(q => q.orderId !== o.orderId);
        }
    };
    // Modify the working quote, or start a fresh one; refuses while disconnected.
    public updateQuote = (q: Models.Timestamped<Models.Quote>): Models.QuoteSent => {
        if (this._exchBroker.connectStatus !== Models.ConnectivityStatus.Connected)
            return Models.QuoteSent.UnableToSend;
        if (this._activeQuote !== null) {
            return this.modify(q);
        }
        return this.start(q);
    };
    public cancelQuote = (t: Date): Models.QuoteSent => {
        if (this._exchBroker.connectStatus !== Models.ConnectivityStatus.Connected)
            return Models.QuoteSent.UnableToSend;
        return this.stop(t);
    };
    // "Modify" is implemented as cancel-then-replace.
    private modify = (q: Models.Timestamped<Models.Quote>): Models.QuoteSent => {
        this.stop(q.time);
        this.start(q);
        return Models.QuoteSent.Modify;
    };
    // Submit a new limit order for the quote and record it as active.
    private start = (q: Models.Timestamped<Models.Quote>): Models.QuoteSent => {
        const existing = this._activeQuote;
        const newOrder = new Models.SubmitNewOrder(this._side, q.data.size, Models.OrderType.Limit,
            q.data.price, Models.TimeInForce.GTC, this._exchange, q.time, true, Models.OrderSource.Quote);
        const sent = this._broker.sendOrder(newOrder);
        const quoteOrder = new QuoteOrder(q.data, sent.sentOrderClientId);
        this.quotesSent.push(quoteOrder);
        this._activeQuote = quoteOrder;
        return Models.QuoteSent.First;
    };
    // Cancel the active quote order, if any.
    private stop = (t: Date): Models.QuoteSent => {
        if (this._activeQuote === null) {
            return Models.QuoteSent.UnsentDelete;
        }
        const cxl = new Models.OrderCancel(this._activeQuote.orderId, this._exchange, t);
        this._broker.cancelOrder(cxl);
        this._activeQuote = null;
        return Models.QuoteSent.Delete;
    };
}
using System;
using System.Collections.Generic;
/// <summary>
/// Maps MIPS general-purpose register numbers to their conventional assembler
/// names. Only registers 24-31 are registered here; any other number decodes
/// to "UnknownRegister".
/// </summary>
public class RegisterDecoder
{
    // Register-number -> assembler-name lookup table.
    private static Dictionary<uint, string> GprRegisters = new Dictionary<uint, string>();
    static RegisterDecoder()
    {
        GprRegisters.TryAdd(24, "$t8");
        GprRegisters.TryAdd(25, "$t9");
        GprRegisters.TryAdd(26, "$k0");
        GprRegisters.TryAdd(27, "$k1");
        GprRegisters.TryAdd(28, "$gp");
        GprRegisters.TryAdd(29, "$sp");
        GprRegisters.TryAdd(30, "$fp");
        GprRegisters.TryAdd(31, "$ra");
    }
    /// <summary>
    /// Decodes a register number into its conventional name.
    /// </summary>
    /// <param name="registerNumber">The GPR number to decode.</param>
    /// <returns>The register name, or "UnknownRegister" when unmapped.</returns>
    public static string DecodeRegister(uint registerNumber)
    {
        // TryGetValue does a single dictionary lookup instead of the
        // original ContainsKey + indexer pair (two lookups).
        return GprRegisters.TryGetValue(registerNumber, out var name) ? name : "UnknownRegister";
    }
    public static void Main()
    {
        // Test cases
        Console.WriteLine(DecodeRegister(25)); // Output: $t9
        Console.WriteLine(DecodeRegister(28)); // Output: $gp
        Console.WriteLine(DecodeRegister(32)); // Output: UnknownRegister
    }
}
<gh_stars>1-10
/*******************************************************************************
* Copyright (c) 2008, 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* SAP AG - initial API and implementation
*******************************************************************************/
package org.eclipse.mat.hprof.extension;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.snapshot.ISnapshot;
/**
 * Using this interface an implementor can provide additional information to a HPROF snapshot.
 *
 * See the documentation on the org.eclipse.mat.hprof.enhancer extension point.
 *
 */
public interface IRuntimeEnhancer {
    /**
     * Returns addon information of certain type if such information is attached to the HPROF snapshot.
     *
     * @param <A>
     *            The type of the additional information
     * @param snapshot
     *            the snapshot to extract the addon information from
     * @param addon
     *            the class of the required extra information
     * @return the extra information, or null if none is attached
     * @throws SnapshotException if the snapshot cannot be inspected
     */
    <A> A getAddon(ISnapshot snapshot, Class<A> addon) throws SnapshotException;
}
|
-- Return the single most common name in the users table, with its count.
-- NOTE(review): ties are broken arbitrarily by LIMIT 1.
SELECT
    name,
    COUNT(*) AS num_occurrences
FROM users
GROUP BY name
ORDER BY num_occurrences DESC
LIMIT 1;
# Build the Mesos-master Zabbix module as a C shared library for loading by Zabbix.
go build -buildmode=c-shared -o output/mesos-master-zabbix-module.so
|
#!/bin/bash
# SPDX-license-identifier: Apache-2.0
##############################################################################
# Copyright (c) 2020
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
# Starts VPP, wires it to the host NIC, installs UDP packet-generator streams
# and finally launches HoneyComb.
# Expects env vars: PROTECTED_NET_CIDR, FW_IPADDR, SINK_IPADDR (nounset will
# abort if any is missing).
set -o pipefail
set -o xtrace
set -o errexit
set -o nounset
echo 'start... vpp'
/usr/bin/vpp -c /etc/vpp/startup.conf
echo 'wait vpp be up ...'
# Poll until the VPP CLI responds.
until vppctl show ver; do
    sleep 1;
done
# Configure VPP for vPacketGenerator
nic=eth0
# Host NIC address in CIDR form, reused for the VPP host-interface.
ip_addr=$(ip addr show $nic | grep inet | awk '{print $2}')
vppctl create host-interface name "$nic"
vppctl set int state "host-$nic" up
vppctl set int ip address "host-$nic" "$ip_addr"
vppctl ip route add "$PROTECTED_NET_CIDR" via "$FW_IPADDR"
vppctl loop create
vppctl set int ip address loop0 11.22.33.1/24
vppctl set int state loop0 up
# Install packet streams
# NOTE(review): all ten iterations write the same file path; each iteration
# overwrites it and execs it before the next — presumably intentional, verify.
for i in $(seq 1 10); do
    cat <<EOL > "/opt/pg_streams/stream_fw_udp"
packet-generator new {
    name fw_udp$i
    rate 10
    node ip4-input
    size 64-64
    no-recycle
    interface loop0
    data {
        UDP: ${ip_addr%/*} -> $SINK_IPADDR
        UDP: 15320 -> 8080
        length 128 checksum 0 incrementing 1
    }
}
EOL
    vppctl exec "/opt/pg_streams/stream_fw_udp"
done
vppctl packet-generator enable
# Start HoneyComb
/opt/honeycomb/honeycomb
<gh_stars>0
package main
import (
"fmt"
"time"
)
// Sugar is a single blood-sugar reading.
type Sugar struct {
	Date   time.Time // when the reading was taken
	Record float64   // measured value
	Unit   string    // unit of measure
}

// SpO2 is a single blood-oxygen-saturation reading.
type SpO2 struct {
	Date   time.Time
	Record float64
	Unit   string
}

// Preassure is a single blood-pressure reading.
// NOTE(review): name keeps the original spelling ("Preassure") — renaming
// would break any external references.
type Preassure struct {
	Date   time.Time
	Record float64
	Unit   string
}

// HR is a single heart-rate reading.
type HR struct {
	Date   time.Time
	Record float64
	Unit   string
}
// main is currently a stub that only announces itself; no records are
// created yet.
func main() {
	fmt.Println("Add data...")
}
<filename>lang/py/cookbook/v2/source/cb2_19_4_sol_1.py
def peel(iterable, arg_cnt=1):
    """ Yield each of the first arg_cnt items of the iterable, then
    finally an iterator for the rest of the iterable. """
    # NOTE: Python 2 code (xrange, iterator.next(), print statement below);
    # a Python 3 port would use range() and next(iterator).
    iterator = iter(iterable)
    for num in xrange(arg_cnt):
        yield iterator.next()
    yield iterator
if __name__ == '__main__':
    # Demo: peel the first two items off [1..5], keep the rest lazy.
    t5 = range(1, 6)
    a, b, c = peel(t5, 2)
    print a, b, list(c)
    # emits: 1 2 [3, 4, 5]
|
// Returns the word that occurs most often in a space-separated string.
// Ties go to the word whose winning count was reached first.
function mostFrequentWord(str) {
    const words = str.split(' ');
    // Use a Map rather than a plain object: with `{}` words such as
    // "constructor" or "toString" collided with inherited Object.prototype
    // members, so their counts started from a function value and became NaN.
    const count = new Map();
    words.forEach(word => {
        count.set(word, (count.get(word) || 0) + 1);
    });
    let mostFrequentWord = '';
    let highestFrequency = 0;
    count.forEach((frequency, word) => {
        if (frequency > highestFrequency) {
            mostFrequentWord = word;
            highestFrequency = frequency;
        }
    });
    return mostFrequentWord;
}
import numpy as np
def listify_mat(matrix):
    """Convert a matrix-like object into a list of row lists of strings.

    Every element is first coerced to ``str`` via numpy; rows that cannot
    be converted to a list are reported and skipped. A 1-D (or scalar)
    input raises ``ValueError``.
    """
    as_str = np.array(matrix).astype(str)
    if len(as_str.shape) <= 1:
        raise ValueError("Input is not a 2D matrix")
    rows = []
    for current_row in as_str:
        try:
            rows.append(list(current_row))
        except Exception as e:
            # Best-effort: report the bad row and move on.
            print(f"An exception occurred: {e}")
    return rows
#!/usr/bin/env bash
# Verify that every published scalac-scoverage artifact for the given
# version resolves. Usage: <script> <version>
set -eux
version=$1
# FIX: the 2.12.15/2.13.5 list previously contained a duplicated
# "scalac-scoverage-plugin_2.13.5" line (copy-paste); listed once now.
coursier fetch \
  org.scoverage:scalac-scoverage-plugin_2.11.12:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.8:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.9:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.10:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.11:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.12:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.13:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.14:$version \
  org.scoverage:scalac-scoverage-plugin_2.12.15:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.0:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.1:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.2:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.3:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.4:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.5:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.6:$version \
  org.scoverage:scalac-scoverage-plugin_2.13.7:$version \
  org.scoverage:scalac-scoverage-runtime_2.11:$version \
  org.scoverage:scalac-scoverage-runtime_2.12:$version \
  org.scoverage:scalac-scoverage-runtime_2.13:$version \
  org.scoverage:scalac-scoverage-runtime_sjs1_2.11:$version \
  org.scoverage:scalac-scoverage-runtime_sjs1_2.12:$version \
  org.scoverage:scalac-scoverage-runtime_sjs1_2.13:$version \
<reponame>forrl/typescript-error-reporter-action
import type { Diagnostic } from 'typescript'
import { issueCommand } from '@actions/core/lib/command'
type TS = typeof import('typescript')
// Turns a TypeScript Diagnostic into a GitHub Actions workflow command
// ("error" / "warning" annotation). Diagnostics of other categories
// (Suggestion, Message) fall through the switch and produce no command.
export const reporter = (ts:TS) => (diagnostic:Diagnostic) => {
  switch (diagnostic.category) {
    case ts.DiagnosticCategory.Error: {
      return issueCommand('error', readProperties(diagnostic), ts.flattenDiagnosticMessageText(diagnostic.messageText, '\n'))
    }
    case ts.DiagnosticCategory.Warning: {
      return issueCommand('warning', readProperties(diagnostic), ts.flattenDiagnosticMessageText(diagnostic.messageText, '\n'))
    }
  }
}
// Extracts the annotation location (file / line / col) from a diagnostic.
// Falls back gracefully: no file -> {}, no start offset -> file only.
// Note: a start offset of 0 is treated as "missing", as before.
export const readProperties = ({ start, file }:Diagnostic) => {
  const fileName = file ? file.fileName : undefined
  if (!fileName) return {}
  if (!start) return { file: fileName }
  const { line, column } = parseLocation(file!.getFullText(), start)
  return { file: fileName, line: `${line}`, col: `${column}` }
}
// Converts a character offset into a 1-based line and 0-based column by
// scanning the content up to (but not past) the offset.
export const parseLocation = (content:string, position:number) => {
  let line = 1
  let column = 0
  const limit = Math.min(content.length, position)
  for (let i = 0; i < limit; i++) {
    if (content[i] === '\n') {
      line++
      column = 0
    } else {
      column++
    }
  }
  return { line, column }
}
|
# Interactively set AWS CLI credentials and default region/output format.
aws configure
|
<gh_stars>0
var express = require('express');
var router = express.Router();
var User = require('../models/user');
/* GET home page. */
function checkSigninUser(req, res, next) {
if (req.session.user) {
next();
} else {
res.render('index', {
message: "User not logged in!"
});;
}
}
// GET / — requires a signed-in session; renders the home page.
// NOTE(review): the users fetched by User.find({}) are not passed to the
// view — presumably intended to be; verify against the 'home' template.
router.get('/', checkSigninUser, function (req, res, next) {
    User.find({}).then(users => {
        res.render('home');
    }).catch(err => {
        res.json({
            success: false,
            message: "No user found!",
            error: err
        });
    });
});
// GET /logout — tear down the session and bounce back to the index page.
router.get('/logout', (req, res) => {
    req.session.destroy();
    res.redirect('/');
});
module.exports = router;
public static int[] rotateArray(int[] arr) {
int temp = arr[0];
int i;
for (i = 0; i < arr.length - 1; i++)
arr[i] = arr[i + 1];
arr[i] = temp;
return arr;
} |
package serenitylabs.tutorials.trains.search;
import com.serenity.annotation.processor.StepData;
import net.serenitybdd.screenplay.Actor;
import net.serenitybdd.screenplay.Interaction;
import net.serenitybdd.screenplay.actions.Click;
import net.serenitybdd.screenplay.targets.Target;
import net.serenitybdd.screenplay.waits.WaitUntil;
import net.thucydides.core.annotations.Step;
import static com.serenity.annotation.processor.StepOrder.*;
import static net.serenitybdd.screenplay.matchers.WebElementStateMatchers.isVisible;
/**
 * Screenplay interaction: selects a departure day in the site's date picker.
 * Opens the date field, waits for the picker, clicks the requested day and
 * confirms the selection.
 */
class EnterDate implements Interaction {
    // Day to select; filled in by the @StepData annotation processor.
    @StepData(value = "of", order = First)
    DepartureDay departureDay;
    // Input field that opens the date picker.
    @StepData("into")
    Target dateField;
    @Override
    @Step("{0} selects day #departureDay")
    public <T extends Actor> void performAs(T actor) {
        actor.attemptsTo(
                Click.on(dateField),
                WaitUntil.the(DatePicker.DATE_SELECTION_DONE, isVisible()),
                Click.on(DatePicker.DAY.of(departureDay.getDepartureDay())),
                Click.on(DatePicker.DATE_SELECTION_DONE)
        );
    }
}
|
<reponame>dbatten5/dagster
from dagster.core.execution.plan.handle import (
ResolvedFromDynamicStepHandle,
StepHandle,
UnresolvedStepHandle,
)
def test_step_handles():
    """Round-trip the three step-handle flavors through parse_from_key."""
    for key, expected_type in (
        ("foo", StepHandle),
        ("foo[?]", UnresolvedStepHandle),
        ("foo[bar]", ResolvedFromDynamicStepHandle),
    ):
        assert isinstance(StepHandle.parse_from_key(key), expected_type)
    unresolved = StepHandle.parse_from_key("foo[?]")
    resolved = StepHandle.parse_from_key("foo[bar]")
    # Resolving the unresolved handle yields the resolved one, and the
    # resolved handle remembers its unresolved form.
    assert unresolved.resolve("bar") == resolved
    assert resolved.unresolved_form == unresolved
|
#!/bin/bash
# If an error occurs, quit the script and inform the user. This ensures scripts
# like ./build-macosx etc. don't continue on if Autotools isn't installed.
function finish {
    if [ $success -eq 0 ]; then
        echo 'autogen.sh failed to complete: verify that GNU Autotools is installed on the system and try again'
    fi
}
# success is flipped to 1 only after every tool ran; the EXIT trap reports
# a failure otherwise (set -e aborts on the first failing command).
success=0
trap finish EXIT
set -e
echo "Generating build information using aclocal, autoheader, automake and autoconf"
echo "This may take a while ..."
# Regenerate configuration files.
aclocal
autoheader
automake --include-deps --add-missing --copy
autoconf
echo "Now you are ready to run ./configure."
echo "You can also run ./configure --help for extra features to enable/disable."
# Don't quit on errors again from here on out (for calling scripts).
set +e
success=1
|
package info.u250.c2d.engine.cmd;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics.DisplayMode;
import info.u250.c2d.engine.Engine;
import info.u250.c2d.engine.EngineCallback;
import info.u250.c2d.engine.resources.AliasResourceManager;
import info.u250.c2d.engine.resources.looper.LoopLoader;
import java.io.File;
import java.net.URL;
import java.security.CodeSource;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
* @author xjjdog
*/
public class JarExportableCmd {
    /**
     * This command only run on desktop . Add this , you can export the project
     * into a jar to run it
     */
    public static final void process() {
        // Wrap the current engine callback so the jar-aware resource loader is
        // installed just before the original preLoad runs.
        final EngineCallback callback = Engine.getEngineCallback();
        Engine.setEngineCallback(new EngineCallback() {
            @Override
            public void preLoad(DisplayMode mode, String[] assets) {
                //the sort is important , do not change it
                Engine.getAliasResourceManager().setLoopLoader(new LoopLoaderJar());
                callback.preLoad(mode, assets);
            }
            @Override
            public void postLoad() {
                // Delegate untouched.
                callback.postLoad();
            }
        });
    }
}
class LoopLoaderJar extends LoopLoader {
@Override
public void loadResource(String dataDir) {
if (runningFromJar()) {
loadJars(dataDir);
} else {
loadDesktop(dataDir);
}
}
private void loadJars(String dataDir) {
try {
CodeSource src = AliasResourceManager.class.getProtectionDomain().getCodeSource();
if (src != null) {
URL jar = src.getLocation();
ZipInputStream zip = new ZipInputStream(jar.openStream());
ZipEntry zipEntry = null;
while ((zipEntry = zip.getNextEntry()) != null) {
String name = zipEntry.getName();
if (name.startsWith(dataDir)) {
if (!zipEntry.isDirectory()) {
loadFile(Gdx.files.internal(name));
}
}
}
zip.close();
} else {
/* Fail... */
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
private void loadDesktop(String dataDir) {
File file = new File("bin/" + dataDir);
if (file.isDirectory()) {
for (String f : file.list()) {
loadDesktop((dataDir.endsWith("/") ? dataDir : (dataDir + "/")) + f);
}
} else {
loadFile(Gdx.files.internal(dataDir));
}
}
private boolean runningFromJar() {
String className = this.getClass().getName().replace('.', '/');
String classJar = this.getClass().getResource("/" + className + ".class").toString();
if (classJar.startsWith("jar:")) {
return true;
}
return false;
}
} |
<reponame>sliwei/blog-serve
/* jshint indent: 2 */
const moment = require('moment');
// Sequelize model for the bstu_blog table: one row per blog post, with
// soft-delete/draft flags, vote counters and formatted timestamps.
module.exports = function (sequelize, DataTypes) {
    return sequelize.define('bstu_blog', {
        id: {
            type: DataTypes.INTEGER(11),
            allowNull: false,
            primaryKey: true,
            autoIncrement: true
        },
        // Author's user id.
        u_id: {
            type: DataTypes.INTEGER(11),
            allowNull: true
        },
        title: {
            type: DataTypes.STRING(200),
            allowNull: true
        },
        // Rendered post body (HTML); the raw source lives in `markdown`.
        content: {
            type: DataTypes.TEXT,
            allowNull: true
        },
        // 1 = unpublished draft.
        is_draft: {
            type: DataTypes.INTEGER(1),
            allowNull: true,
            defaultValue: '0'
        },
        // 1 = soft-deleted.
        is_del: {
            type: DataTypes.INTEGER(1),
            allowNull: true,
            defaultValue: '0'
        },
        // Up-vote / down-vote counters.
        good: {
            type: DataTypes.INTEGER(11),
            allowNull: true,
            defaultValue: '0'
        },
        bad: {
            type: DataTypes.INTEGER(11),
            allowNull: true,
            defaultValue: '0'
        },
        // Post time; getter renders it as "YYYY/MM/DD HH:mm:ss".
        time: {
            type: DataTypes.DATE,
            get() {
                return moment(this.getDataValue('time')).format('YYYY/MM/DD HH:mm:ss');
            },
            allowNull: true
        },
        // Cover image URL.
        img: {
            type: DataTypes.STRING(255),
            allowNull: true
        },
        is_evaluate: {
            type: DataTypes.INTEGER(1),
            allowNull: false,
            defaultValue: '0'
        },
        // Raw markdown source of the post.
        markdown: {
            type: DataTypes.TEXT,
            allowNull: true
        },
        // Row creation time; getter formats like `time`.
        create_time: {
            type: DataTypes.DATE,
            get() {
                return moment(this.getDataValue('create_time')).format('YYYY/MM/DD HH:mm:ss');
            },
            allowNull: false,
            defaultValue: sequelize.literal('CURRENT_TIMESTAMP')
        },
        code: {
            type: DataTypes.CHAR(10),
            allowNull: true
        },
        category_id: {
            type: DataTypes.INTEGER(11),
            allowNull: true
        }
    }, {
        tableName: 'bstu_blog',
    });
};
|
const { parse } = require('./marked');
const YFM = require('yaml-front-matter');
const angularNonBindAble = require('./angular-nonbindable');
module.exports = function parseDemoMd(file) {
// 获取meta信息
const meta = YFM.loadFront(file);
const content = meta.__content;
delete meta.__content;
const remark = require('remark')();
const ast = remark.parse(content);
// 分离中英文
let isAfterENHeading = false;
let zhPart = '';
let enPart = '';
for (let i = 0; i < ast.children.length; i++) {
const child = ast.children[i];
if (child.type === 'heading' && child.depth === 2 && child.children[0].value === 'en-US') {
isAfterENHeading = true;
}
if (!(child.type === 'heading' && child.depth === 2)) {
if (!isAfterENHeading) {
zhPart += parse(remark.stringify(child));
} else {
enPart += parse(remark.stringify(child));
}
}
}
return {
meta: meta,
zh : angularNonBindAble(zhPart),
en : angularNonBindAble(enPart)
};
}; |
package com.github.goober.sonarqube.plugin.decorator.sonarqube.model;
import lombok.RequiredArgsConstructor;
import lombok.Value;
import java.util.Set;
/**
 * Immutable payload wrapping the set of metrics returned by the SonarQube
 * metrics endpoint (Lombok generates constructor, getters, equals/hashCode).
 */
@Value
@RequiredArgsConstructor
public class MetricsResponse {
    Set<Metric> metrics;
}
|
public class Formatter {
    /**
     * Quotes each "; "-separated token of the input line and joins the
     * quoted tokens with ", ". For example {@code a; b} becomes
     * {@code "a", "b"}.
     */
    public String formatCsvLine(String csvLine) {
        StringBuilder out = new StringBuilder();
        String separator = "";
        for (String token : csvLine.split("; ")) {
            out.append(separator).append('"').append(token).append('"');
            separator = ", ";
        }
        return out.toString();
    }
}
#!/usr/bin/env bash
# Download, build and install GNU m4 into ~/.local.
TMP_DIR="$HOME/.dotfiles_tmp_install"
mkdir -p "$TMP_DIR"
cd "$TMP_DIR" || exit 1
M4_VERSION=1.4.19
# -nc: skip the download if the tarball already exists from a previous run.
wget -nc "https://ftp.gnu.org/gnu/m4/m4-$M4_VERSION.tar.gz"
tar xf "m4-$M4_VERSION.tar.gz"
cd "m4-$M4_VERSION" || exit 1
./configure --prefix=$HOME/.local
# `make install` builds first, then installs under the prefix above.
make install
|
#!/usr/bin/env sh
# Prepare environment: remove everything from a previous run except the
# downloaded tarballs, then work from /tmp/pineapple.
find /tmp/pineapple/* ! -name '*.tar.gz' 2>/dev/null | sort -n -r | xargs rm -rf --
mkdir -p /tmp/pineapple && cd /tmp/pineapple
# Define the functions
# Adds (or refreshes) a `ryujinx` alias in the user's shell rc file, baking
# in the launch-time env prefix assembled in $arg by install().
makealias() {
    ryualias='alias ryujinx="'$arg' GDK_BACKEND=x11 /home/'${USER}'/.local/share/Ryujinx/Ryujinx"'
    # Pick the rc file by matching the login shell name.
    if [ -z "${SHELL##*zsh*}" ]; then
        printf "Detected shell: ZSH\n"
        FILE="/home/${USER}/.zshrc"
    elif [ -z "${SHELL##*bash*}" ]; then
        printf "Detected shell: BASH\n"
        FILE="/home/${USER}/.bashrc"
    else
        printf "Unsupported shell, no alias will be created!\n"
        return 1
    fi
    # Drop any stale alias line before appending the fresh one.
    if [ -f $FILE ]; then
        sed -i '/alias ryujinx/d' $FILE
        echo $ryualias >> $FILE
    else
        printf "$FILE does not exist, creating new file..."
        echo $ryualias > $FILE
    fi
    printf "Alias created successfully, use the command ryujinx the next time you open your terminal.\n"
}
# Strip any previously-created `ryujinx` alias from the user's shell rc
# file; silently does nothing for unsupported shells.
removealias() {
    case "$SHELL" in
        *zsh*) FILE="/home/${USER}/.zshrc" ;;
        *bash*) FILE="/home/${USER}/.bashrc" ;;
        *) return 1 ;;
    esac
    sed -i '/alias ryujinx/d' $FILE
}
# Downloads the latest Ryujinx master build from AppVeyor, extracts it into
# ~/.local/share/Ryujinx, installs desktop/icon/MIME entries, and optionally
# configures gamemode / GPU env tweaks and a shell alias.
# Relies on globals: $version (set before the menu), $USER, $SHELL.
install () {
    # Build-job id of the latest master build, scraped from the AppVeyor API.
    jobid=$(curl -s https://ci.appveyor.com/api/projects/gdkchan/ryujinx/branch/master | grep -Po '"jobId":.*?[^\\]",' |sed 's/"jobId":"\(.*\)",/\1/' )
    printf "Downloading $version...\n"
    curl -L "https://ci.appveyor.com/api/buildjobs/${jobid}/artifacts/ryujinx-${version}-linux_x64.tar.gz" > ryujinx-${version}-linux_x64.tar.gz
    tar -xf ryujinx-${version}-linux_x64.tar.gz
    # Top-level directory inside the tarball (used to verify extraction).
    arch_dir=$(tar --exclude='*/*' -tf ryujinx-${version}-linux_x64.tar.gz)
    if [ -d "$arch_dir" ]; then
        printf "Extraction successful!\n"
        mkdir -p /home/${USER}/.local/share/Ryujinx
        cp -a $arch_dir/. /home/${USER}/.local/share/Ryujinx
    else
        printf "Extraction failed!\nAborting...\n"
        exit
    fi
    # Desktop integration assets from the Pine-jinx repository.
    curl -L "https://raw.githubusercontent.com/edisionnano/Pine-jinx/main/Ryujinx.desktop" > Ryujinx.desktop
    curl -L "https://raw.githubusercontent.com/edisionnano/Pine-jinx/main/Ryujinx.png" > Ryujinx.png
    curl -L "https://raw.githubusercontent.com/edisionnano/Pine-jinx/main/Ryujinx.xml" > Ryujinx.xml
    if ! [ "$(command -v gamemoderun)" ]; then
        printf "Warning:Gamemode not found!\nIf you want to use it you'll have to install it.\n"
        printf "\e[91m$(tput bold)This means that if you choose Y you will have to install it manually yourself (sudo pacman -Syu gamemode on arch)!\e[0m\n"
    fi
    printf "Gamemode is a tool that improves performance on non custom kernels.\n"
    read -p "Do you want to use it? [y/N]: " gamemode
    if [ "$gamemode" = "y" ] || [ "$gamemode" = "Y" ]; then
        arg1="gamemoderun "
    else
        arg1=""
    fi
    # GPU-specific env-var prefixes baked into the Exec= line of the entry.
    # NOTE(review): arg3 is only assigned in the Intel/AMD branch; it is
    # relied on being empty/unset elsewhere (script does not use set -u).
    read -p "Optimize Ryujinx for 1)Nvidia 2)Intel and AMD 3)None: " gpuopt
    if [ "$gpuopt" = "1" ]; then
        arg2='env __GL_THREADED_OPTIMIZATIONS=0 __GL_SYNC_TO_VBLANK=0 '
    elif [ "$gpuopt" = "2" ]; then
        arg2="env AMD_DEBUG=w32ge,w32ps,w32cs,nohyperz,nofmask glsl_zero_init=true radeonsi_clamp_div_by_zero=true force_integer_tex_nearest=true mesa_glthread=false vblank_mode=0 "
        printf "MESA_NO_ERROR can give performance boosts in games like Monster Hunter Rise and Animal Crossing but potentially crash others like Splaton 2 and Mario Odyssey\n"
        read -p "Do you want to use it? [y/N]: " mesanoerror
        if [ "$mesanoerror" = "y" ] || [ "$mesanoerror" = "Y" ]; then
            arg3="MESA_NO_ERROR=1 "
        else
            arg3=""
        fi
    else
        arg2=''
    fi
    arg="$arg2$arg3$arg1"
    #Desktop entries do not accept relative paths so the user's name must be in the file
    sed -i "s/dummy/${USER}/g" Ryujinx.desktop
    #Append any optimizations
    sed -i "s/^Exec=/Exec=${arg}/" Ryujinx.desktop
    #Place desktop entry
    mkdir -p /home/${USER}/.local/share/applications && cp Ryujinx.desktop /home/${USER}/.local/share/applications
    #Place icon
    mkdir -p /home/${USER}/.local/share/icons && cp Ryujinx.png /home/${USER}/.local/share/icons
    #Place mime entry
    mkdir -p /home/${USER}/.local/share/mime/packages && cp Ryujinx.xml /home/${USER}/.local/share/mime/packages
    #Set the rights
    chmod +x /home/${USER}/.local/share/Ryujinx/Ryujinx
    #Update the MIME database
    update-mime-database /home/${USER}/.local/share/mime
    #Update the application database
    update-desktop-database /home/${USER}/.local/share/applications
    read -p "Do you want PineJinx to setup an alias for ryujinx? [y/N]: " alias
    if [ "$alias" = "y" ] || [ "$alias" = "Y" ]; then
        makealias
    else
        :
    fi
    printf "Installation successful, launch Ryujinx from your app launcher.\n"
    printf "Also don't forget to show your love on Patreon at https://www.patreon.com/ryujinx\n"
}
# Removes the Ryujinx installation, its desktop/MIME/icon entries and the
# shell alias, then refreshes the desktop databases.
uninstall () {
    printf "Uninstalling..."
    rm -rf /home/${USER}/.local/share/Ryujinx
    rm -rf /home/${USER}/.local/share/mime/packages/Ryujinx.xml
    rm -rf /home/${USER}/.local/share/applications/Ryujinx.desktop
    rm -rf /home/${USER}/.local/share/icons/Ryujinx.png
    update-mime-database /home/${USER}/.local/share/mime
    update-desktop-database /home/${USER}/.local/share/applications
    printf "\nUninstallation successful!\n"
    removealias
}
# ---- Entry point: interactive menu -------------------------------------
printf "Welcome to PinEApple-Ryujinx\n"
printf "Fetching latest version info from the slow AppVeyor api...\n"
# Latest master build version, scraped from the AppVeyor project API.
version=$(curl -s https://ci.appveyor.com/api/projects/gdkchan/ryujinx/branch/master | grep -Po '"version":.*?[^\\]",' | sed 's/"version":"\(.*\)",/\1/')
printf "Latest version is: $version\n"
printf "[1] Install it\n"
printf "[2] Uninstall\n"
# FIX: the original printed "Reinstall\Repair" — the backslash was a typo
# (and `\R` is not a printf escape); it should be a forward slash.
printf "[3] Reinstall/Repair\n"
read -p "Choose an option (or anything else to quit): " option
if [ "$option" = "1" ]; then
    install
elif [ "$option" = "2" ]; then
    uninstall
elif [ "$option" = "3" ]; then
    # Reinstall = clean uninstall followed by a fresh install.
    uninstall
    install
else
    :
fi
exit
|
<filename>C2CRIBuildDir/projects/C2C-RI/src/C2CRIReportsDBLibrary/src/org/fhwa/c2cri/reports/dao/C2CRIAppLayerTestCaseDataDAO.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.reports.dao;
import java.io.File;
import java.sql.DriverManager;
import java.util.ArrayList;
import org.fhwa.c2cri.reports.C2CRIAppLayerTestCaseData;
/**
* Provides support for the C2CRI_AppLayerTestCaseData Table.
* @author TransCore ITS
*/
public class C2CRIAppLayerTestCaseDataDAO extends ReportsDAO {
    // Keep a cache of C2CRIAppLayerTestCaseData objects. To speed up processing they will all be written out in batches.
    private ArrayList<C2CRIAppLayerTestCaseData> storedTestCaseDataList = new ArrayList<>();
    // The maximum number of objects to hold before writing them out.
    private int maxHold = 100;
    /**
     * Opens a SQLite connection to the given report database file.
     *
     * @param outdb the SQLite database file to write reports into
     * @throws Exception if the JDBC driver is missing or the connection fails
     */
    public C2CRIAppLayerTestCaseDataDAO(File outdb) throws Exception {
        // Create a SQLite connection
        Class.forName("org.sqlite.JDBC");
        super.conn = DriverManager.getConnection("jdbc:sqlite:" + outdb.getAbsolutePath());
    }
    /**
     * Write a list of Test Case Data objects in a batch operation.
     *
     * NOTE(review): auto-commit is disabled here but no commit() is visible
     * in this class — presumably executeBatch() in ReportsDAO commits and
     * closes; verify. The prepared statement is also not closed here.
     *
     * @param testCaseDataList the list of test case data objects.
     */
    public void insert(ArrayList<C2CRIAppLayerTestCaseData> testCaseDataList) {
        try {
            // Disable auto-commit
            conn.setAutoCommit(false);
            pstmt = super.conn.prepareStatement("INSERT INTO " + APPLAYERTESTCASEDATA_TABLE + "("
                    + "id, Standard, TestCase, Iteration, VariableName, Description, "
                    + "DataType, VariableValue, ValidValues, TestCaseIndex) VALUES (?,?,?,?,?,?,?,?,?,?)");
            for (C2CRIAppLayerTestCaseData testCaseData : testCaseDataList) {
                // Bind columns in table order; col tracks the parameter index.
                int col = 1;
                pstmt.setInt(col++, testCaseData.getId());
                pstmt.setString(col++, testCaseData.getStandard());
                pstmt.setString(col++, testCaseData.getTestCase());
                pstmt.setString(col++, testCaseData.getIteration());
                pstmt.setString(col++, testCaseData.getVariableName());
                pstmt.setString(col++, testCaseData.getDescription());
                pstmt.setString(col++, testCaseData.getDataType());
                pstmt.setString(col++, testCaseData.getVariableValue());
                pstmt.setString(col++, testCaseData.getValidValues());
                pstmt.setInt(col++, testCaseData.getTestCaseIndex());
                pstmt.addBatch();
            }
            executeBatch(pstmt);
        } catch (Exception ex) {
            // Best-effort logging; a failed batch is not propagated to callers.
            ex.printStackTrace();
        }
    }
    /**
     * Store the provided test case data object. If we've reached the maxHold value, write out the current records.
     *
     * @param testCaseData the current test case data object.
     */
    public void insert(C2CRIAppLayerTestCaseData testCaseData) {
        storedTestCaseDataList.add(testCaseData);
        if (storedTestCaseDataList.size() == maxHold){
            insert(storedTestCaseDataList);
            storedTestCaseDataList.clear();
            System.out.println("Wrote out "+maxHold+" test Data Events.");
        }
    }
    /**
     * Write out all remaining data objects in the stored test case list.
     */
    public void flush() {
        if (storedTestCaseDataList.size() > 0){
            insert(storedTestCaseDataList);
            System.out.println("Wrote out the remaining "+storedTestCaseDataList.size()+" Test Case Data Elements.");
            storedTestCaseDataList.clear();
        }
    }
}
|
<filename>src/common/selector/selector_test.go
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package selector_test
import (
"fmt"
"strings"
"testing"
"configcenter/src/common/selector"
"github.com/stretchr/testify/assert"
)
// TestLabel exercises Labels.Validate for valid/invalid keys and values,
// plus AddLabel/RemoveLabel bookkeeping.
func TestLabel(t *testing.T) {
	// assert normal
	var errorKey string
	var err error
	var labels selector.Labels
	key := "key"
	value := "value"
	for _, key = range []string{"key", "0key", "key-.k", strings.Repeat("k", 63)} {
		labels := selector.Labels{
			key: value,
		}
		errorKey, err := labels.Validate()
		assert.Nil(t, err)
		assert.Empty(t, errorKey)
	}
	// assert key err
	for _, key = range []string{"-key", ".key", "_key", "key-", "key_", "key.", strings.Repeat("k", 64)} {
		labels := selector.Labels{
			key: value,
		}
		errorKey, err = labels.Validate()
		assert.NotNil(t, err)
		assert.Equal(t, key, errorKey)
	}
	// assert valid values pass
	// FIX: in the original, this loop's body was the *invalid-value* loop
	// nested inside it (the inner range shadowed `value`), so the positive
	// assertions for valid values never ran. The two loops are now separate.
	key = "key"
	for _, value = range []string{"value", "0value", "value-_.v", strings.Repeat("v", 63)} {
		labels := selector.Labels{
			key: value,
		}
		errorKey, err = labels.Validate()
		assert.Nil(t, err)
		assert.Empty(t, errorKey)
	}
	// assert value err
	for _, value = range []string{"-value", ".value", "_value", "value-", "value_", "value.", strings.Repeat("v", 64)} {
		labels := selector.Labels{
			key: value,
		}
		errorKey, err = labels.Validate()
		assert.NotNil(t, err)
		assert.Equal(t, fmt.Sprintf("%s:%s", key, value), errorKey)
	}
	// assert update
	labels = selector.Labels{
		"key1": "value1",
	}
	labels2 := selector.Labels{
		"key2": "value2",
	}
	labels.AddLabel(labels2)
	labels.RemoveLabel([]string{"key1", "key2", "key3"})
	assert.Empty(t, labels)
}
// TestSelector exercises Selector validation and Mongo-filter generation
// for each supported operator, in valid and invalid configurations.
func TestSelector(t *testing.T) {
	// assert normal: one well-formed selector per operator.
	sl := selector.Selectors{
		{
			Key:      "key",
			Operator: "=",
			Values:   []string{"value"},
		}, {
			Key:      "key",
			Operator: "!=",
			Values:   []string{"value"},
		}, {
			Key:      "key",
			Operator: "in",
			Values:   []string{"value", "value1"},
		}, {
			Key:      "key",
			Operator: "notin",
			Values:   []string{"value", "value1"},
		}, {
			Key:      "key",
			Operator: "exists",
			Values:   []string{},
		}, {
			Key:      "key",
			Operator: "!",
			Values:   []string{},
		},
	}
	errKey, err := sl.Validate()
	assert.Nil(t, err)
	assert.Empty(t, errKey)
	filter, err := sl.ToMgoFilter()
	assert.Nil(t, err)
	assert.NotEmpty(t, filter)
	// assert abnormal: each selector violates its operator's arity/key rules
	// (=/!= with multiple values, in/notin with none, exists/! with values,
	// a malformed key, and an unknown operator).
	ss := selector.Selectors{
		{
			Key:      "key",
			Operator: "=",
			Values:   []string{"value", "value2"},
		}, {
			Key:      "key",
			Operator: "!=",
			Values:   []string{"value", "value2"},
		}, {
			Key:      "key",
			Operator: "in",
			Values:   []string{},
		}, {
			Key:      "key",
			Operator: "notin",
			Values:   []string{},
		}, {
			Key:      "key",
			Operator: "exists",
			Values:   []string{"value"},
		}, {
			Key:      "key",
			Operator: "!",
			Values:   []string{"value"},
		}, {
			Key:      ".key",
			Operator: "=",
			Values:   []string{"value"},
		}, {
			Key:      "key",
			Operator: "?",
			Values:   []string{"value"},
		},
	}
	for _, sl := range ss {
		errKey, err = sl.Validate()
		assert.NotNil(t, err)
		assert.NotEmpty(t, errKey)
		sl.ToMgoFilter()
	}
	// assert to filter abnormal: =/!= with no values cannot be turned into
	// a filter even individually.
	ss = selector.Selectors{
		{
			Key:      "key",
			Operator: "=",
			Values:   []string{},
		}, {
			Key:      "key",
			Operator: "!=",
			Values:   []string{},
		},
	}
	for _, sl := range ss {
		filter, err := sl.ToMgoFilter()
		assert.NotNil(t, err)
		assert.Empty(t, filter)
	}
	key, err := ss.Validate()
	assert.NotEmpty(t, key)
	assert.NotNil(t, err)
	filter, err = ss.ToMgoFilter()
	assert.Empty(t, filter)
	assert.NotNil(t, err)
}
|
#!/bin/sh
# get_help_msg USAGE_MSG USAGE_MSG_FILE
# Echo USAGE_MSG when non-empty; otherwise the contents of USAGE_MSG_FILE
# (when set and existing); otherwise a fallback notice.
#
# Fixes: the 'function' keyword and '[[ ]]' are bashisms that fail under
# #!/bin/sh on POSIX shells such as dash, and the unquoted $USAGE_MSG made
# '[ -z ... ]' misbehave for messages containing whitespace.
get_help_msg() {
	# 'local' is not strictly POSIX but is supported by dash/ash/bash;
	# kept to preserve the original scoping.
	local USAGE_MSG=
	local USAGE_MSG_FILE=
	USAGE_MSG="$1"
	USAGE_MSG_FILE="$2"
	if [ -z "$USAGE_MSG" ]; then
		if [ -n "$USAGE_MSG_FILE" ] && [ -e "$USAGE_MSG_FILE" ]; then
			USAGE_MSG=$(cat "$USAGE_MSG_FILE")
		else
			USAGE_MSG="no help msg and file"
		fi
	fi
	echo "$USAGE_MSG"
}
|
// JavaScript Document
// Guard that prevents double-submission of the create-company form.
var isSubmit = false;
// Company: page-controller singleton for the company pages (join/search,
// home, users list, create wizard). The hosting page is expected to fill
// in url_ajax/url_index and call the relevant init*() method.
Company = new function(){
	var $this = this; // stable reference to the Company singleton inside callbacks
	// AJAX endpoint URLs, injected by the hosting page.
	$this.url_ajax = {
		search : null,
		apply : null,
		user_list : null,
		user_remove : null,
		remarks : null,
		upLogo : null,
	};
	$this.url_index = null; // company index page URL (redirect target)
	// Reactive state for the "join a company" search view.
	$this.view_join = {
		is_default : true,
		is_loading : 0,
		is_empty : 0,
		list:[],
		key:''
	};
	// Reactive state for the join-application popup form.
	$this.view_join_form = {
		id:0,
		company_name:'',
		apply_txt:'',
		show_tips :false,
		is_loading : 0,
	}
$this.initJoin = function(){
$this.view_join = new Vue({
el:'#page_company_list',
data : $this.view_join,
methods : {
flushList : function(){
this.key = $('#key').val();
this.is_default = this.is_empty = 0;
this.is_loading = 1;
this.list = [];
$.post($this.url_ajax.search,{key:this.key},function(rs){
$this.view_join.is_loading = 0;
if(rs.status == 1){
$this.view_join.is_empty = rs.data.length == 0;
$this.view_join.list = rs.data;
}else{
$this.view_join.is_empty = 1;
}
});
},
showApplyForm : function(item){
$this.view_join_form.id = item.id;
$this.view_join_form.company_name = item.company_name;
$this.view_join_form.apply_txt = '';
$this.view_join_form.show_tips = 0;
$(".now_tan").fadeIn(300);
}
}
});
$this.view_join_form = new Vue({
el:'.now_tan',
data : $this.view_join_form,
methods :{
hideForm : function(){
$(".now_tan").fadeOut(100);
},
postForm : function(){
if(this.apply_txt == ''){
return false;
}
this.is_loading = layer.load(1, {
shade: [0.1,'#fff'] //0.1透明度的白色背景
});
var self = this;
$.post($this.url_ajax.apply,{company_id:this.id,content:this.apply_txt},function(rs){
layer.msg(rs.msg);
layer.close(self.is_loading);
window.setTimeout(function(){
window.location.href = $this.url_index;
},1500);
});
},
changeTxt:function(){
this.apply_txt = $('#apply_txt').val();
}
}
});
};
/******* 企业首页开始 ********/
$this.view_home = {
company_id : 0,//企业ID
company_name:'',//企业名
company_head_portrait:'',//头像
remarks:'',//简介
user_number:0,//用户量
apply_number:0,//申请数量
qrcode_path:'',//二维码
is_manage : 0,
open_edit : 0
};
$this.initHome = function(){
$this.view_home = new Vue({
el:"#page_company_home",
data:$this.view_home,
methods : {
editRemarks:function(){
this.open_edit = 1;
},
cancelEditRemarks:function(){
this.open_edit = 0;
},
saveRemarks : function(){
var remarks = $('#company_remarks').val();
$.post($this.url_ajax.remarks, {remarks:remarks}, function(rs){
if(rs.status){
$this.view_home.remarks = rs.data;
$this.view_home.open_edit = 0;
}else{
layer.msg(rs.msg);
}
});
},
saveLogo : function(){
var file = $('#company_logo')[0];
var formData = new FormData();
formData.append("file",file.files[0]);
$.ajax({
url : $this.url_ajax.upLogo,
type : 'POST',
data : formData,
processData : false, // 告诉jQuery不要去处理发送的数据
contentType : false,// 告诉jQuery不要去设置Content-Type请求头
beforeSend:function(){
console.log("正在进行,请稍候");
},
success : function(rs) {
if(rs.status===1){
layer.alert('上传成功',function(){location.href=location.href});
}else{
layer.msg("上传失败");
}
},
error : function(responseStr) {
layer.msg("上传失败");
}
});
},
}
});
};
/* 企业首页结束 */
/************** 用户列表页 *******************/
$this.view_users = new function(){
var $this = this;
$this.user_list = [];
$this.page_size = 5;
$this.page = 1;
$this.is_end = false;
$this.is_loading = true;
$this.getUserList = function(){
$this.is_loading = true;
$.post(Company.url_ajax.user_list, {
page:$this.page,
page_size:$this.page_size
}, function(rs){
if(rs.status){
$this.user_list=$this.user_list.concat(rs.data);
if(rs.data.length < $this.page_size){
$this.is_end = true;
}else{
$this.page +=1;
}
}
$this.is_loading = false;
});
};
$this.remove_loading = null;
$this.removeUser = function(user_id){
layer.confirm('是否要移除些用户?', function(index){
$this.remove_loading = layer.load(2);
console.log(user_id);
$.post(Company.url_ajax.user_remove, {user_id:user_id}, function(rs){
layer.close($this.remove_loading);
if(rs.status == 1){
$this.user_list = [];
$this.is_loading = true;
$this.page = 1;
$this.getUserList();
}else{
layer.alert(rs.msg);
}
});
layer.close(index);
});
}
};
$this.initUsers = function(){
$this.view_users = new Vue({
el:"#page_company_users",
data:$this.view_users
});
$this.view_users.getUserList();
}
/*############### 用户列表页 #####################*/
/************ 创建企业 *******************/
$this.view_create = new function(){
var $this = this;
$this.step = 1;
$this.step1_validate = null;
$this.step2_validate = null;
$this.step3_validate = null;
$this.nextStep = function(){
switch($this.step){
case 1:
if(!$this.step1_validate.form()) return;
break;
case 2:
if(!$this.step2_validate.form()) return;
break;
}
$this.step += 1;
}
$this.prevStep = function(){
$this.step -= 1;
}
$this.err_bank = 0;
$this.checkAliName = function(){
var _v = $('#personal_alipay_name').val() == '' ? 0 : 1;
$('#personal_alipay_bind').val(_v);
return _v;
}
$this.checkWx = function(){
var _v = $('#personal_wechat_name').val() == '' ? 0 : 1;
$('#personal_wechat_bind').val(_v);
return _v;
}
$this.checkBank = function(){
var _v = 0;
_v += $('#personal_bank_user').val() == '' ? 1 : 0;
_v += $('#personal_bank_name').val() == '' ? 2 : 0;
_v += $('#personal_bank_number').val() == '' ? 4 : 0;
if(_v == 7){//全空
$('#personal_bank_bind').val(0);
$this.err_bank = 0;
}else{
$this.err_bank = _v;
$('#personal_bank_bind').val(1);
}
};
$this.submit = function(){
if(isSubmit){
return;
}
var _data = {
personal_alipay_bind : $('#personal_alipay_bind').val(),
personal_bank_bind : $('#personal_bank_bind').val(),
personal_wechat_bind : $('#personal_wechat_bind').val(),
company_name : $('#company_name').val(),
credit_code : $('#credit_code').val(),
business_address : $('#business_address').val(),
legal_person : $('#legal_person').val(),
contact_name : $('#contact_name').val(),
contact_phone : $('#contact_phone').val(),
contact_email : $('#contact_email').val(),
company_type : $('#company_type').val(),
company_account : $('#company_account').val(),
company_bank_name : $('#company_bank_name').val(),
company_address : $('#company_address').val(),
tax_number : $('#tax_number').val(),
ticket_phone : $('#ticket_phone').val(),
personal_alipay_name : $('#personal_alipay_name').val(),
personal_wechat_name : $('#personal_wechat_name').val(),
personal_bank_user : $('#personal_bank_user').val(),
personal_bank_name : $('#personal_bank_name').val(),
personal_bank_number : $('#personal_bank_number').val(),
};
if(_data.personal_alipay_bind == 0 && _data.personal_bank_bind == 0 && _data.personal_wechat_bind == 0){
layer.alert('至少绑定一种结算方式');
return false;
}
isSubmit = true;
$.post('',_data,function(rs){
if(rs.status){
window.location.href=Company.url_index;
}else{
isSubmit = false;
layer.msg(rs.msg);
}
});
}
};
	// initCreate: mount the wizard view and attach jQuery-validate rules
	// for steps 1 and 2. Validation messages are user-facing and
	// intentionally localized.
	$this.initCreate = function(){
		$this.view_create = new Vue({
			el : "#page_create",
			data : $this.view_create
		});
		$this.view_create.step1_validate = $('#create_form').validate({
			rules: {
				company_name : "required",
				//license_path : "required",
				credit_code : "required",
				business_address : "required",
				legal_person : "required",
				contact_name : "required",
				contact_phone : "required",
				contact_email : "required",
				company_type : "required"
			},
			messages: {
				company_name : "企业名称必填",
				license_path : "营业执照必填",
				credit_code : "信用代码必填",
				business_address : "办公地址必填",
				legal_person : "企业法人必填",
				contact_name : "创建联系人必填",
				contact_phone : "联系电话必填",
				contact_email : {
					required : "邮箱必填",
					email : "E-Mail格式不正确"
				},
				company_type : "请选择企业角色"
			}
		});
		$this.view_create.step2_validate = $('#form_step_2').validate({
			rules: {
				company_name2: "required",
				company_account:"required",
				company_bank_name: "required",
				company_address:"required",
				tax_number:"required",
				ticket_phone: "required",
			},
			messages: {
				company_name2: "企业名称必填",
				company_account:"银行账号必填",
				company_bank_name: "开户行必填",
				company_address:"单位地址必填",
				tax_number:"税务登记号必填",
				ticket_phone: "联系电话必填",
			}
		});
	}
	/*########### End create company #################*/
};
|
import { createRouter, createWebHashHistory, RouteRecordRaw } from 'vue-router'
import Home from '../views/Home.vue'
const routes: Array<RouteRecordRaw> = [
{
path: '/',
name: 'Home',
component: Home
},
{
path: '/about',
name: 'About',
component: () => import(/* webpackChunkName: "about" */ '../views/About.vue')
},
{
path: '/button',
name: 'Button',
component: () => import(/* webpackChunkName: "button" */ '../views/button.vue')
},
{
path: '/icon',
name: 'icon',
component: () => import(/* webpackChunkName: "icon" */ '../views/icon.vue')
},
// {
// path: '/mask',
// name: 'mask',
// component: () => import(/* webpackChunkName: "mask" */ '../views/mask.vue')
// },
{
path: '/notification',
name: 'notification',
component: () => import(/* webpackChunkName: "notification" */ '../views/notification.vue')
},
{
path: '/card',
name: 'card',
component: () => import(/* webpackChunkName: "card" */ '../views/card.vue')
},
{
path: '/toast',
name: 'toast',
component: () => import(/* webpackChunkName: "toast" */ '../views/toast.vue')
},
{
path: '/mask',
name: 'mask',
component: () => import(/* webpackChunkName: "mask" */ '../views/mask.vue')
},
{
path: '/pager',
name: 'pager',
component: () => import(/* webpackChunkName: "pager" */ '../views/pager.vue')
},
{
path: '/tabs',
name: 'tabs',
component: () => import(/* webpackChunkName: "tabs" */ '../views/tabs.vue')
},
{
path: '/loading',
name: 'loading',
component: () => import(/* webpackChunkName: "loading" */ '../views/loading.vue')
},
{
path: '/modal',
name: 'modal',
component: () => import(/* webpackChunkName: "modal" */ '../views/modal.vue')
},
{
path: '/drawer',
name: 'drawer',
component: () => import(/* webpackChunkName: "drawer" */ '../views/drawer.vue')
},
{
path: '/actionsheet',
name: 'actionsheet',
component: () => import(/* webpackChunkName: "actionsheet" */ '../views/actionsheet.vue')
},
{
path: '/message',
name: 'message',
component: () => import(/* webpackChunkName: "message" */ '../views/message.vue')
},
{
path: '/alert',
name: 'alert',
component: () => import(/* webpackChunkName: "alert" */ '../views/alert.vue')
},
{
path: '/progress',
name: 'progress',
component: () => import(/* webpackChunkName: "progress" */ '../views/progress.vue')
},
{
path: '/popconfirm',
name: 'popconfirm',
component: () => import(/* webpackChunkName: "popconfirm" */ '../views/popconfirm.vue')
},
{
path: '/popover',
name: 'popover',
component: () => import(/* webpackChunkName: "popover" */ '../views/popover.vue')
},
{
path: '/tooltip',
name: 'tooltip',
component: () => import(/* webpackChunkName: "tooltip" */ '../views/tooltip.vue')
},
{
path: '/list',
name: 'list',
component: () => import(/* webpackChunkName: "list" */ '../views/list.vue')
},
{
path: '/accordion',
name: 'accordion',
component: () => import(/* webpackChunkName: "accordion" */ '../views/accordion.vue')
},
{
path: '/table',
name: 'table',
component: () => import(/* webpackChunkName: "table" */ '../views/table.vue')
},
{
path: '/blockquote',
name: 'blockquote',
component: () => import(/* webpackChunkName: "blockquote" */ '../views/blockquote.vue')
},
{
path: '/divider',
name: 'divider',
component: () => import(/* webpackChunkName: "divider" */ '../views/divider.vue')
},
{
path: '/tag',
name: 'tag',
component: () => import(/* webpackChunkName: "tag" */ '../views/tag.vue')
},
{
path: '/dropdown',
name: 'dropdown',
component: () => import(/* webpackChunkName: "dropdown" */ '../views/dropdown.vue')
},
{
path: '/step',
name: 'step',
component: () => import(/* webpackChunkName: "step" */ '../views/step.vue')
},
{
path: '/anchor',
name: 'anchor',
component: () => import(/* webpackChunkName: "anchor" */ '../views/anchor.vue')
},
{
path: '/carousel',
name: 'carousel',
component: () => import(/* webpackChunkName: "carousel" */ '../views/carousel.vue')
},
{
path: '/tree',
name: 'tree',
component: () => import(/* webpackChunkName: "tree" */ '../views/tree.vue')
},
{
path: '/tmp',
name: 'tmp',
component: () => import(/* webpackChunkName: "tmp" */ '../views/tmp.vue')
},
{
path: '/form',
name: 'form/form',
component: () => import(/* webpackChunkName: "form" */ '../views/form/index.vue')
},
{
path: '/validator',
name: 'form/validator',
component: () => import(/* webpackChunkName: "validator" */ '../views/form/validator.vue')
},
{
path: '/input',
name: 'form/input',
component: () => import(/* webpackChunkName: "input" */ '../views/form/input.vue')
},
{
path: '/select',
name: 'form/select',
component: () => import(/* webpackChunkName: "select" */ '../views/form/select.vue')
}
]
const router = createRouter({
history: createWebHashHistory(process.env.BASE_URL),
routes
})
export default router
|
package progress
import (
"context"
"fmt"
"io"
"testing"
"time"
"github.com/stretchr/testify/assert"
"golang.org/x/sync/errgroup"
)
// TestProgress checks the progress pipeline end to end: a calc with no
// progress context still computes, and a calc under a progress context has
// its items drained by saveProgress and carries the WithMetadata tag.
func TestProgress(t *testing.T) {
	t.Parallel()
	// Without a progress context calc still returns 1+2+3+4 = 10.
	s, err := calc(context.TODO(), 4, "calc")
	assert.NoError(t, err)
	assert.Equal(t, 10, s)
	eg, ctx := errgroup.WithContext(context.Background())
	pr, ctx, cancelProgress := NewContext(ctx)
	var trace trace
	eg.Go(func() error {
		return saveProgress(ctx, pr, &trace)
	})
	// Writer carrying metadata that every emitted item should expose.
	pw, _, ctx := FromContext(ctx, WithMetadata("tag", "foo"))
	s, err = calc(ctx, 5, "calc")
	pw.Close()
	assert.NoError(t, err)
	assert.Equal(t, 15, s)
	cancelProgress()
	err = eg.Wait()
	assert.NoError(t, err)
	// Item count is timing-dependent; accept a small range.
	assert.True(t, len(trace.items) > 5)
	assert.True(t, len(trace.items) <= 7)
	for _, p := range trace.items {
		v, ok := p.Meta("tag")
		assert.True(t, ok)
		assert.Equal(t, v.(string), "foo")
	}
}
// TestProgressNested drains the items produced by reduceCalc (one sync
// calc plus two parallel calcs) and checks the timing-dependent item count
// lands in the expected range.
func TestProgressNested(t *testing.T) {
	t.Parallel()
	eg, ctx := errgroup.WithContext(context.Background())
	pr, ctx, cancelProgress := NewContext(ctx)
	var trace trace
	eg.Go(func() error {
		return saveProgress(ctx, pr, &trace)
	})
	s, err := reduceCalc(ctx, 3)
	assert.NoError(t, err)
	assert.Equal(t, 6, s) // 1+2+3
	cancelProgress()
	err = eg.Wait()
	assert.NoError(t, err)
	assert.True(t, len(trace.items) > 9) // usually 14
	assert.True(t, len(trace.items) <= 15)
}
// calc simulates a computation that sums 1..total under the given progress
// name, writing one Status per step (10ms apart) and a final "done" status,
// and closing its progress writer on return. It aborts with ctx.Err() if
// the context is cancelled mid-run.
func calc(ctx context.Context, total int, name string) (int, error) {
	pw, _, ctx := FromContext(ctx)
	defer pw.Close()

	pw.Write(name, Status{Action: "starting", Total: total})
	sum := 0
	for step := 1; step <= total; step++ {
		select {
		case <-ctx.Done():
			return 0, ctx.Err()
		case <-time.After(10 * time.Millisecond):
		}
		status := Status{Action: "calculating", Total: total, Current: step}
		if step == total {
			status = Status{Action: "done", Total: total, Current: total}
		}
		pw.Write(name, status)
		sum += step
	}
	return sum, nil
}
// reduceCalc runs one synchronous calc ("synccalc") followed by two
// parallel calcs ("calc-0", "calc-1") under its own progress writer, and
// returns the synchronous sum. Any calc error aborts with 0.
func reduceCalc(ctx context.Context, total int) (int, error) {
	eg, ctx := errgroup.WithContext(ctx)
	pw, _, ctx := FromContext(ctx)
	defer pw.Close()

	pw.Write("reduce", Status{Action: "starting"})

	// Synchronous step.
	sum, err := calc(ctx, total, "synccalc")
	if err != nil {
		return 0, err
	}

	// Parallel steps; copy the loop variable so each goroutine sees its
	// own value (same capture semantics as the original IIFE wrapper).
	for i := 0; i < 2; i++ {
		i := i
		eg.Go(func() error {
			_, err := calc(ctx, total, fmt.Sprintf("calc-%d", i))
			return err
		})
	}

	if err := eg.Wait(); err != nil {
		return 0, err
	}
	return sum, nil
}
// trace accumulates every Progress item read by saveProgress so tests can
// assert on the count and metadata of emitted items.
type trace struct {
	items []*Progress
}
// saveProgress drains pr into t.items until the reader reports io.EOF
// (clean shutdown — returns nil) or any other error (returned as-is).
func saveProgress(ctx context.Context, pr Reader, t *trace) error {
	for {
		batch, err := pr.Read(ctx)
		if err == nil {
			t.items = append(t.items, batch...)
			continue
		}
		if err == io.EOF {
			return nil
		}
		return err
	}
}
|
#ifndef FAST_NN_BLOCK_H
#define FAST_NN_BLOCK_H
/* Fix: the previous guard macro _FAST_NN_BLOCK_H_ began with an underscore
 * followed by an uppercase letter, which C reserves for the implementation. */

/* A growable batch of fixed-dimension float vectors with per-entry indices.
 * NOTE(review): the tag '_block' is technically a reserved identifier too,
 * but is kept in case other translation units name 'struct _block'. */
typedef struct _block {
	long long id;            /* block identifier */
	int num_dimensions;      /* dimensionality of each stored vector */
	long long capacity;      /* allocated entry slots */
	long long* indices;      /* one index per stored entry */
	float* data;             /* vector storage */
	long long size;          /* number of stored entries */
} block_t;

/* Flags for block_init selecting which arrays to allocate. */
enum {
	BLOCK_INIT_WITH_DATA = 0x01,
	BLOCK_INIT_WITH_INDICES = 0x02,
	BLOCK_INIT_ALL = 0xff
};

/* Initialize a block with the given id, dimensionality and capacity;
 * init_flags selects which arrays (data/indices) are allocated. */
void block_init(block_t* block, long long block_id, int num_dimensions, long long initial_capacity,
	int init_flags);
/* Release all storage owned by the block. */
void block_destroy(block_t* block);
/* Append one (index, vector) entry. */
void block_push(block_t* block, long long index, const float* vector);
/* Grow (or shrink) the block's storage to new_capacity entries. */
void block_realloc(block_t* block, long long new_capacity);
/* Reassign the block's identifier. */
void block_set_id(block_t* block, long long block_id);
#endif /* FAST_NN_BLOCK_H */
|
#!/bin/bash
# version="0.1"
#
#
# ARG_OPTIONAL_SINGLE([all],[a],[Do all configurations OIDC, IM Connection and secret and navigation updates],[false])
# ARG_OPTIONAL_SINGLE([oidc-only],[o],[Only do the configuration for OIDC and SSO],[true])
# ARG_OPTIONAL_SINGLE([nav-only],[n],[Only add Infrastructure Management to navigation],[false])
# ARG_OPTIONAL_SINGLE([hostname],[s],[Add hostname for Infrastructure Management install],[inframgmtinstall])
# die MESSAGE [STATUS]
# Print MESSAGE to stderr (preceded by the help text when _PRINT_HELP=yes)
# and exit with STATUS (default 1). Fix: default via ${2:-1} and quoted
# expansions instead of an unquoted exit ${_ret}.
die()
{
	local _ret="${2:-1}"
	test "$_PRINT_HELP" = yes && print_help >&2
	echo "$1" >&2
	exit "$_ret"
}
# begins_with_short_option ARG
# Return 0 when ARG's first character is one of this script's short option
# letters (a, o, n, s, h); 1 otherwise (including for an empty ARG).
begins_with_short_option()
{
	local short_opts='aonsh'
	local first_char="${1:0:1}"
	# Deleting the character from the option list changes nothing iff the
	# character is not an option letter.
	if test "$short_opts" = "${short_opts/$first_char/}"; then
		return 1
	fi
	return 0
}
# THE DEFAULTS INITIALIZATION - OPTIONALS
# The mode flags stay unset unless their option is passed (the dispatch at
# the bottom tests them with [ -n ... ]); only the hostname has a default.
#_arg_all=""
#_arg_oidc_only=""
#_arg_nav_only=""
_arg_hostname="inframgmtinstall"
# print_help
# Print usage for every supported option.
# Fix: the Usage line previously omitted -e/--edit-nav even though the
# option is parsed and described in the per-option list below.
print_help()
{
	printf '\t%s\n' " "
	printf '%s\n' "This script configures OIDC, Operators and Navigation items for the IBM CloudPak and Infrastructure Management."
	printf '%s\n' "***** You must have the oc client and cloudctl authenticated ******"
	printf '\t%s\n' " "
	printf 'Usage: %s [-a|--all <arg>] [-o|--oidc-only <arg>] [-n|--nav-only <arg>] [-s|--hostname <arg>] [-e|--edit-nav <arg>] [-h|--help]\n' "$0"
	printf '\t%s\n' "-a, --all: Configure OIDC, IM Connection and the Navigation menu"
	printf '\t%s\n' "-o, --oidc-only: Only configure OIDC"
	printf '\t%s\n' "-n, --nav-only: Only add Infrastructure Management to navigation menu"
	printf '\t%s\n' "-s, --hostname: Add hostname for Infrastructure Management install (default: 'inframgmtinstall')"
	printf '\t%s\n' "-e, --edit-nav: Edit Infrastructure Management navigation menu"
	printf '\t%s\n' "-h, --help: Prints help"
}
# parse_commandline "$@"
# Populate the _arg_* globals. Flag-style options (-a/-o/-n/-e) also
# consume a following (ignored) value argument, matching the argbash-style
# usage "[-a|--all <arg>]"; -s/--hostname requires a real value.
# Fixes: the -o|--oidc-only branch ran 'test $# -lt' with a missing
# operand (a runtime error on every use — now 'test $# -lt 2' like the
# sibling branches), and '--edit-nav=VALUE' stripped the wrong prefix
# ('--edit_nav=' with an underscore), leaving the whole token in
# _arg_edit_nav instead of VALUE.
parse_commandline()
{
	while test $# -gt 0
	do
		_key="$1"
		case "$_key" in
			-a|--all)
				test $# -lt 2
				_arg_all="true"
				shift
				;;
			--all=*)
				_arg_all="${_key##--all=}"
				;;
			-a*)
				_arg_all="${_key##-a}"
				;;
			-o|--oidc-only)
				test $# -lt 2
				_arg_oidc_only="true"
				shift
				;;
			--oidc-only=*)
				_arg_oidc_only="${_key##--oidc-only=}"
				;;
			-o*)
				_arg_oidc_only="${_key##-o}"
				;;
			-n|--nav-only)
				test $# -lt 2
				_arg_nav_only="nav only"
				shift
				;;
			--nav-only=*)
				_arg_nav_only="${_key##--nav-only=}"
				;;
			-n*)
				_arg_nav_only="${_key##-n}"
				;;
			-e|--edit-nav)
				test $# -lt 2
				_arg_edit_nav="edit"
				shift
				;;
			--edit-nav=*)
				_arg_edit_nav="${_key##--edit-nav=}"
				;;
			-e*)
				_arg_edit_nav="${_key##-e}"
				;;
			-s|--hostname)
				test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1
				_arg_hostname="$2"
				shift
				;;
			--hostname=*)
				_arg_hostname="${_key##--hostname=}"
				;;
			-s*)
				_arg_hostname="${_key##-s}"
				;;
			-h|--help)
				print_help
				exit 0
				;;
			-h*)
				print_help
				exit 0
				;;
			*)
				_PRINT_HELP=yes die "FATAL ERROR: Got an unexpected argument '$1'" 1
				;;
		esac
		shift
	done
}
parse_commandline "$@"
# Check for kubectl being installed
# check_kubectl_installed
# Abort with exit status 1 unless a kubectl binary is found on PATH.
check_kubectl_installed() {
	echo "Checking if kubectl is installed..."
	if ! command -v kubectl >/dev/null 2>&1; then
		echo >&2 "kubectl is not installed... Aborting."
		exit 1
	fi
	echo "kubectl is installed"
}
# oidc
# Generate an OIDC client id/secret, build the im-oidc-secret.yaml Secret
# (with or without access-token credentials depending on the operator's
# answer), register the client with IAM via cloudctl, and apply the secret.
# Reads globals $im_url and $cp_url (set by discover_im_cp_urls); sets
# $bypass and $im_url_bypass for config_operator.
oidc()
{
	#!/bin/sh
	echo "Creating client id and client secret."
	# NOTE(review): $RANDOM tops out at 32767, so these credentials have
	# little entropy — confirm this is acceptable for the deployment.
	client_id_random=$((RANDOM * 100000000))
	client_secret_random=$((RANDOM * 52300000000 + RANDOM))
	client_id=`echo ${client_id_random} |base64`
	client_secret=`echo ${client_secret_random} |base64`
	echo "Created client id: $client_id and client secret: $client_secret"
	echo "------------"
	# Ask whether access-token support should be enabled; "no" routes IM
	# traffic through the internal service address instead.
	while true; do
		echo "Access tokens are not required for podified. By default, it will use internal service communication."
		read -p "Do you wish to enable access token support? (y/n)" yn
		case $yn in
			[Yy]* ) bypass="false"
				im_url_bypass=$im_url
				echo "Adding access token information"
				echo "Input your ldap user credentials. These will be deleted when a token is generated."
				read -p "Enter User Name: " myuser
				echo -n Enter User Password:
				read -s mypassword
				echo
				break;;
			[Nn]* )
				bypass="true"
				im_url_bypass="web-service.management-infrastructure-management.svc.cluster.local:3000"
				break;;
			* ) echo "Please answer yes or no.";;
		esac
	done
	echo $myuser
	pw=$mypassword
	echo "Generating OIDC file: "
	echo "-----------"
	# NOTE(review): 'OIDCCLientID'/'OIDCOAuthCLientID' in the conf below
	# have a suspicious capital 'L' compared to mod_auth_openidc's
	# documented OIDCClientID/OIDCOAuthClientID — confirm against the
	# module version in use before changing.
	if [ -n "$myuser" ]; then
		# Variant with oauth username/password for access-token support.
		tee im-oidc-secret.yaml << EOF
kind: Secret
apiVersion: v1
metadata:
  name: imconnectionsecret
stringData:
  cpconsole: $cp_url
  clientid: $client_id
  clientsecret: $client_secret
  oauth_username: $myuser
  oauth_password: $pw
  oidc.conf: |-
    LoadModule auth_openidc_module modules/mod_auth_openidc.so
    ServerName $im_url
    LogLevel debug
    OIDCCLientID $client_id
    OIDCClientSecret $client_secret
    OIDCRedirectURI $im_url/oidc_login/redirect_uri
    OIDCCryptoPassphrase alphabeta
    OIDCOAuthRemoteUserClaim sub
    OIDCRemoteUserClaim name
    # OIDCProviderMetadataURL missing
    OIDCProviderIssuer https://127.0.0.1:443/idauth/oidc/endpoint/OP
    OIDCProviderAuthorizationEndpoint $cp_url/idprovider/v1/auth/authorize
    OIDCProviderTokenEndpoint $cp_url/idprovider/v1/auth/token
    OIDCOAuthCLientID $client_id
    OIDCOAuthClientSecret $client_secret
    OIDCOAuthIntrospectionEndpoint $cp_url/idprovider/v1/auth/introspect
    # ? OIDCOAuthVerifyJwksUri $cp_url/oidc/endpoint/OP/jwk
    OIDCProviderJwksUri $cp_url/oidc/endpoint/OP/jwk
    OIDCProviderEndSessionEndpoint $cp_url/idprovider/v1/auth/logout
    OIDCScope "openid email profile"
    OIDCResponseMode "query"
    OIDCProviderTokenEndpointAuth client_secret_post
    OIDCOAuthIntrospectionEndpointAuth client_secret_basic
    OIDCPassUserInfoAs json
    OIDCSSLValidateServer off
    OIDCHTTPTimeoutShort 10
    OIDCCacheEncrypt On
    <Location /oidc_login>
      AuthType openid-connect
      Require valid-user
      LogLevel debug
    </Location>
    <LocationMatch ^/api(?!\/(v[\d\.]+\/)?product_info$)>
      SetEnvIf Authorization '^Basic +YWRtaW46' let_admin_in
      SetEnvIf X-Auth-Token '^.+$' let_api_token_in
      SetEnvIf X-MIQ-Token '^.+$' let_sys_token_in
      SetEnvIf X-CSRF-Token '^.+$' let_csrf_token_in
      AuthType oauth20
      AuthName "External Authentication (oauth20) for API"
      Require valid-user
      Order Allow,Deny
      Allow from env=let_admin_in
      Allow from env=let_api_token_in
      Allow from env=let_sys_token_in
      Allow from env=let_csrf_token_in
      Satisfy Any
      LogLevel debug
    </LocationMatch>
    OIDCSSLValidateServer Off
    OIDCOAuthSSLValidateServer Off
    RequestHeader unset X_REMOTE_USER
    RequestHeader set X_REMOTE_USER %{OIDC_CLAIM_PREFERRED_USERNAME}e env=OIDC_CLAIM_PREFERRED_USERNAME
    RequestHeader set X_EXTERNAL_AUTH_ERROR %{EXTERNAL_AUTH_ERROR}e env=EXTERNAL_AUTH_ERROR
    RequestHeader set X_REMOTE_USER_EMAIL %{OIDC_CLAIM_EMAIL}e env=OIDC_CLAIM_EMAIL
    RequestHeader set X_REMOTE_USER_FIRSTNAME %{OIDC_CLAIM_GIVEN_NAME}e env=OIDC_CLAIM_GIVEN_NAME
    RequestHeader set X_REMOTE_USER_LASTNAME %{OIDC_CLAIM_FAMILY_NAME}e env=OIDC_CLAIM_FAMILY_NAME
    RequestHeader set X_REMOTE_USER_FULLNAME %{OIDC_CLAIM_NAME}e env=OIDC_CLAIM_NAME
    RequestHeader set X_REMOTE_USER_GROUPS %{OIDC_CLAIM_GROUPS}e env=OIDC_CLAIM_GROUPS
    RequestHeader set X_REMOTE_USER_DOMAIN %{OIDC_CLAIM_DOMAIN}e env=OIDC_CLAIM_DOMAIN
EOF
	else
		# Variant without oauth credentials (access tokens bypassed).
		tee im-oidc-secret.yaml << EOF
kind: Secret
apiVersion: v1
metadata:
  name: imconnectionsecret
stringData:
  oidc.conf: |-
    LoadModule auth_openidc_module modules/mod_auth_openidc.so
    ServerName $im_url
    LogLevel debug
    OIDCCLientID $client_id
    OIDCClientSecret $client_secret
    OIDCRedirectURI $im_url/oidc_login/redirect_uri
    OIDCCryptoPassphrase alphabeta
    OIDCOAuthRemoteUserClaim sub
    OIDCRemoteUserClaim name
    # OIDCProviderMetadataURL missing
    OIDCProviderIssuer https://127.0.0.1:443/idauth/oidc/endpoint/OP
    OIDCProviderAuthorizationEndpoint $cp_url/idprovider/v1/auth/authorize
    OIDCProviderTokenEndpoint $cp_url/idprovider/v1/auth/token
    OIDCOAuthCLientID $client_id
    OIDCOAuthClientSecret $client_secret
    OIDCOAuthIntrospectionEndpoint $cp_url/idprovider/v1/auth/introspect
    # ? OIDCOAuthVerifyJwksUri $cp_url/oidc/endpoint/OP/jwk
    OIDCProviderJwksUri $cp_url/oidc/endpoint/OP/jwk
    OIDCProviderEndSessionEndpoint $cp_url/idprovider/v1/auth/logout
    OIDCScope "openid email profile"
    OIDCResponseMode "query"
    OIDCProviderTokenEndpointAuth client_secret_post
    OIDCOAuthIntrospectionEndpointAuth client_secret_basic
    OIDCPassUserInfoAs json
    OIDCSSLValidateServer off
    OIDCHTTPTimeoutShort 10
    OIDCCacheEncrypt On
    <Location /oidc_login>
      AuthType openid-connect
      Require valid-user
      LogLevel debug
    </Location>
    <LocationMatch ^/api(?!\/(v[\d\.]+\/)?product_info$)>
      SetEnvIf Authorization '^Basic +YWRtaW46' let_admin_in
      SetEnvIf X-Auth-Token '^.+$' let_api_token_in
      SetEnvIf X-MIQ-Token '^.+$' let_sys_token_in
      SetEnvIf X-CSRF-Token '^.+$' let_csrf_token_in
      AuthType oauth20
      AuthName "External Authentication (oauth20) for API"
      Require valid-user
      Order Allow,Deny
      Allow from env=let_admin_in
      Allow from env=let_api_token_in
      Allow from env=let_sys_token_in
      Allow from env=let_csrf_token_in
      Satisfy Any
      LogLevel debug
    </LocationMatch>
    OIDCSSLValidateServer Off
    OIDCOAuthSSLValidateServer Off
    RequestHeader unset X_REMOTE_USER
    RequestHeader set X_REMOTE_USER %{OIDC_CLAIM_PREFERRED_USERNAME}e env=OIDC_CLAIM_PREFERRED_USERNAME
    RequestHeader set X_EXTERNAL_AUTH_ERROR %{EXTERNAL_AUTH_ERROR}e env=EXTERNAL_AUTH_ERROR
    RequestHeader set X_REMOTE_USER_EMAIL %{OIDC_CLAIM_EMAIL}e env=OIDC_CLAIM_EMAIL
    RequestHeader set X_REMOTE_USER_FIRSTNAME %{OIDC_CLAIM_GIVEN_NAME}e env=OIDC_CLAIM_GIVEN_NAME
    RequestHeader set X_REMOTE_USER_LASTNAME %{OIDC_CLAIM_FAMILY_NAME}e env=OIDC_CLAIM_FAMILY_NAME
    RequestHeader set X_REMOTE_USER_FULLNAME %{OIDC_CLAIM_NAME}e env=OIDC_CLAIM_NAME
    RequestHeader set X_REMOTE_USER_GROUPS %{OIDC_CLAIM_GROUPS}e env=OIDC_CLAIM_GROUPS
    RequestHeader set X_REMOTE_USER_DOMAIN %{OIDC_CLAIM_DOMAIN}e env=OIDC_CLAIM_DOMAIN
EOF
	fi
	echo "Finished generating OIDC file"
	echo
	echo
	echo "------------------------"
	# Build the client registration payload for cloudctl.
	echo "Generating registration.json file"
	tee registration.json << EOF
{
  "token_endpoint_auth_method":"client_secret_basic",
  "client_id": "$client_id",
  "client_secret": "$client_secret",
  "scope":"openid profile email",
  "grant_types":[
    "authorization_code",
    "client_credentials",
    "password",
    "implicit",
    "refresh_token",
    "urn:ietf:params:oauth:grant-type:jwt-bearer"
  ],
  "response_types":[
    "code",
    "token",
    "id_token token"
  ],
  "application_type":"web",
  "subject_type":"public",
  "post_logout_redirect_uris":["$cp_url"],
  "preauthorized_scope":"openid profile email general",
  "introspect_tokens":true,
  "trusted_uri_prefixes":["$cp_url/"],
  "redirect_uris":["$cp_url/auth/liberty/callback","$im_url/oidc_login/redirect_uri"]
}
EOF
	echo
	echo 'Finished creating registration.json file'
	echo '----------------------'
	echo "Register OIDC endpoint with IAM: "
	oc project kube-system
	cloudctl target -n kube-system
	cloudctl iam oauth-client-register -f registration.json
	echo "Done with end point registration"
	echo "----------------------"
	echo "Creating OIDC secret:"
	oc apply -f im-oidc-secret.yaml -n management-infrastructure-management
	echo "----------------------"
	echo "OIDC registration complete"
}
# config_operator
# Generate and apply the Connection CR linking the operator to IM.
# Uses $bypass and $im_url_bypass, which are set by oidc().
config_operator(){
	echo "Creating Connection for opeators"
	tee connection.yaml << EOF
apiVersion: infra.management.ibm.com/v1alpha1
kind: Connection
metadata:
  annotations:
    BypassAuth: "$bypass"
  labels:
    controller-tools.k8s.io: "1.0"
  name: imconnection
  namespace: "management-infrastructure-management"
spec:
  cfHost: "$im_url_bypass"
  secrets:
    accessToken:
      secretKeyRef:
        name: imconnectionsecret
        key: accesstoken
EOF
	echo "Connection created"
	oc apply -f connection.yaml -n management-infrastructure-management
}
# import_navigation_file
# Export the current NavConfiguration CR to navconfigurations.orginal
# (kept as a backup; filename spelling is pre-existing and used
# consistently), copy it to navconfigurations.yaml for editing, then
# sanity-check the export.
# Fix: quote $feedback so multi-line kubectl errors print intact instead
# of being collapsed by word splitting.
import_navigation_file() {
	# run kubectl command
	echo "Running kubectl command to retrieve navigation items..."
	echo "*** A backup cr file is stored in ./navconfigurations.orginal"
	feedback=$(bash -c 'kubectl get navconfigurations.foundation.ibm.com multicluster-hub-nav -n kube-system -o yaml > navconfigurations.orginal' 2>&1)
	echo "$feedback"
	cp navconfigurations.orginal navconfigurations.yaml
	echo "Finished importing into navconfigurations.yaml"
	echo "Verifying..."
	# Check that the exported file really is a NavConfiguration.
	if grep -Fxq "kind: NavConfiguration" navconfigurations.yaml; then
		echo "Navconfigurations.yaml file is valid"
	else
		echo "Failed to validate navconfigurations.yaml. Check above for errors. Ensure kubectl is authenticated."
		exit 1
	fi
}
# check_exist
# Abort when the navigation item with id $id is already present in
# navconfigurations.yaml (reads globals $id and $product_name).
check_exist() {
	if grep -iq "id: $id" navconfigurations.yaml; then
		echo "$product_name navigation menu item already exists. Aborting..."
		exit 1
	fi
}
# add_navigation_items
# Insert the Infrastructure Management entry right after the 'navItems:'
# line of the exported navconfigurations.yaml. awk -v expands the \n
# escapes in the string into real newlines.
# NOTE(review): the nav-item string needs YAML list indentation after each
# newline; the single spaces here may have been mangled by a copy — verify
# the inserted block parses as YAML before relying on it.
add_navigation_items() {
	id="cloudforms"
	product_name="Infrastructure management"
	check_exist
	echo "Adding new navigation items to file..."
	inframgmt_nav_item=" - id: cloudforms\n isAuthorized:\n - ClusterAdministrator\n - AccountAdministrator\n - Administrator\n - Operator\n - Editor\n - Viewer\n label: Infrastructure management\n parentId: automate\n serviceId: mcm-ui\n target: _blank\n url: $im_url"
	awk_output="$(awk -v cloud="$inframgmt_nav_item" '1;/navItems:/{print cloud}' navconfigurations.yaml)"
	echo "$awk_output" >navconfigurations.yaml
}
# Update CR with augmented file.
# apply_new_items
# kubectl-apply the augmented navconfigurations.yaml and surface the known
# "NotFound" failure mode caused by an outdated kubectl.
# Fix: quote $feedback so the grep probe is not subject to word splitting
# and glob expansion.
apply_new_items() {
	echo "Updating MCM with new items..."
	feedback=$(bash -c 'kubectl apply -n kube-system -f navconfigurations.yaml --validate=false' 2>&1)
	if echo "$feedback" | grep -q 'Error from server (NotFound): the server could not find the requested resource'; then
		echo "Failed running kubectl apply. Error from server (NotFound): the server could not find the requested resource. The kubectl version needs to be updated."
	fi
	echo "Finished updating MCM"
}
# discover_im_cp_urls
# Derive the CP4MCM console URL from the cp-console route and build the
# Infrastructure Management URL by swapping in the requested hostname.
# Sets globals $cp_url and $im_url.
# Fix: modern $(...) command substitution instead of backticks, and a
# quoted echo.
discover_im_cp_urls(){
	echo "------------"
	echo "Find the Infrastructure Management and CP4MCM console URL."
	cp_url=$(oc get routes cp-console -o=jsonpath='{.spec.host}' -n ibm-common-services)
	cp_url="https://$cp_url"
	echo "$cp_url"
	im_domain=${cp_url#"https://cp-console."}
	im_url="https://$_arg_hostname.$im_domain"
	echo "CP4MCM URL: $cp_url"
	echo "IM URL: $im_url"
}
# Top-level dispatch: the first matching mode flag wins; with no flag the
# script prints help and exits.
if [ -n "$_arg_all" ]; then
	echo "******** Installing all components"
	discover_im_cp_urls
	check_kubectl_installed
	oidc
	config_operator
	import_navigation_file
	add_navigation_items
	apply_new_items
	# NOTE(review): config_operator runs a second time here — looks like an
	# accidental duplicate of the call above; confirm before removing.
	config_operator
	exit 0
fi
if [ -n "$_arg_oidc_only" ]; then
	echo "**** Installing oidc only"
	discover_im_cp_urls
	check_kubectl_installed
	oidc
	exit 0
fi
if [ -n "$_arg_nav_only" ]; then
	echo "***** Installing navigation link only"
	check_kubectl_installed
	discover_im_cp_urls
	import_navigation_file
	add_navigation_items
	apply_new_items
	exit 0
fi
if [ -n "$_arg_edit_nav" ]; then
	echo "***** Editing navigation menu "
	kubectl edit navconfigurations.foundation.ibm.com multicluster-hub-nav -n kube-system --validate=false
	exit 0
fi
print_help
exit 0
|
// Define the WrappedEnum type
// Marker protocol for migrated enum facades; getMigrationResult only
// accepts migration results that conform to it.
protocol WrappedEnum {
    // Define any required properties or methods for the WrappedEnum type
}
// Define the Resources enum
// NOTE(review): no raw type is declared, so cases have no .rawValue, and
// Resources does not conform to WrappedEnum.
enum Resources {
    case ErrorEnumFacadeAddedCase
    // Define other cases as needed
}
/// Identity migration: returns its target untouched.
func noChange(target: Any) -> Any {
    target
}
/// Applies `migration` to `target` and returns the result, which must
/// conform to `WrappedEnum`; any other result aborts via `fatalError`.
func getMigrationResult(migration: (Any) -> Any, target: Any) -> Any {
    guard let wrapped = migration(target) as? WrappedEnum else {
        fatalError("Migration failed.")
    }
    return wrapped
}
// Example usage
// Minimal concrete conformer used to exercise getMigrationResult.
struct ConcreteWrappedEnum: WrappedEnum {
    // Implement the required properties or methods for the WrappedEnum type
}
// Call getMigrationResult with the identity migration on a value that
// conforms to WrappedEnum.
// Fix: Resources declares no raw type, so the previous
// `Resources.ErrorEnumFacadeAddedCase.rawValue` did not compile — and a
// bare Resources case would trip getMigrationResult's fatalError because
// Resources does not conform to WrappedEnum.
let result = getMigrationResult(migration: noChange, target: ConcreteWrappedEnum())
<gh_stars>0
/**
* Copyright 2018 hubohua
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.demoncat.dcapp;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
import com.demoncat.dcapp.common.Constants;
import com.demoncat.dcapp.utils.BitmapCache;
import com.demoncat.dcapp.utils.CommonUtils;
import com.demoncat.dcapp.widget.InfiniteHorizontalScrollView;
/**
 * @Class: InfiniteHorizontalScrollActivity
 * @Description: Demo activity that rotates a vehicle image as the user scrolls
 *               an InfiniteHorizontalScrollView horizontally. Each scroll index
 *               (interpreted as an angle in [0, 360)) is mapped to one of 52
 *               pre-rendered frames named "p1".."p52" in drawable resources.
 * @Author: hubohua
 * @CreateDate: 2018/4/23
 */
public class InfiniteHorizontalScrollActivity extends Activity implements
        InfiniteHorizontalScrollView.OnScrollChangeListener {
    private static final String TAG = InfiniteHorizontalScrollActivity.class.getSimpleName();
    // Custom scroll view that reports an index while scrolling (see onScrollChanged).
    private InfiniteHorizontalScrollView mScrollView;
    // Toggles the scroll view's auto-fix-angle mode; its label reflects the mode.
    private Button mBtnAutoAngle;
    // Displays the vehicle frame matching the current scroll index.
    private ImageView mIvCarStatus;
    // In-memory cache for decoded vehicle frames, keyed by frame number string.
    private BitmapCache mVehicleCache;
    // Created on the UI thread, so posted runnables run on the UI thread.
    private Handler mHandler = new Handler();

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mVehicleCache = new BitmapCache(getApplicationContext());
        setContentView(R.layout.activity_infinite_scroll);
        // init scroll layout
        mScrollView = (InfiniteHorizontalScrollView) findViewById(R.id.hori_scroll_view);
        mScrollView.registerScrollChangeListener(this);
        mIvCarStatus = (ImageView) findViewById(R.id.img_vehicle);
        // init reset button
        mBtnAutoAngle = findViewById(R.id.btn_reset);
        mBtnAutoAngle.setText(mScrollView.isAutoFixAngle() ? "随意角度" : "修正角度");
        mBtnAutoAngle.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mScrollView != null) {
                    // mScrollView.resetScroll();
                    // Toggle auto-fix-angle mode and update the button label to match.
                    mScrollView.setAutoFixAngle(!mScrollView.isAutoFixAngle());
                    mBtnAutoAngle.setText(mScrollView.isAutoFixAngle() ? "随意角度" : "修正角度");
                }
            }
        });
    }

    @Override
    public void onScrollChanged(final int index) {
        // Ignore negative indices; post to the handler so the ImageView update
        // runs on the thread the handler was created on (the UI thread).
        if (index >= 0) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    getImageResource(index);
                }
            });
        }
    }

    /**
     * Load (from cache, or decode from drawable resources) and display the
     * vehicle frame for the given scroll index. Indices outside [0, 360) are
     * ignored. Decode failures and OOM are logged and leave the image unchanged.
     *
     * @param index scroll angle, expected in [0, 360)
     */
    private void getImageResource(int index) {
        Log.d(TAG, "getImageResource index: " + index);
        if (index >= 0 && index < 360) {
            // Map the angle to a frame number in [1, 52].
            index = splitIndex(index + 1);
            String num = String.valueOf(index);
            Log.d(TAG, "getImageResource num: " + num);
            Bitmap bitmap = mVehicleCache.getBitmapFromMemCache(num);
            try {
                // Cache miss (or recycled bitmap): decode the drawable and cache it.
                if (bitmap == null || bitmap.isRecycled()) {
                    String drawableId = "p" + num;
                    // int resId = getResources().getIdentifier(drawableId,
                    // "drawable", getPackageName());
                    // Reflection-based lookup avoids getIdentifier's per-call cost.
                    int resId = CommonUtils.getResId(drawableId, R.drawable.class);
                    Log.d(TAG, "getImageResource resId: " + resId);
                    bitmap = BitmapFactory.decodeResource(getResources(), resId);
                    mVehicleCache.addBitmapToMemoryCache(num,
                            bitmap);
                }
                mIvCarStatus.setImageBitmap(bitmap);
            } catch (Exception e) {
                e.printStackTrace();
            } catch (OutOfMemoryError outOfMemoryError) {
                // Large frame decodes can OOM; swallow so scrolling keeps working.
                outOfMemoryError.printStackTrace();
            }
        }
    }

    // calculate the index from 360 angle to current vehicle image count
    private int splitIndex(int index) {
        // 52 image count and 360 angles
        // NOTE(review): the +0.5 inside Math.round biases the mapping upward
        // (rounds up twice); confirm this skew toward the next frame is intentional.
        index = (int) Math.round((index / 360.0f) * 52.0f + 0.5);
        if (index <= 0) {
            index = 1;
        } else if (index > 52) { // max image number
            index = 52;
        }
        return index;
    }

    @Override
    public void onScrollStateChanged(InfiniteHorizontalScrollView.ScrollType type) {
        Log.d(TAG, "onScrollStateChanged type: " + type);
    }

    @Override
    public void onVehicleClick() {
        // No-op: vehicle taps are intentionally ignored in this demo.
    }
}
|
/*
* Copyright 2019 BROCKHAUS AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package util;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import io.openvalidation.common.utils.NumberParsingUtils;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
public class NumberParsinUtilsTest {
    /** Decimal numbers embedded anywhere in the text are extracted as doubles. */
    @ParameterizedTest
    @CsvSource({"18,18.0", "18.01,18.01", "a 18 b, 18.0", "a 18.01 b, 18.01", "a 0 b, 0"})
    public void parse_decimal_numbers(String value, Double expected) throws Exception {
        Number parsed = NumberParsingUtils.extractDouble(value);
        assertThat(parsed, is(expected));
    }

    /** Pure letters and zero-padded digit runs are not recognised as numbers. */
    @ParameterizedTest
    @ValueSource(strings = {"abcd", "0001"})
    public void string_should_not_contains_a_number(String input) {
        boolean containsNumber = NumberParsingUtils.containsNumber(input);
        assertThat(containsNumber, is(false));
    }
}
|
<filename>dist/service/Writer.d.ts
import { IGeneratorResult } from '../interface/Generator';
export declare class WriterService {
    constructor();
    /** Zip the generated results and write the archive to `path`. */
    zip(path: string, results: IGeneratorResult[]): Promise<void>;
    /** Write all results to disk under `root`. */
    writeMany(root: string, results: IGeneratorResult[]): Promise<void>;
    /** Write one result to disk under `root`. */
    write(root: string, result: IGeneratorResult): Promise<void>;
}
|
<reponame>Gesserok/Market
package stream_tasks;
import java.util.ArrayList;
/**
* Created by ENIAC on 06.12.2017.
*/
public class UserArrayList {
    /**
     * Builds the fixed sample user list used by the stream exercises.
     * Fields per user: id, age, balance, name, gender, city, tag.
     */
    public static ArrayList<User> userArrayList() {
        ArrayList<User> users = new ArrayList<>();
        users.add(new User(0, 29, 120, "Alla", "female", "Kiev", "AO"));
        users.add(new User(1, 26, 115, "Jora", "male", "Kiev", "OA"));
        users.add(new User(2, 27, 150, "Anna", "female", "Odessa", "OAO"));
        users.add(new User(3, 28, 475, "Anton", "male", "Odessa", "OA"));
        users.add(new User(4, 29, 100, "Galla", "female", "Odessa", "OA"));
        users.add(new User(5, 40, 320, "Igor", "male", "Kiev", "OAO"));
        users.add(new User(6, 25, 100, "Vanessa", "female", "Kiev", "OA"));
        users.add(new User(7, 27, 425, "Nastia", "female", "Kiev", "AO"));
        users.add(new User(8, 26, 150, "Nadia", "female", "Kiev", "OAO"));
        return users;
    }
}
|
import { Expose } from 'class-transformer';
import { IsIn, IsLowercase, IsNotEmpty, IsString } from 'class-validator';
import { Locale, Locales } from '../../../../../localisation/types/Locale';
import { BaseValidatable } from '../../../../../validation/BaseValidatable';
/**
 * Request payload for looking up movie details by code.
 * The code is normalised (trimmed and lower-cased) in the constructor so the
 * `@IsLowercase` validator below accepts any casing supplied by callers.
 */
export class GetMovieDetailByCodeRequest extends BaseValidatable {
    public constructor(
        code: string,
        locale: Locale) {
        super();
        // Optional chaining keeps a null/undefined `code` from throwing here;
        // presumably validation rejects it later via @IsNotEmpty — confirm.
        this.code = code?.trim().toLowerCase();
        this.locale = locale;
    }
    /** Movie code; validated as a non-empty lowercase string. */
    @Expose()
    @IsString()
    @IsNotEmpty()
    @IsLowercase()
    public readonly code: string;
    /** Requested locale; must be one of the supported `Locales`. */
    @Expose()
    @IsIn([...Locales])
    public readonly locale: Locale;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.