text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Script for assembling an ARFF dataset with X-ray flux data.
# The script takes the satellite name and year and assembles
# all the files from that satellite and year into a single file.
#
# This version works for year 2008 and years before.
#
# Author: Andre Leon Sampaio Gradvohl, Dr.
# Last update: out 21 13:40:58 -02 2017
#
# Function to generate the header of the ARFF file in the variable HEADER.
# @param $1 = SATELLITE
# @param $2 = YEAR
# Generate the ARFF header and write it to the output file.
# @param $1 = satellite name
# @param $2 = year
# @param $3 = output file path (overwritten)
addHEADER() {
  local CURRENTDATE HEADER
  CURRENTDATE=$(date +"%Y-%m-%d %T")
  HEADER="% Title: X-ray flux observations"
  HEADER="${HEADER}\n% This dataset was assembled with data from NOAA"
  HEADER="${HEADER}\n% Author: Andre Leon Sampaio Gradvohl, Dr."
  HEADER="${HEADER}\n% The date (yyyy-mm-dd hh:mm:ss)"
  HEADER="${HEADER} the data was assembled by the author: ${CURRENTDATE}\n%"
  HEADER="${HEADER}\n% Original data source: https://satdat-vip.ngdc.noaa.gov/sem/goes/data/new_full/$2"
  HEADER="${HEADER}\n% Originating_agency = \"DOC/NOAA/NCEP/NWS/SWPC\""
  HEADER="${HEADER}\n% Archiving_agency = \"DOC/NOAA/NESDIS/NCEI\""
  HEADER="${HEADER}\n% Year: $2"
  HEADER="${HEADER}\n% Satellite: $1\n%"
  HEADER="${HEADER}\n% Data description:"
  HEADER="${HEADER}\n%\ttime_tag: Date and time for each observation in the format YYYY-mm-dd hh:mm:ss.SSS UTC\""
  HEADER="${HEADER}\n%\txs: \"X-ray short wavelength channel irradiance (0.5 - 0.3 nm)\";"
  HEADER="${HEADER}\n%\txl: \"X-ray long wavelength channel irradiance (0.1 - 0.8 nm)\";"
  HEADER="${HEADER}\n%\t\"-99999\" indicates missing values.\n%"
  HEADER="${HEADER}\n@RELATION x-ray_flux_$1_$2\n"
  HEADER="${HEADER}\n@ATTRIBUTE time_tag DATE YYYY-mm-dd hh:mm:ss.SSS"
  HEADER="${HEADER}\n@ATTRIBUTE xs NUMERIC"
  HEADER="${HEADER}\n@ATTRIBUTE xl NUMERIC\n"
  HEADER="${HEADER}\n@DATA"
  HEADER="${HEADER}\n%timetag,xs,xl"
  # Quote the expansion: unquoted, tabs/spacing would be collapsed by word
  # splitting and '%...' tokens could glob-expand.
  echo -e "${HEADER}" > "$3"
}
# The main script starts here
NUMARGS=$#
if [[ ${NUMARGS} -lt 2 ]]; then
  echo -e "\nUsage:\n\t $0 <satellite> <year> [<outputfile>]"
  echo -e "\t<outputfile> is an optional argument"
  exit 0
fi
SATELLITE=$1
YEAR=$2
if [[ ${NUMARGS} -eq 3 ]]; then
  OUTPUTFILE=$3
  STDOUTPUTSET=0
  if [[ -f "${OUTPUTFILE}" ]]; then
    echo "File ${OUTPUTFILE} exists!"
    read -p "Are you sure you want to overwrite it? " -n 1 -r
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
      echo -e "\nQuitting the script"
      exit 1
    else
      echo -e "\nOverwriting file ${OUTPUTFILE}"
    fi
  else
    echo -e "\nAssembling file ${OUTPUTFILE}\n"
  fi
else # only satellite and year given: assemble into a temp file, dump to stdout
  OUTPUTFILE=$(mktemp /tmp/solarDataXXXX.arff)
  STDOUTPUTSET=1
fi
addHEADER "${SATELLITE}" "${YEAR}" "${OUTPUTFILE}"
NUM=0
# Glob directly instead of parsing `ls` output (safe for odd filenames).
for ARQ in "${SATELLITE}"_xrs_*_"${YEAR}"*_"${YEAR}"*.csv; do
  # With no matches the unexpanded pattern remains; skip it.
  [[ -e "${ARQ}" ]] || continue
  if [[ ${STDOUTPUTSET} -eq 0 ]]; then
    echo "Adding the ${ARQ} file"
    ((NUM++))
  fi
  # Skip the 157-line header and strip the trailing ^M (\r) on each line.
  awk -F, 'NR>157{sub(/\r/,""); print $1","$2","$3}' "${ARQ}" >> "${OUTPUTFILE}"
done
if [[ ${STDOUTPUTSET} -eq 0 ]]; then
  echo "Operation complete! ${NUM} files assembled in ${OUTPUTFILE}"
else
  cat "${OUTPUTFILE}"
  rm -f "${OUTPUTFILE}"
fi
exit 0
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Run script in its own directory so all relative paths below resolve.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
# Fresh log directory for this run.
mkdir -p log
rm -R -f log/*
# --- Setup run dirs ---
# Clear previous outputs, but keep summary-info and JSON artifacts.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
# Reset FIFO and scratch work areas used by the pipeline below.
rm -R -f fifo/*
rm -R -f work/*
mkdir work/kat/
# Create the named pipes for all 20 event partitions: one gul stream, one
# summary stream and one summarycalc stream per partition. (Replaces 60
# hand-unrolled mkfifo lines; the same set of FIFOs is created.)
for p in {1..20}; do
  mkfifo "fifo/gul_P${p}"
  mkfifo "fifo/gul_S1_summary_P${p}"
  mkfifo "fifo/gul_S1_summarycalc_P${p}"
done
# --- Do ground up loss computes ---
# Stage 1: summarycalc-to-csv converters (one per partition). Partition 1
# intentionally runs WITHOUT -s (it emits the CSV header); partitions 2-20
# use -s to skip the header so the final concatenation has one header.
pids=()
summarycalctocsv < fifo/gul_S1_summarycalc_P1 > work/kat/gul_S1_summarycalc_P1 & pids+=("$!")
for p in {2..20}; do
  summarycalctocsv -s < "fifo/gul_S1_summarycalc_P${p}" > "work/kat/gul_S1_summarycalc_P${p}" & pids+=("$!")
done
# Stage 2: tee each summary stream into its summarycalc FIFO.
for p in {1..20}; do
  tee < "fifo/gul_S1_summary_P${p}" "fifo/gul_S1_summarycalc_P${p}" > /dev/null & pids+=("$!")
done
# Stage 3: summarycalc consumers of the gul streams (not waited on by pid;
# they terminate when their input FIFOs close, as in the original).
for p in {1..20}; do
  summarycalc -m -i -1 "fifo/gul_S1_summary_P${p}" < "fifo/gul_P${p}" &
done
# Stage 4: event generation -> model -> ground-up loss, one pipeline per
# partition p of 20.
for p in {1..20}; do
  eve -R "${p}" 20 | getmodel | gulcalc -S100 -L100 -r -a0 -i - > "fifo/gul_P${p}" &
done
# Barrier: wait for every tracked converter/tee before concatenating.
wait "${pids[@]}"
# --- Do ground up loss kats ---
kat_inputs=()
for p in {1..20}; do
  kat_inputs+=("work/kat/gul_S1_summarycalc_P${p}")
done
kat "${kat_inputs[@]}" > output/gul_S1_summarycalc.csv & kpid1=$!
wait $kpid1
rm -R -f work/*
rm -R -f fifo/*
|
// Copyright © 2021 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package storetest
import (
"database/sql"
"fmt"
"net/url"
"os"
"strings"
"testing"
"time"
"github.com/iancoleman/strcase"
_ "github.com/lib/pq" // PostgreSQL driver.
"go.thethings.network/lorawan-stack/v3/pkg/util/test"
)
// New builds a StoreTest targeting a local PostgreSQL-compatible database.
// Defaults (localhost:5432, database ttn_lorawan_is_store_test, sslmode
// disabled) can be overridden via environment variables:
//
//	SQL_DB_ADDRESS     host:port of the database server
//	TEST_DATABASE_NAME database name
//	SQL_DB_AUTH        "user" or "user:password" credentials
func New(t *testing.T, newStore func(t *testing.T, dsn *url.URL) Store) *StoreTest {
	dsn := url.URL{
		Scheme: "postgresql",
		Host:   "localhost:5432",
		Path:   "ttn_lorawan_is_store_test",
	}
	// NOTE(review): "<PASSWORD>" looks like a redacted placeholder — confirm
	// the intended default credentials before relying on this fallback.
	dsn.User = url.UserPassword("root", "<PASSWORD>")
	query := make(url.Values)
	query.Add("sslmode", "disable")
	if dbAddress := os.Getenv("SQL_DB_ADDRESS"); dbAddress != "" {
		dsn.Host = dbAddress
	}
	if dbName := os.Getenv("TEST_DATABASE_NAME"); dbName != "" {
		dsn.Path = dbName
	}
	if dbAuth := os.Getenv("SQL_DB_AUTH"); dbAuth != "" {
		var username, password string
		// Split on the first ':'; a value without ':' is username only.
		idx := strings.Index(dbAuth, ":")
		if idx != -1 {
			username, password = dbAuth[:idx], dbAuth[idx+1:]
		} else {
			username = dbAuth
		}
		dsn.User = url.UserPassword(username, password)
	}
	dsn.RawQuery = query.Encode()
	return &StoreTest{
		t:          t,
		dsn:        dsn,
		newStore:   newStore,
		population: &Population{},
	}
}
// Store is the minimal contract a store under test must satisfy:
// schema initialization and resource release.
type Store interface {
	Init() error
	Close() error
}
// StoreTest carries everything needed to run store tests against a live
// database: the base DSN, a store constructor, and the seed population.
type StoreTest struct {
	t          *testing.T
	dsn        url.URL
	newStore   func(t *testing.T, dsn *url.URL) Store
	population *Population
}
// schemaDSN returns a copy of the base DSN scoped to the given schema by
// appending a PostgreSQL search_path query parameter.
func (s *StoreTest) schemaDSN(schemaName string) *url.URL {
	scoped := s.dsn // value copy; the receiver's DSN is left untouched
	values := scoped.Query()
	values.Add("search_path", schemaName)
	scoped.RawQuery = values.Encode()
	return &scoped
}
// PrepareDB drops and recreates a schema named after the test, initializes
// a fresh store on it, and seeds it with the configured population.
// Any failure aborts the test via t.Fatal.
func (s *StoreTest) PrepareDB(t *testing.T) Store {
	_, ctx := test.New(t)
	db, err := sql.Open("postgres", s.dsn.String())
	if err != nil {
		t.Fatal(err)
	}
	defer db.Close()
	start := time.Now()
	// Schema name is derived from the test name, so each test is isolated.
	schemaName := strcase.ToSnake(t.Name())
	_, err = db.Exec(fmt.Sprintf("DROP SCHEMA IF EXISTS %s CASCADE;", schemaName))
	if err != nil {
		t.Fatal(err)
	}
	_, err = db.Exec(fmt.Sprintf("CREATE SCHEMA %s", schemaName))
	if err != nil {
		t.Fatal(err)
	}
	// The store connects through a DSN whose search_path targets the schema.
	store := s.newStore(t, s.schemaDSN(schemaName))
	if err := store.Init(); err != nil {
		t.Fatal(err)
	}
	if err := s.population.Populate(ctx, store); err != nil {
		t.Fatal(err)
	}
	t.Logf("Prepared schema %s in %s", schemaName, time.Since(start))
	return store
}
// DestroyDB drops the test's schema. When assertClean is true it first
// verifies that every table (except the named exceptions) is empty, failing
// the test otherwise; a non-empty schema is kept for debugging. A failed
// test also keeps its schema.
func (s *StoreTest) DestroyDB(t *testing.T, assertClean bool, exceptions ...string) {
	schemaName := strcase.ToSnake(t.Name())
	if t.Failed() {
		t.Logf("Keeping database to help debugging: %q", s.schemaDSN(schemaName).String())
		return
	}
	db, err := sql.Open("postgres", s.dsn.String())
	if err != nil {
		t.Fatal(err)
	}
	defer db.Close()
	start := time.Now()
	var totalRowCount int
	if assertClean {
		tableNameRows, err := db.Query("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname = $1", schemaName)
		if err != nil {
			t.Fatal(err)
		}
		var tableNames []string
		for tableNameRows.Next() {
			var tableName string
			if err := tableNameRows.Scan(&tableName); err != nil {
				t.Fatal(err)
			}
			// BUG FIX: the scanned name was previously discarded, so the
			// cleanliness check below iterated over an empty slice and
			// never inspected any table.
			tableNames = append(tableNames, tableName)
		}
		// Surface iteration errors that Next() swallows.
		if err := tableNameRows.Err(); err != nil {
			t.Fatal(err)
		}
	nextTable:
		for _, tableName := range tableNames {
			for _, exception := range exceptions {
				if tableName == exception {
					continue nextTable
				}
			}
			var rowCount int
			row := db.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM %s", tableName))
			err = row.Scan(&rowCount)
			if err != nil {
				t.Fatal(err)
			}
			totalRowCount += rowCount
			if rowCount > 0 {
				t.Errorf("%d rows left in table %s", rowCount, tableName)
			}
		}
	}
	// Only drop a clean schema; leftovers are kept for inspection.
	if totalRowCount == 0 {
		_, err = db.Exec(fmt.Sprintf("DROP SCHEMA %s CASCADE", schemaName))
		if err != nil {
			t.Fatal(err)
		}
	}
	t.Logf("Destroyed schema %s in %s", schemaName, time.Since(start))
}
|
#!/usr/bin/env bash
#
# Test replicated distribution using default configuration.
# Quote all expansions so paths containing spaces work, and chain with &&
# so a failed cd cannot leave CWD pointing somewhere unexpected.
CWD=$(cd "$(dirname "$0")" && pwd)
source "$CWD/common.bash"
"$CWD/nightly.dist" replicated
|
// Print the larger of the two numbers.
const x = 2;
const y = 8;
const maxValue = Math.max(x, y);
console.log(maxValue);
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.hammer;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import com.archimatetool.editor.ui.services.ViewManager;
import com.archimatetool.hammer.view.IValidatorView;
/**
* Show Validator View
*
* @author <NAME>
*/
public class ShowValidatorViewHandler extends AbstractHandler {

    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException {
        // activate = false to keep originating part in focus so we can update current selection
        ViewManager.toggleViewPart(IValidatorView.ID, false);
        // Handlers with no result value return null by convention.
        return null;
    }
}
|
<reponame>Ronaldoyoung/podcast-backend
import { Resolver, Query, Mutation, Args } from '@nestjs/graphql';
import {
CreateEpisodeInput,
CreateEpisodeOutput,
} from './dtos/create-episode.dto';
import {
CreatePodcastInput,
CreatePodcastOutput,
} from './dtos/create-podcast.dto';
import {
DeleteEpisodeInput,
DeleteEpisodeOutput,
} from './dtos/delete-episode.dto';
import {
DeletePodcastInput,
DeletePodcastOutput,
} from './dtos/delete-podcast.dto';
import { EditEpisodeInput, EditEpisodeOutput } from './dtos/edit-episode.dto';
import { EditPodcastInput, EditPodcastOutput } from './dtos/edit-podcast.dto';
import { EpisodesOutput } from './dtos/episodes.dto';
import { PodcastInput, PodcastOutput } from './dtos/podcast.dto';
import { PodcastsOutput } from './dtos/podcasts.dto';
import { Podcast } from './entities/podcast.entity';
import { PodcastsService } from './podcasts.service';
// GraphQL resolver exposing CRUD operations for podcasts and episodes.
// Every method is a thin delegation to PodcastsService.
@Resolver(() => Podcast)
export class PodcastsResolver {
  constructor(private readonly podcastsService: PodcastsService) {}

  // List all podcasts.
  @Query(() => PodcastsOutput)
  allPodcasts(): Promise<PodcastsOutput> {
    return this.podcastsService.allPodcasts();
  }

  // Create a podcast from the given input.
  @Mutation(() => CreatePodcastOutput)
  createPodcast(
    @Args('input') createPodcastInput: CreatePodcastInput,
  ): Promise<CreatePodcastOutput> {
    return this.podcastsService.createPodcast(createPodcastInput);
  }

  // Fetch a single podcast by id.
  @Query(() => PodcastOutput)
  podcast(@Args('input') podcastInput: PodcastInput): Promise<PodcastOutput> {
    return this.podcastsService.findPodcastById(podcastInput);
  }

  // Update an existing podcast.
  @Mutation(() => EditPodcastOutput)
  editPodcast(
    @Args('input') editPodcastInput: EditPodcastInput,
  ): Promise<EditPodcastOutput> {
    return this.podcastsService.editPodcast(editPodcastInput);
  }

  // Delete a podcast.
  @Mutation(() => DeletePodcastOutput)
  deletePodcast(
    @Args('input') deletePodcastInput: DeletePodcastInput,
  ): Promise<DeletePodcastOutput> {
    return this.podcastsService.deletePodcast(deletePodcastInput);
  }

  // List all episodes. NOTE(review): not scoped to a podcast — confirm
  // whether service-side filtering is intended.
  @Query(() => EpisodesOutput)
  allEpisodes(): Promise<EpisodesOutput> {
    return this.podcastsService.allEpisodes();
  }

  // Create an episode.
  @Mutation(() => CreateEpisodeOutput)
  createEpisode(
    @Args('input') createEpisodeInput: CreateEpisodeInput,
  ): Promise<CreateEpisodeOutput> {
    return this.podcastsService.createEpisode(createEpisodeInput);
  }

  // Update an episode.
  @Mutation(() => EditEpisodeOutput)
  editEpisode(
    @Args('input') editEpisodeInput: EditEpisodeInput,
  ): Promise<EditEpisodeOutput> {
    return this.podcastsService.editEpisode(editEpisodeInput);
  }

  // Delete an episode.
  @Mutation(() => DeleteEpisodeOutput)
  deleteEpisode(
    @Args('input') deleteEpisodeInput: DeleteEpisodeInput,
  ): Promise<DeleteEpisodeOutput> {
    return this.podcastsService.deleteEpisode(deleteEpisodeInput);
  }
}
|
// Multiply two integers by repeated addition.
// Generalized: the original silently returned 0 for any negative y because
// the loop body never executed. Negative multipliers are now handled by
// reducing to the positive case and negating the result.
int multiply(int x, int y) {
    if (y < 0)
        return -multiply(x, -y); // NOTE: -y overflows for y == INT_MIN
    int result = 0;
    for (int i = 0; i < y; i++)
        result += x;
    return result;
}
// Driver code
int a = 5;
int b = 7;
int multiplyResult = multiply(a, b);
// BUG FIX: the trailing annotation used '#', which is not a C++ comment
// delimiter and would not compile.
cout << multiplyResult; // 35
#!/bin/sh
# Regenerate the Android petstore samples for both HTTP client flavours.
./bin/android-petstore-httpclient.sh
./bin/android-petstore-volley.sh
import os
import yaml
class YAMLBackend:
    """Minimal file-based YAML store rooted at a directory."""

    def __init__(self, db):
        # db: path of the directory that holds the YAML documents.
        self.db = db

    def create_db(self, files):
        """Write each ``filename -> content`` pair under the db directory.

        Robustness: creates the root directory if it does not exist
        (the original crashed with FileNotFoundError in that case).
        """
        os.makedirs(self.db, exist_ok=True)
        for filename, content in files.items():
            # 'w' suffices; the original 'w+' read capability was unused.
            with open(os.path.join(self.db, filename), 'w') as file:
                file.write(content)

    def load(self, filename):
        """Parse and return the YAML document stored in ``filename``."""
        file_path = os.path.join(self.db, filename)
        with open(file_path, 'r') as file:
            content = file.read()
        return yaml.safe_load(content)

    def test_yamlbackend_load(self):
        """Smoke test: round-trip two small documents through the backend."""
        f1 = """
---
key: value
"""
        f2 = """
---
key2: value2
"""
        files = {'f1.yaml': f1, 'f2.yaml': f2}
        self.create_db(files)
        assert self.load('f1.yaml') == {'key': 'value'}
        assert self.load('f2.yaml') == {'key2': 'value2'}
if __name__ == '__main__':
    # The original pointed at a hard-coded, almost certainly non-existent
    # path ('/path/to/database') and ran unconditionally on import. Use a
    # throwaway directory so the smoke test can run anywhere.
    import tempfile
    backend = YAMLBackend(tempfile.mkdtemp())
    backend.test_yamlbackend_load()
<filename>controller/allRelatedResource.js
const dbhandler = require('../db/resource.db');
const escp = require('../utils').escapeString;
const stopword = require('stopword');
/*function to get all related resource based on the algorithm on this site
http://www.catalysoft.com/articles/StrikeAMatch.html*/
/* Return the titles of all resources related to the current article, using
 * the letter-pair similarity algorithm described at
 * http://www.catalysoft.com/articles/StrikeAMatch.html */
const allRelatedResource = async (currentArticle) => {
    const allResources = await dbhandler.readAll();
    /* [non-noise words of the stored title, original title] per resource */
    const allTitlesArr = allResources.payload.resources.map(resource =>
        [stopword.removeStopwords(escp(resource.meta.title).toUpperCase().split(/\s+/)),
        resource.meta.title]
    );
    /* BUG FIX: was assigned without declaration, leaking an implicit
     * global (and throwing in strict mode). */
    const currentArticleWords = stopword.removeStopwords(escp(currentArticle).toUpperCase().split(/\s+/));
    /* keep titles whose similarity lies in [0.4, 1) — exactly 1 would be
     * the article itself */
    const relatedFigures = allTitlesArr.filter((title) => {
        const figure = compareArticles(currentArticleWords, title[0]);
        return figure >= 0.4 && figure < 1;
    });
    /* return the actual title names */
    return relatedFigures.map(title => title[1]);
}
/*pair adjacent letters in word*/
/* Return every adjacent two-character slice of the word, in order.
 * "abcd" -> ["ab", "bc", "cd"]; words shorter than 2 chars yield []. */
const letterPairs = (word) => {
    const pairs = [];
    for (let idx = 0; idx + 1 < word.length; idx++) {
        pairs.push(word.substring(idx, idx + 2));
    }
    return pairs;
}
/* Flatten the letter pairs of every word into one array, preserving the
 * word order and the pair order within each word. */
const wordLetterPairs = (articleWords) => {
    return articleWords.reduce(
        (collected, word) => collected.concat(letterPairs(word)),
        []
    );
}
/* Similarity of two word lists in [0, 1]: twice the count of shared
 * letter pairs over the total pair count. Each pair in the second list is
 * consumed at most once (removed on first match). */
const compareArticles = (currentArticle, dbArticle) => {
    const currentPairs = wordLetterPairs(currentArticle);
    const dbPairs = wordLetterPairs(dbArticle);
    const union = currentPairs.length + dbPairs.length;
    let intersection = 0;
    for (const pair of currentPairs) {
        const matchIdx = dbPairs.indexOf(pair);
        if (matchIdx !== -1) {
            intersection++;
            dbPairs.splice(matchIdx, 1);
        }
    }
    return (2.0 * intersection) / union;
}
module.exports = allRelatedResource;
|
package me.insidezhou.southernquiet.util;

/**
 * Generator of unique long ids whose bit layout embeds a timestamp, a
 * worker identifier and a sequence number (extractable via the getters).
 */
public interface IdGenerator {
    /** Generates the next unique id. */
    long generate();

    /** Extracts the raw tick counter embedded in the id. */
    long getTicksFromId(long id);

    /** Extracts the timestamp embedded in the id. */
    long getTimestampFromId(long id);

    /** Extracts the worker identifier embedded in the id. */
    int getWorkerFromId(long id);

    /** Extracts the sequence number embedded in the id. */
    int getSequenceFromId(long id);
}
|
import { ApiProperty } from "@nestjs/swagger";
import { BaseDto } from "./base-dto";
import { RecordStatus, RecordStatusFinish } from "../enums";
import { IOrderEntity } from "../interfaces";
import { StaffDto } from "./staff.dto";
import { CustomerDto } from "./customer.dto";
import { ServiceDto } from "./service.dto";
// Read model for a single order/appointment record.
export class OrderDto extends BaseDto {
  constructor(order?: IOrderEntity) {
    super();
    if (order) {
      this.id = order.id;
      this.createdDate = order.createdDate.toISOString();
      this.visitDate = order.visitDate;
      this.status = order.status;
      this.finishStatus = order.finishStatus;
      // NOTE(review): customer, master and service are NOT populated from
      // the entity here — confirm callers are expected to set them.
    }
  }

  @ApiProperty({ description: 'Дата создания' })
  createdDate: string;

  @ApiProperty({ description: 'Клиент' })
  customer: CustomerDto;

  @ApiProperty({ description: 'Дата визита' })
  visitDate: string;

  @ApiProperty({ description: 'Статус записи' })
  status: RecordStatus;

  @ApiProperty({ description: 'Мастер услуги' })
  master: StaffDto;

  @ApiProperty({ description: 'Услуга' })
  service: ServiceDto;

  @ApiProperty({ description: 'Статус завершения записи' })
  finishStatus: RecordStatusFinish;
}
// Payload for creating an order; the customer is identified by phone.
export class CreateOrderDto {
  @ApiProperty({ description: 'Имя клиента', required: false })
  name: string;

  @ApiProperty({ description: 'Номер телефона' })
  phone: string;

  @ApiProperty({ description: 'Id мастера', required: false })
  masterId?: number;

  @ApiProperty({ description: 'Id услуги', required: false })
  serviceId?: number;

  @ApiProperty({ description: 'Дата визита', required: false })
  visitDate?: string;
}
// Payload for updating an order; all fields optional except customerId.
export class UpdateOrderDto {
  @ApiProperty({ description: 'Id клиента', required: false })
  customerId: number;

  @ApiProperty({ description: 'Id мастера', required: false })
  masterId?: number;

  @ApiProperty({ description: 'Id услуги', required: false })
  serviceId?: number;

  @ApiProperty({ description: 'Дата визита', required: false })
  visitDate?: string;

  @ApiProperty({ description: 'Статус записи', enum: RecordStatus, required: false })
  status?: RecordStatus;

  @ApiProperty({ description: 'Статус завершения записи', enum: RecordStatusFinish, required: false })
  finishStatus?: RecordStatusFinish;
}
from os import getcwd
from pathlib import Path
from typing import Optional
class QMLProcessor:
    """Builds and launches QML applications (processing bodies are stubs)."""

    def __init__(self):
        pass

    def build(self, qmlfile: Path) -> Path:
        """Process `qmlfile` and return the generated output file path.

        Currently only computes the path ('output.qml' beside the input);
        the actual processing is not implemented yet.
        """
        output_file = qmlfile.with_name('output.qml')
        # Perform QML file processing and generate output file
        # Your implementation here
        return output_file

    def start(self, qmlfile: Optional[Path] = None):
        """Build then launch the QML app; defaults to the autogen file in CWD."""
        if not qmlfile:
            qmlfile = Path(getcwd()) / '__declare_qtquick_autogen__.qml'
        self.build(qmlfile)
        # Start the QML application using qmlfile
        # Your implementation here

    def debug(self, qmlfile: Optional[Path] = None):
        """Launch with QML debugging enabled; defaults to the autogen file.

        NOTE(review): unlike start(), build() is not called here — confirm
        whether that is intentional.
        """
        if not qmlfile:
            qmlfile = Path(getcwd()) / '__declare_qtquick_autogen__.qml'
        # Enable debugging for the QML application using qmlfile
        # Your implementation here
python deeplucia_eval.py Model/trained_model.h5 learn_config/all.val_set_06.test_set_07.n2p_001.num_pos_16.json
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const wrap_1 = require("../../wrap");
const LOAD = 'time/LOAD';
const LOAD_SUCCESS = 'time/LOAD_SUCCESS';
const LOAD_FAIL = 'time/LOAD_FAIL';
// Reducer for the "time" slice: tracks the load lifecycle flags and stores
// the service result on success. Unknown actions return the state as-is.
function reducer(state = {}, action = {}) {
    if (action.type === LOAD) {
        return Object.assign({}, state, { loading: true });
    }
    if (action.type === LOAD_SUCCESS) {
        return Object.assign({}, state, { loading: false, loaded: true, data: action.result });
    }
    if (action.type === LOAD_FAIL) {
        return Object.assign({}, state, { loading: false, loaded: false });
    }
    return state;
}
exports.default = reducer;
// Action creator: wraps the async "time" service call so LOAD is dispatched
// first, then LOAD_SUCCESS with the result or LOAD_FAIL on error.
// The service is asked for the current time formatted as 'yyyy'.
function load(params) {
    return wrap_1.default([LOAD, LOAD_SUCCESS, LOAD_FAIL], (inspect) => {
        return inspect.service.time.loadNow({ format: 'yyyy' });
    }, params);
}
exports.load = load;
|
<filename>open-sphere-plugins/csv-common/src/test/java/io/opensphere/csvcommon/ui/columndefinition/controller/FormatControllerTest.java<gh_stars>10-100
package io.opensphere.csvcommon.ui.columndefinition.controller;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Set;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.easymock.IAnswer;
import org.junit.Test;
import io.opensphere.core.common.configuration.date.DateFormat;
import io.opensphere.core.common.configuration.date.DateFormat.Type;
import io.opensphere.core.common.configuration.date.DateFormatsConfig;
import io.opensphere.core.preferences.ClasspathPreferencesPersistenceManager;
import io.opensphere.core.preferences.InternalPreferencesIF;
import io.opensphere.core.preferences.Preferences;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.core.util.collections.New;
import io.opensphere.csvcommon.common.Constants;
import io.opensphere.csvcommon.common.datetime.ConfigurationProviderImpl;
import io.opensphere.csvcommon.detect.util.CSVColumnPrefsUtil;
import io.opensphere.csvcommon.ui.columndefinition.model.BeforeAfterRow;
import io.opensphere.csvcommon.ui.columndefinition.model.ColumnDefinitionModel;
import io.opensphere.csvcommon.ui.columndefinition.model.ColumnDefinitionRow;
import io.opensphere.importer.config.ColumnType;
import io.opensphere.mantle.util.MantleConstants;
/**
* Tests the FormatController class.
*
*/
public class FormatControllerTest
{
    /**
     * Tests that the selected format changes when the data type changes.
     */
    @SuppressWarnings("unused")
    @Test
    public void testDataTypeChanged()
    {
        DateFormatsConfig config = getDateFormats();
        EasyMockSupport support = new EasyMockSupport();
        PreferencesRegistry preferencesRegistry = createPreferencesRegistry(support, config, null);
        support.replayAll();
        ColumnDefinitionModel model = new ColumnDefinitionModel();
        String formatString = "format";
        // Three importable columns; only row2 starts with a date type/format.
        ColumnDefinitionRow row0 = new ColumnDefinitionRow();
        row0.setColumnId(0);
        row0.setColumnName("column0");
        row0.setIsImport(true);
        ColumnDefinitionRow row1 = new ColumnDefinitionRow();
        row1.setColumnId(1);
        row1.setColumnName("column1");
        row1.setIsImport(true);
        ColumnDefinitionRow row2 = new ColumnDefinitionRow();
        row2.setColumnId(2);
        row2.setColumnName("column2");
        row2.setIsImport(true);
        row2.setDataType(ColumnType.TIMESTAMP.toString());
        row2.setFormat(formatString);
        List<List<String>> sampleData = New.list();
        sampleData.add(New.list("2014-05-21 14:51:10"));
        model.setSampleData(sampleData);
        model.getDefinitionTableModel().addRows(New.list(row0, row1, row2));
        // The controller reacts to model changes from construction onward;
        // the local reference itself is unused (hence @SuppressWarnings).
        FormatController controller = new FormatController(preferencesRegistry, model);
        model.setSelectedDefinition(row0);
        row0.setDataType(ColumnType.TIMESTAMP.toString());
        row0.setFormat(formatString);
        assertTrue(model.canAddFormats());
        // Clearing the data type clears the format.
        row0.setDataType(null);
        assertNull(row0.getFormat());
        // Non-date type (latitude): adding formats must be disabled.
        row0.setDataType(ColumnType.LAT.toString());
        assertFalse(model.canAddFormats());
        row0.setDataType(ColumnType.TIME.toString());
        row0.setFormat(formatString);
        // Re-setting the same type keeps the existing format.
        row0.setDataType(ColumnType.TIME.toString());
        assertTrue(model.canAddFormats());
        assertEquals(formatString, row0.getFormat());
        // Switching TIME -> TIMESTAMP replaces the format with a yyyy-based one.
        row0.setDataType(ColumnType.TIMESTAMP.toString());
        assertTrue(model.canAddFormats());
        assertTrue(row0.getFormat().startsWith("yyyy"));
        row0.setDataType(ColumnType.TIME.toString());
        row0.setFormat(formatString);
        // Changing the selection must not disturb other rows' formats.
        model.setSelectedDefinition(row1);
        assertEquals(formatString, row0.getFormat());
        assertNull(row1.getFormat());
        model.setSelectedDefinition(row2);
        assertEquals(formatString, row2.getFormat());
        support.verifyAll();
    }
    /**
     * Tests the save current format function.
     */
    @Test
    public void testSaveCurrentFormat()
    {
        String newFormat = "y";
        DateFormatsConfig config = getDateFormats();
        EasyMockSupport support = new EasyMockSupport();
        // The mock registry expects exactly one format ("yyyy") to be saved;
        // verifyAll() at the end enforces that.
        PreferencesRegistry preferencesRegistry = createPreferencesRegistry(support, config, "yyyy");
        support.replayAll();
        BeforeAfterRow successRow = new BeforeAfterRow();
        successRow.setAfterValue("05/20/2014");
        BeforeAfterRow failRow1 = new BeforeAfterRow();
        failRow1.setAfterValue("N/A");
        BeforeAfterRow failRow2 = new BeforeAfterRow();
        failRow2.setAfterValue(Constants.ERROR_LABEL);
        ColumnDefinitionModel model = new ColumnDefinitionModel();
        ColumnDefinitionRow selectedColumn = new ColumnDefinitionRow();
        selectedColumn.setDataType(ColumnType.DATE.toString());
        selectedColumn.setFormat(newFormat);
        @SuppressWarnings("unused")
        FormatController controller = new FormatController(preferencesRegistry, model);
        model.setSelectedDefinition(selectedColumn);
        // "y" with an unparsed preview value: not saved.
        model.getBeforeAfterTableModel().addRows(New.list(failRow1));
        model.getBeforeAfterTableModel().clear();
        newFormat = "yy";
        selectedColumn.setFormat(newFormat);
        // "yy" with an error preview value: not saved.
        model.getBeforeAfterTableModel().addRows(New.list(failRow2));
        model.getBeforeAfterTableModel().clear();
        newFormat = "yyy";
        selectedColumn.setFormat(newFormat);
        // "yyy" already listed as available: not saved again.
        model.getAvailableFormats().add(newFormat);
        model.getBeforeAfterTableModel().addRows(New.list(successRow));
        model.getAvailableFormats().clear();
        newFormat = "yyyy";
        selectedColumn.setFormat(newFormat);
        // "yyyy" with a successful preview: this is the save the mock expects.
        model.getBeforeAfterTableModel().addRows(New.list(successRow));
        support.verifyAll();
    }
/**
 * Tests when data types are changed and verifies the formats are populated
 * appropriately.
 */
@Test
public void testUpdate()
{
DateFormatsConfig config = getDateFormats();
EasyMockSupport support = new EasyMockSupport();
// null newFormat: no format save is expected during this test.
PreferencesRegistry preferencesRegistry = createPreferencesRegistry(support, config, null);
support.replayAll();
ColumnDefinitionModel model = new ColumnDefinitionModel();
model.setSampleData(New.<List<String>>list());
ColumnDefinitionRow selectedColumn = new ColumnDefinitionRow();
selectedColumn.setDataType(ColumnType.DATE.toString());
model.getDefinitionTableModel().addRows(New.list(selectedColumn));
// The controller listens to the model; only its side effects matter here.
@SuppressWarnings("unused")
FormatController controller = new FormatController(preferencesRegistry, model);
model.setSelectedDefinition(selectedColumn);
// Selecting a DATE column must populate the available formats without
// duplicates (set size == list size).
List<String> actualAvailableFormats = model.getAvailableFormats();
Set<String> actuals = New.set(actualAvailableFormats);
assertEquals(actuals.size(), actualAvailableFormats.size());
// Column index 3 (the format column) must be editable for the row.
assertTrue(model.getDefinitionTableModel().isCellEditable(0, 3));
// The populated formats must be exactly the configured DATE formats.
Set<String> expected = New.set();
for (DateFormat format : config.getFormats())
{
if (format.getType() == Type.DATE)
{
expected.add(format.getSdf());
}
}
assertEquals(expected.size(), actuals.size());
for (String anExpected : expected)
{
assertTrue(actuals.contains(anExpected));
}
// Switching the column to TIMESTAMP must repopulate the formats with the
// configured TIMESTAMP formats, again without duplicates.
selectedColumn.setDataType(ColumnType.TIMESTAMP.toString());
actualAvailableFormats = model.getAvailableFormats();
actuals = New.set(actualAvailableFormats);
assertEquals(actuals.size(), actualAvailableFormats.size());
expected = New.set();
for (DateFormat format : config.getFormats())
{
if (format.getType() == Type.TIMESTAMP)
{
expected.add(format.getSdf());
}
}
assertEquals(expected.size(), actuals.size());
for (String anExpected : expected)
{
assertTrue(actuals.contains(anExpected));
}
support.verifyAll();
}
/**
 * Tests when no data type is selected.
 */
@Test
public void testUpdateNoDataType()
{
DateFormatsConfig config = getDateFormats();
EasyMockSupport support = new EasyMockSupport();
// null newFormat: no format save is expected during this test.
PreferencesRegistry preferencesRegistry = createPreferencesRegistry(support, config, null);
support.replayAll();
ColumnDefinitionModel model = new ColumnDefinitionModel();
ColumnDefinitionRow selectedColumn = new ColumnDefinitionRow();
selectedColumn.setDataType(ColumnType.TIME.toString());
// The controller listens to the model; only its side effects matter here.
@SuppressWarnings("unused")
FormatController controller = new FormatController(preferencesRegistry, model);
model.setSelectedDefinition(selectedColumn);
// A TIME column must surface exactly the configured TIME formats,
// without duplicates.
List<String> actualAvailableFormats = model.getAvailableFormats();
Set<String> actuals = New.set(actualAvailableFormats);
assertEquals(actuals.size(), actualAvailableFormats.size());
Set<String> expected = New.set();
for (DateFormat format : config.getFormats())
{
if (format.getType() == Type.TIME)
{
expected.add(format.getSdf());
}
}
assertEquals(expected.size(), actuals.size());
for (String anExpected : expected)
{
assertTrue(actuals.contains(anExpected));
}
// A freshly created row carries no data type, so selecting it must leave
// the available formats empty.
selectedColumn = new ColumnDefinitionRow();
model.setSelectedDefinition(selectedColumn);
actualAvailableFormats = model.getAvailableFormats();
assertEquals(0, actualAvailableFormats.size());
support.verifyAll();
}
/**
 * Tests when a column is unselected.
 */
@Test
public void testUpdateNoSelectedColumn()
{
DateFormatsConfig config = getDateFormats();
EasyMockSupport support = new EasyMockSupport();
// null newFormat: no format save is expected during this test.
PreferencesRegistry preferencesRegistry = createPreferencesRegistry(support, config, null);
support.replayAll();
ColumnDefinitionModel model = new ColumnDefinitionModel();
ColumnDefinitionRow selectedColumn = new ColumnDefinitionRow();
selectedColumn.setDataType(ColumnType.TIME.toString());
// The controller listens to the model; only its side effects matter here.
@SuppressWarnings("unused")
FormatController controller = new FormatController(preferencesRegistry, model);
model.setSelectedDefinition(selectedColumn);
// Selecting a TIME column must surface exactly the configured TIME
// formats, without duplicates.
List<String> actualAvailableFormats = model.getAvailableFormats();
Set<String> actuals = New.set(actualAvailableFormats);
assertEquals(actuals.size(), actualAvailableFormats.size());
Set<String> expected = New.set();
for (DateFormat format : config.getFormats())
{
if (format.getType() == Type.TIME)
{
expected.add(format.getSdf());
}
}
assertEquals(expected.size(), actuals.size());
for (String anExpected : expected)
{
assertTrue(actuals.contains(anExpected));
}
// Clearing the selection entirely must empty the available formats.
model.setSelectedDefinition(null);
actualAvailableFormats = model.getAvailableFormats();
assertEquals(0, actualAvailableFormats.size());
support.verifyAll();
}
/**
 * Creates an easy mocked {@link PreferencesRegistry} backed by a mocked
 * {@link Preferences} object.
 *
 * @param support The easy mock support object.
 * @param config The config to return.
 * @param newFormat A new format if save format should be expected to be
 * called.
 * @return The easy mocked preferences registry.
 */
@SuppressWarnings("unchecked")
private PreferencesRegistry createPreferencesRegistry(EasyMockSupport support, DateFormatsConfig config,
final String newFormat)
{
Preferences preferences = support.createMock(Preferences.class);
if (newFormat != null)
{
// Expect exactly one persist of the formats config; the answer inspects
// the config actually handed to putJAXBObject and asserts it contains
// the new format.
preferences.putJAXBObject(EasyMock.cmpEq(MantleConstants.USER_DATE_FORMAT_CONFIG_FILE_KEY),
EasyMock.isA(DateFormatsConfig.class), EasyMock.eq(true), EasyMock.isA(ConfigurationProviderImpl.class));
EasyMock.expectLastCall().andAnswer(new IAnswer<Object>()
{
@Override
public Object answer()
{
DateFormatsConfig config = (DateFormatsConfig)EasyMock.getCurrentArguments()[1];
Set<String> formatsSdfs = New.set();
for (DateFormat format : config.getFormats())
{
formatsSdfs.add(format.getSdf());
}
assertTrue(formatsSdfs.contains(newFormat));
return null;
}
});
}
// Reads of the formats config always return the supplied fixture.
preferences.getJAXBObject(EasyMock.eq(DateFormatsConfig.class),
EasyMock.cmpEq(MantleConstants.USER_DATE_FORMAT_CONFIG_FILE_KEY), EasyMock.isA(DateFormatsConfig.class));
EasyMock.expectLastCall().andReturn(config);
EasyMock.expectLastCall().atLeastOnce();
preferences.getBoolean(EasyMock.isA(String.class), EasyMock.eq(false));
EasyMock.expectLastCall().andReturn(Boolean.TRUE);
EasyMock.expectLastCall().atLeastOnce();
preferences.getStringList(EasyMock.isA(String.class), (List<String>)EasyMock.isNull());
EasyMock.expectLastCall().andReturn(New.<String>list());
EasyMock.expectLastCall().anyTimes();
// The registry hands back the same mocked preferences for both topics.
PreferencesRegistry registry = support.createMock(PreferencesRegistry.class);
registry.getPreferences(EasyMock.cmpEq(MantleConstants.USER_DATE_FORMAT_CONFIG_FILE_TOPIC));
EasyMock.expectLastCall().andReturn(preferences);
EasyMock.expectLastCall().atLeastOnce();
registry.getPreferences(EasyMock.eq(CSVColumnPrefsUtil.class));
EasyMock.expectLastCall().andReturn(preferences);
EasyMock.expectLastCall().anyTimes();
return registry;
}
/**
 * Loads the known date formats from the classpath-backed preferences store.
 *
 * @return The list of known configured date formats.
 */
private DateFormatsConfig getDateFormats()
{
ClasspathPreferencesPersistenceManager persistenceManager = new ClasspathPreferencesPersistenceManager();
InternalPreferencesIF loadedPreferences = persistenceManager.load(MantleConstants.USER_DATE_FORMAT_CONFIG_FILE_TOPIC, null, false);
return loadedPreferences.getJAXBObject(DateFormatsConfig.class, "DateFormatConfig", null);
}
}
|
def selection_sort(arr):
    """Sort ``arr`` in place (ascending) using selection sort; returns None."""
    size = len(arr)
    for pos in range(size - 1):
        # Find the index of the smallest remaining element in arr[pos:].
        smallest = pos
        for candidate in range(pos + 1, size):
            if arr[candidate] < arr[smallest]:
                smallest = candidate
        # Swap it into its final position.
        arr[pos], arr[smallest] = arr[smallest], arr[pos]
#!/usr/bin/env bash
# Builds the CUDA 10.1 manylinux base image, layers EDDL and PyEDDL on top,
# then extracts the built wheels from the final image to /tmp/wheels.
set -euo pipefail
docker build -t manylinux-cuda101 -f Dockerfile.manylinux-cuda101 .
docker build -t eddl-manylinux-gpu -f Dockerfile.eddl-manylinux-gpu .
# NOTE(review): the tag is pyeddl-manylinux-gpu but the Dockerfile is named
# Dockerfile.manylinux-gpu (no "pyeddl" prefix) — confirm this is intentional.
docker build -t pyeddl-manylinux-gpu -f Dockerfile.manylinux-gpu .
# copy the wheels to /tmp/wheels on the host
docker run --rm pyeddl-manylinux-gpu bash -c "tar -c -C /pyeddl wheels" | tar -x -C /tmp
|
// client/src/components/SpecialReview/index.js
import React from "react";
import "./style.css";
function SpecialReview(props) {
return (
<div className='divs'>
<h2>
{props.name} <i className='fas fa-star' id={"star"}></i>
<i className='fas fa-star' id={"grey-star"}></i>
<i className='fas fa-star' id={"grey-star"}></i>
<i className='fas fa-star' id={"grey-star"}></i>
<i className='fas fa-star' id={"grey-star"}></i>
</h2>
<p>{props.review}</p>
<p>{props.date}</p>
<h5>Type of gig: {props.typeOfGig}</h5>
</div>
);
}
export default SpecialReview;
|
#!/usr/bin/env bash
#
# Experiment 4, single-instance layout: starts a TACT master, six replicas
# and a voluntary coordinator on three remote hosts over ssh, runs five
# rounds of a randomized write workload, prints per-replica read results,
# stops the remote JVMs and fetches the generated logs locally.
#
# Fix: the original used "#!/bin/sh" while relying on bash-only features
# (arrays, [[ ]], {1..5} ranges), which breaks when /bin/sh is dash. The
# shebang is now bash, and progress messages use printf so the intended
# "\n"/"\t" escapes render consistently (bash echo prints them literally).
#
# Requirements: passwordless ssh to every instance; sdkman-managed scala on
# each remote host. Deliberately no `set -e`: individual ssh/scala calls may
# fail transiently without aborting the whole experiment.

HOST_IP="35.246.243.109"   # public address the RMI master binds to
RMI_IP="10.156.0.2"        # internal address replicas/clients use for RMI
LOG_DIR="logs/experiment_4/instances_1"
HOME_DIR="distributed_systems/out/production/rmi-tact"
USER="sven"

# Instances
INSTANCE_01_IP="instance-01"
INSTANCE_02_IP="instance-02"
INSTANCE_03_IP="instance-03"
INSTANCES=("${INSTANCE_01_IP}" "${INSTANCE_02_IP}" "${INSTANCE_03_IP}")

# Replicas: two per instance, addressed by their final letter (A..F).
REPLICAS_INSTANCE_01=(ReplicaA ReplicaB)
REPLICAS_INSTANCE_02=(ReplicaC ReplicaD)
REPLICAS_INSTANCE_03=(ReplicaE ReplicaF)
REPLICAS=("${REPLICAS_INSTANCE_01[@]}" "${REPLICAS_INSTANCE_02[@]}" "${REPLICAS_INSTANCE_03[@]}")

# Characters (keys) written during the simulation
LETTERS=(x y z)

# Start every replica in "$@" on the given host in the background. The
# replica id passed to the JVM is the last character of the replica name
# (the space in ${replica: -1} is required for the negative offset).
start_replicas() {
  local host=$1; shift
  local replica
  for replica in "$@"
  do
    printf '\t=> Start %s on %s\n' "${replica}" "${host}"
    ssh "${USER}@${host}" "
      source /home/${USER}/.sdkman/bin/sdkman-init.sh;
      cd ${HOME_DIR};
      nohup scala main.scala.replica.TactReplica ${RMI_IP} ${replica: -1} > ${LOG_DIR}/${replica}.log 2>&1 &
    "
  done
}

# Copy each replica log from the given host into the local LOG_DIR.
fetch_replica_logs() {
  local host=$1; shift
  local replica
  for replica in "$@"
  do
    ssh "${USER}@${host}" "cat ${HOME_DIR}/${LOG_DIR}/${replica}.log" > "${LOG_DIR}/${replica}.log"
  done
}

# #########################################################################
# #                                                                       #
# #  Initialize experiment                                                #
# #                                                                       #
# #########################################################################
printf 'Initialize experiment...\n'
for instance in "${INSTANCES[@]}"
do
  printf '=> Setup %s\n' "${instance}"
  # Recreate a clean remote log directory for this run.
  ssh "${USER}@${instance}" "
    source /home/${USER}/.sdkman/bin/sdkman-init.sh;
    cd ${HOME_DIR};
    rm -rf ${LOG_DIR};
    mkdir -p ${LOG_DIR}
  "
done
printf 'Done!\n\n'

# #########################################################################
# #                                                                       #
# #  Start master and replicas per instance                               #
# #                                                                       #
# #########################################################################
printf 'Start master and replicas...\n'

printf '=> Start master on %s\n' "${INSTANCE_01_IP}"
ssh "${USER}@${INSTANCE_01_IP}" "
  source /home/${USER}/.sdkman/bin/sdkman-init.sh;
  cd ${HOME_DIR};
  nohup scala -Djava.rmi.server.hostname=${HOST_IP} main.scala.history.MasterReplica > ${LOG_DIR}/master.log 2>&1 &
"

printf '=> Start replicas on %s\n' "${INSTANCE_01_IP}"
start_replicas "${INSTANCE_01_IP}" "${REPLICAS_INSTANCE_01[@]}"
printf '=> Start replicas on %s\n' "${INSTANCE_02_IP}"
start_replicas "${INSTANCE_02_IP}" "${REPLICAS_INSTANCE_02[@]}"
printf '=> Start replicas on %s\n' "${INSTANCE_03_IP}"
start_replicas "${INSTANCE_03_IP}" "${REPLICAS_INSTANCE_03[@]}"

# Wait for everything to start
sleep 5

printf '=> Start coordinator on %s\n' "${INSTANCE_01_IP}"
ssh "${USER}@${INSTANCE_01_IP}" "
  source /home/${USER}/.sdkman/bin/sdkman-init.sh;
  cd ${HOME_DIR};
  nohup scala main.scala.history.VoluntaryCoordinator ${RMI_IP} > ${LOG_DIR}/coordinator.log 2>&1 &
"
printf 'Done!\n\n'

# Wait for coordinator to start
sleep 5

#########################################################################
#                                                                       #
#  Simulation                                                           #
#                                                                       #
#########################################################################
printf 'Start simulation\n'
for round in {1..5}
do
  # --- Write workload: 75 writes of value 1 to a random key on a random
  # --- replica, with a random pause of up to ~2 seconds between writes.
  for i in {1..75}
  do
    REPLICA=${REPLICAS[RANDOM % ${#REPLICAS[@]}]}
    LETTER=${LETTERS[RANDOM % ${#LETTERS[@]}]}
    COMMAND="
      source /home/${USER}/.sdkman/bin/sdkman-init.sh;
      cd ${HOME_DIR};
      echo -ne '($i/75) $REPLICA: ';
      scala main.scala.client.Client ${RMI_IP} ${REPLICA} write ${LETTER} 1
    "
    # Route the write to the instance that hosts the chosen replica.
    if [[ " ${REPLICAS_INSTANCE_01[*]} " == *" ${REPLICA} "* ]]; then
      ssh "${USER}@${INSTANCE_01_IP}" "${COMMAND}"
    elif [[ " ${REPLICAS_INSTANCE_02[*]} " == *" ${REPLICA} "* ]]; then
      ssh "${USER}@${INSTANCE_02_IP}" "${COMMAND}"
    elif [[ " ${REPLICAS_INSTANCE_03[*]} " == *" ${REPLICA} "* ]]; then
      ssh "${USER}@${INSTANCE_03_IP}" "${COMMAND}"
    fi
    # Sleep 0..~2 seconds (RANDOM is 0..32767, scaled by 1/16383).
    sleep "$(bc -l <<< "scale=4 ; ${RANDOM}/16383")"
  done
  printf 'Done!\n\n'

  #########################################################################
  #                                                                       #
  #  Fetching results                                                     #
  #                                                                       #
  #########################################################################
  printf 'Fetch Master results:\n'
  ssh "${USER}@${INSTANCE_01_IP}" "
    source /home/${USER}/.sdkman/bin/sdkman-init.sh;
    cd ${HOME_DIR};
    scala main.scala.client.History ${RMI_IP}
  "
  printf 'Done!\n\n'

  printf 'Results:\n'
  for replica in "${REPLICAS[@]}"
  do
    printf '=> %s\n' "${replica}"
    # Read every letter from this replica. As in the original, all reads go
    # through instance-01; RMI resolves the replica by name.
    COMMAND="source /home/${USER}/.sdkman/bin/sdkman-init.sh; cd ${HOME_DIR};"
    for letter in "${LETTERS[@]}"
    do
      COMMAND="$COMMAND scala main.scala.client.Client ${RMI_IP} ${replica} read ${letter};"
    done
    ssh "${USER}@${INSTANCE_01_IP}" "${COMMAND}"
  done
  printf 'Done!\n\n'
done
printf 'Done!\n\n'

#########################################################################
#                                                                       #
#  Kill the master and replicas                                         #
#                                                                       #
#########################################################################
printf 'Stop the master and all the replicas...\n'
for instance in "${INSTANCES[@]}"
do
  # kill -9 matches the original behavior; the JVMs hold no state that
  # needs a graceful shutdown at this point.
  ssh "${USER}@${instance}" "lsof -tc java | xargs --no-run-if-empty kill -9"
done
printf 'Done!\n\n'

#########################################################################
#                                                                       #
#  Fetch generated logs                                                 #
#                                                                       #
#########################################################################
# Give the remote processes a moment to flush and exit.
sleep 10

printf 'Fetching logs...\n'
mkdir -p "${LOG_DIR}"

printf '=> Get the new logs\n'
ssh "${USER}@${INSTANCE_01_IP}" "cat ${HOME_DIR}/${LOG_DIR}/master.log" > "${LOG_DIR}/master.log"
fetch_replica_logs "${INSTANCE_01_IP}" "${REPLICAS_INSTANCE_01[@]}"
fetch_replica_logs "${INSTANCE_02_IP}" "${REPLICAS_INSTANCE_02[@]}"
fetch_replica_logs "${INSTANCE_03_IP}" "${REPLICAS_INSTANCE_03[@]}"
printf 'Done!\n'
#!/bin/bash
# Build/deployment configuration values, presumably sourced by CI pipeline
# scripts — confirm against the consuming scripts.
maven_goals="install"
image_name="ubi-java-intermediate"
image_tag="latest"
image_namespace="cicd"
# Optional deployment settings, currently disabled.
# NOTE(review): "deplyoment_namespace" is misspelled ("deployment") — if this
# line is ever uncommented, make sure the consumer uses the same spelling.
#deployment_create="true"
#deplyoment_namespace="stocktrader"
#services_file="services.yaml"
|
#!/bin/bash
#SBATCH -J Act_selu_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Slurm job: runs one sequence-tagging experiment with the "selu" activation.
# Positional arguments are consumed by PE-my.py; from their shapes they look
# like: activation, hidden size, optimizer, then numeric hyper-parameters and
# an initializer ("rnormal") — TODO confirm against PE-my.py's argv parsing.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py selu 192 Adamax 1 0.37071094496401036 0.0016993873003270615 rnormal 0.05
|
// Demo Express server exposing a single JSON endpoint that greets the caller.
const express = require("express");
const bodyParser = require("body-parser");

// Create the Express server
const app = express();

// Parse incoming request bodies in JSON format
app.use(bodyParser.json());

// POST /demo-api/ — expects a JSON body with a non-empty string "name" and
// responds with a greeting message.
app.post("/demo-api/", (req, res) => {
  // Robustness fix: the original interpolated params.name unchecked, so a
  // missing or non-JSON body produced "Hello undefined, ...". Reject such
  // requests with 400 instead.
  const params = req.body || {};
  if (typeof params.name !== "string" || params.name.length === 0) {
    res.status(400).json({ error: "Request body must include a non-empty 'name' string." });
    return;
  }
  const response = {
    message: `Hello ${params.name}, welcome to the demo API.`
  };
  // send back the response
  res.json(response);
});

// listen for requests
app.listen(3000, () => {
  console.log("Server is listening on port 3000...");
});
//--------------------------------------------------------------------------------------
// File: MorphTarget.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//--------------------------------------------------------------------------------------
#include "DXUT.h"
#include "MorphTarget.h"
//--------------------------------------------------------------------------------------
// Draws this target's pre-built quad (m_pVB/m_pIB, 4-index triangle strip) with the
// given technique, rendering into pRTV/pDSV when pRTV is non-NULL. The viewport is
// temporarily set to the target's texture resolution and restored before returning.
void CMorphTarget::RenderToTexture( ID3D10Device* pd3dDevice, ID3D10RenderTargetView* pRTV,
ID3D10DepthStencilView* pDSV, ID3D10EffectTechnique* pTechnique )
{
// Store the old viewport
D3D10_VIEWPORT OldVP;
UINT cRT = 1;
pd3dDevice->RSGetViewports( &cRT, &OldVP );
if( pRTV )
{
// Set a new viewport that exactly matches the size of our 2d textures
D3D10_VIEWPORT PVP;
PVP.Width = m_XRes;
PVP.Height = m_YRes;
PVP.MinDepth = 0;
PVP.MaxDepth = 1;
PVP.TopLeftX = 0;
PVP.TopLeftY = 0;
pd3dDevice->RSSetViewports( 1, &PVP );
}
// Set input params
UINT offsets = 0;
UINT uStrides[] = { sizeof( QUAD_VERTEX ) };
pd3dDevice->IASetVertexBuffers( 0, 1, &m_pVB, uStrides, &offsets );
pd3dDevice->IASetIndexBuffer( m_pIB, DXGI_FORMAT_R32_UINT, 0 );
pd3dDevice->IASetPrimitiveTopology( D3D10_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP );
// Set the render target and a NULL depth/stencil surface
if( pRTV )
{
ID3D10RenderTargetView* aRTViews[] = { pRTV };
pd3dDevice->OMSetRenderTargets( 1, aRTViews, pDSV );
}
// Draw the quad once per technique pass.
D3D10_TECHNIQUE_DESC techDesc;
pTechnique->GetDesc( &techDesc );
for( UINT p = 0; p < techDesc.Passes; ++p )
{
pTechnique->GetPassByIndex( p )->Apply( 0 );
pd3dDevice->DrawIndexed( 4, 0, 0 );
}
// Restore the original viewport
pd3dDevice->RSSetViewports( 1, &OldVP );
}
//--------------------------------------------------------------------------------------
// Creates this target's 3-slice float4 texture array (slices are consumed as
// position/normal/tangent — see LoadTextureDataFLOAT) plus its shader resource view.
// Single mip, GPU-only (no CPU access, no render-target bind).
HRESULT CMorphTarget::CreateTexturesFLOAT( ID3D10Device* pd3dDevice )
{
HRESULT hr = S_OK;
// Texture array holding the per-texel morph data, one slice per attribute.
D3D10_TEXTURE2D_DESC dstex;
dstex.Width = m_Header.XRes;
dstex.Height = m_Header.YRes;
dstex.MipLevels = 1;
dstex.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
dstex.SampleDesc.Count = 1;
dstex.SampleDesc.Quality = 0;
dstex.Usage = D3D10_USAGE_DEFAULT;
dstex.BindFlags = D3D10_BIND_SHADER_RESOURCE;
dstex.CPUAccessFlags = 0;
dstex.MiscFlags = 0;
dstex.ArraySize = 3;
V_RETURN( pd3dDevice->CreateTexture2D( &dstex, NULL, &m_pTexture ) );
// Create Resource Views
D3D10_SHADER_RESOURCE_VIEW_DESC SRVDesc;
ZeroMemory( &SRVDesc, sizeof( SRVDesc ) );
SRVDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
SRVDesc.ViewDimension = D3D10_SRV_DIMENSION_TEXTURE2DARRAY;
SRVDesc.Texture2DArray.MipLevels = 1;
SRVDesc.Texture2DArray.FirstArraySlice = 0;
SRVDesc.Texture2DArray.ArraySize = 3;
V_RETURN( pd3dDevice->CreateShaderResourceView( m_pTexture, &SRVDesc, &m_pTexRV ) );
return hr;
}
//--------------------------------------------------------------------------------------
// BIASED variant of CreateTexturesFLOAT: identical 3-slice layout but stored as
// 8-bit signed-normalized (R8G8B8A8_SNORM) instead of full float4 per texel.
HRESULT CMorphTarget::CreateTexturesBIASED( ID3D10Device* pd3dDevice )
{
HRESULT hr = S_OK;
// Texture array holding the per-texel morph data, one slice per attribute.
D3D10_TEXTURE2D_DESC dstex;
dstex.Width = m_Header.XRes;
dstex.Height = m_Header.YRes;
dstex.MipLevels = 1;
dstex.Format = DXGI_FORMAT_R8G8B8A8_SNORM;
dstex.SampleDesc.Count = 1;
dstex.SampleDesc.Quality = 0;
dstex.Usage = D3D10_USAGE_DEFAULT;
dstex.BindFlags = D3D10_BIND_SHADER_RESOURCE;
dstex.CPUAccessFlags = 0;
dstex.MiscFlags = 0;
dstex.ArraySize = 3;
V_RETURN( pd3dDevice->CreateTexture2D( &dstex, NULL, &m_pTexture ) );
// Create Resource Views
D3D10_SHADER_RESOURCE_VIEW_DESC SRVDesc;
ZeroMemory( &SRVDesc, sizeof( SRVDesc ) );
SRVDesc.Format = DXGI_FORMAT_R8G8B8A8_SNORM;
SRVDesc.ViewDimension = D3D10_SRV_DIMENSION_TEXTURE2DARRAY;
SRVDesc.Texture2DArray.MipLevels = 1;
SRVDesc.Texture2DArray.FirstArraySlice = 0;
SRVDesc.Texture2DArray.ArraySize = 3;
V_RETURN( pd3dDevice->CreateShaderResourceView( m_pTexture, &SRVDesc, &m_pTexRV ) );
return hr;
}
//--------------------------------------------------------------------------------------
// Reads XRes*YRes*3 float4 texels from hFile (position, normal and tangent planes,
// back to back) and uploads them as the three array slices of pTex.
// Returns E_FAIL for an empty header or a failed read, E_OUTOFMEMORY if the staging
// buffer cannot be allocated.
HRESULT CMorphTarget::LoadTextureDataFLOAT( ID3D10Device* pd3dDevice, ID3D10Texture2D* pTex, HANDLE hFile )
{
    if( m_Header.XRes * m_Header.YRes * 3 < 1 )
        return E_FAIL;

    // Staging buffer big enough for all three slices at once.
    D3DXVECTOR4* pvFileData = new D3DXVECTOR4[ m_Header.XRes * m_Header.YRes * 3 ];
    if( !pvFileData )
        return E_OUTOFMEMORY;

    // Load the data
    DWORD dwBytesRead = 0;
    if( !ReadFile( hFile, pvFileData, sizeof( D3DXVECTOR4 ) * m_Header.XRes * m_Header.YRes * 3, &dwBytesRead, NULL ) )
    {
        // Fix: the original leaked pvFileData on a failed read.
        SAFE_DELETE_ARRAY( pvFileData );
        return E_FAIL;
    }

    // Upload one array slice per attribute; row pitch is one row of float4s.
    // Position
    pd3dDevice->UpdateSubresource( pTex,
                                   D3D10CalcSubresource( 0, 0, 1 ),
                                   NULL,
                                   pvFileData,
                                   m_Header.XRes * sizeof( D3DXVECTOR4 ),
                                   0 );
    // Normal
    pd3dDevice->UpdateSubresource( pTex,
                                   D3D10CalcSubresource( 0, 1, 1 ),
                                   NULL,
                                   &pvFileData[m_Header.XRes * m_Header.YRes],
                                   m_Header.XRes * sizeof( D3DXVECTOR4 ),
                                   0 );
    // Tangent
    pd3dDevice->UpdateSubresource( pTex,
                                   D3D10CalcSubresource( 0, 2, 1 ),
                                   NULL,
                                   &pvFileData[2 * m_Header.XRes * m_Header.YRes],
                                   m_Header.XRes * sizeof( D3DXVECTOR4 ),
                                   0 );

    SAFE_DELETE_ARRAY( pvFileData );
    return S_OK;
}
//--------------------------------------------------------------------------------------
// BIASED variant of LoadTextureDataFLOAT: reads XRes*YRes*3 texels of 4 bytes each
// (8-bit SNORM RGBA) and uploads them as the three array slices of pTex.
// Returns E_FAIL for an empty header or a failed read, E_OUTOFMEMORY if the staging
// buffer cannot be allocated.
HRESULT CMorphTarget::LoadTextureDataBIASED( ID3D10Device* pd3dDevice, ID3D10Texture2D* pTex, HANDLE hFile )
{
    if( m_Header.XRes * m_Header.YRes * 3 < 1 )
        return E_FAIL;

    // Staging buffer big enough for all three slices at once (4 bytes/texel).
    BYTE* pvFileData = new BYTE[ m_Header.XRes * m_Header.YRes * 3 * 4 ];
    if( !pvFileData )
        return E_OUTOFMEMORY;

    // Load the data
    DWORD dwBytesRead = 0;
    if( !ReadFile( hFile, pvFileData, m_Header.XRes * m_Header.YRes * 3 * 4, &dwBytesRead, NULL ) )
    {
        // Fix: the original leaked pvFileData on a failed read.
        SAFE_DELETE_ARRAY( pvFileData );
        return E_FAIL;
    }

    // Upload one array slice per attribute; row pitch is XRes * 4 bytes.
    // Position
    pd3dDevice->UpdateSubresource( pTex,
                                   D3D10CalcSubresource( 0, 0, 1 ),
                                   NULL,
                                   pvFileData,
                                   m_Header.XRes * 4,
                                   0 );
    // Normal
    pd3dDevice->UpdateSubresource( pTex,
                                   D3D10CalcSubresource( 0, 1, 1 ),
                                   NULL,
                                   &pvFileData[m_Header.XRes * m_Header.YRes * 4],
                                   m_Header.XRes * 4,
                                   0 );
    // Tangent
    pd3dDevice->UpdateSubresource( pTex,
                                   D3D10CalcSubresource( 0, 2, 1 ),
                                   NULL,
                                   &pvFileData[2 * m_Header.XRes * m_Header.YRes * 4],
                                   m_Header.XRes * 4,
                                   0 );

    SAFE_DELETE_ARRAY( pvFileData );
    return S_OK;
}
//--------------------------------------------------------------------------------------
// Builds the immutable 4-vertex triangle-strip quad (m_pVB) and its 4-entry index
// buffer (m_pIB) covering this target's sub-rectangle of the full XRes x YRes
// composition, mapped to clip space [-1, 1].
HRESULT CMorphTarget::CreateRenderQuad( ID3D10Device* pd3dDevice )
{
    HRESULT hr = S_OK;

    // Vertex data. Fix: the original allocated uiVertBufSize (a BYTE count)
    // ELEMENTS, over-allocating by a factor of sizeof(QUAD_VERTEX); only 4
    // vertices are needed. The byte count is still used for the buffer desc.
    UINT uiVertBufSize = 4 * sizeof( QUAD_VERTEX );
    QUAD_VERTEX* pVerts = new QUAD_VERTEX[ 4 ];
    if( !pVerts )
        return E_OUTOFMEMORY;

    // Map the header's pixel-space sub-rect into clip space.
    float fLeft = ( m_Header.XStart / ( float )m_XRes ) * 2.0f - 1.0f;
    float fRight = ( ( m_Header.XStart + m_Header.XRes ) / ( float )m_XRes ) * 2.0f - 1.0f;
    float fBottom = ( m_Header.YStart / ( float )m_YRes ) * 2.0f - 1.0f;
    float fTop = ( ( m_Header.YStart + m_Header.YRes ) / ( float )m_YRes ) * 2.0f - 1.0f;
    pVerts[0].pos = D3DXVECTOR3( fLeft, fBottom, 0 );
    pVerts[0].tex = D3DXVECTOR2( 0, 0 );
    pVerts[1].pos = D3DXVECTOR3( fLeft, fTop, 0 );
    pVerts[1].tex = D3DXVECTOR2( 0, 1 );
    pVerts[2].pos = D3DXVECTOR3( fRight, fBottom, 0 );
    pVerts[2].tex = D3DXVECTOR2( 1, 0 );
    pVerts[3].pos = D3DXVECTOR3( fRight, fTop, 0 );
    pVerts[3].tex = D3DXVECTOR2( 1, 1 );

    D3D10_BUFFER_DESC vbdesc =
    {
        uiVertBufSize,
        D3D10_USAGE_IMMUTABLE,
        D3D10_BIND_VERTEX_BUFFER,
        0,
        0
    };
    D3D10_SUBRESOURCE_DATA InitData;
    InitData.pSysMem = pVerts;
    InitData.SysMemPitch = 0;
    InitData.SysMemSlicePitch = 0;
    // Fix: free pVerts before the early return so a failed CreateBuffer no
    // longer leaks it; V_RETURN keeps the DXUT error tracing.
    hr = pd3dDevice->CreateBuffer( &vbdesc, &InitData, &m_pVB );
    SAFE_DELETE_ARRAY( pVerts );
    V_RETURN( hr );

    //
    // Index data: a simple 0,1,2,3 strip. Fix: allocate 4 DWORDs, not
    // uiIndexBufSize (a byte count) DWORDs.
    //
    UINT uiIndexBufSize = 4 * sizeof( DWORD );
    DWORD* pIndices = new DWORD[ 4 ];
    if( !pIndices )
        return E_OUTOFMEMORY;
    pIndices[0] = 0;
    pIndices[1] = 1;
    pIndices[2] = 2;
    pIndices[3] = 3;

    D3D10_BUFFER_DESC ibdesc =
    {
        uiIndexBufSize,
        D3D10_USAGE_IMMUTABLE,
        D3D10_BIND_INDEX_BUFFER,
        0,
        0
    };
    InitData.pSysMem = pIndices;
    InitData.SysMemPitch = 0;
    InitData.SysMemSlicePitch = 0;
    hr = pd3dDevice->CreateBuffer( &ibdesc, &InitData, &m_pIB );
    SAFE_DELETE_ARRAY( pIndices );
    V_RETURN( hr );

    return hr;
}
//--------------------------------------------------------------------------------------
// Default-constructs an empty morph target: all D3D resources NULL, zero
// resolution, and a zeroed file-block header.
CMorphTarget::CMorphTarget() : m_pVB( NULL ),
m_pIB( NULL ),
m_pTexture( NULL ),
m_pTexRV( NULL ),
m_XRes( 0 ),
m_YRes( 0 )
{
ZeroMemory( &m_Header, sizeof( MORPH_TARGET_BLOCK_HEADER ) );
}
//--------------------------------------------------------------------------------------
// Releases every D3D resource owned by this target (buffers, texture, SRV).
CMorphTarget::~CMorphTarget()
{
SAFE_RELEASE( m_pVB );
SAFE_RELEASE( m_pIB );
SAFE_RELEASE( m_pTexture );
SAFE_RELEASE( m_pTexRV );
}
//--------------------------------------------------------------------------------------
// Returns the target's name as stored in the block header read from file.
WCHAR* CMorphTarget::GetName()
{
return m_Header.szName;
}
//--------------------------------------------------------------------------------------
// Loads a float-format morph target block from the current position in hFile:
// reads the block header, creates the float4 texture array, streams in the texel
// data, and builds the render quad. XRes/YRes are the full composition resolution
// used to place this target's sub-rect quad.
HRESULT CMorphTarget::LoadFLOAT( ID3D10Device* pd3dDevice, HANDLE hFile, UINT XRes, UINT YRes )
{
HRESULT hr = S_OK;
m_XRes = XRes;
m_YRes = YRes;
// Read in the header
// NOTE(review): dwBytesRead is never compared to the header size, so a short
// read would go unnoticed here — confirm upstream guarantees full reads.
DWORD dwBytesRead = 0;
if( !ReadFile( hFile, &m_Header, sizeof( MORPH_TARGET_BLOCK_HEADER ), &dwBytesRead, NULL ) )
{
return E_FAIL;
}
// Create the textures
V_RETURN( CreateTexturesFLOAT( pd3dDevice ) );
// Load the texture data
V_RETURN( LoadTextureDataFLOAT( pd3dDevice, m_pTexture, hFile ) );
// Create a vb
V_RETURN( CreateRenderQuad( pd3dDevice ) );
return hr;
}
//--------------------------------------------------------------------------------------
// BIASED variant of LoadFLOAT: identical flow but creates/fills the 8-bit
// signed-normalized texture array instead of the float4 one.
HRESULT CMorphTarget::LoadBIASED( ID3D10Device* pd3dDevice, HANDLE hFile, UINT XRes, UINT YRes )
{
HRESULT hr = S_OK;
m_XRes = XRes;
m_YRes = YRes;
// Read in the header
// NOTE(review): dwBytesRead is never validated against the header size —
// confirm upstream guarantees full reads.
DWORD dwBytesRead = 0;
if( !ReadFile( hFile, &m_Header, sizeof( MORPH_TARGET_BLOCK_HEADER ), &dwBytesRead, NULL ) )
{
return E_FAIL;
}
// Create the textures
V_RETURN( CreateTexturesBIASED( pd3dDevice ) );
// Load the texture data
V_RETURN( LoadTextureDataBIASED( pd3dDevice, m_pTexture, hFile ) );
// Create a vb
V_RETURN( CreateRenderQuad( pd3dDevice ) );
return hr;
}
//--------------------------------------------------------------------------------------
// Binds this target's texture array to the effect's vertex-data variable and
// renders the target's quad into the given render target with the technique.
void CMorphTarget::Apply( ID3D10Device* pd3dDevice,
ID3D10RenderTargetView* pRTV,
ID3D10DepthStencilView* pDSV,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData )
{
pVertData->SetResource( m_pTexRV );
RenderToTexture( pd3dDevice, pRTV, pDSV, pTechnique );
}
//--------------------------------------------------------------------------------------
// Copies the per-vertex LDPRT coefficients (up to 6 per channel, R/G/B) out of
// pLDPRTBuff and packs them into the coeff0..coeff4 fields of pVertices.
// Handles both monochrome (1-channel, uChanMul == 0: all channels read the same
// data) and RGB buffers. Returns E_FAIL for unsupported coefficient counts,
// otherwise whatever LockBuffer returns.
HRESULT CMorphTargetManager::SetLDPRTData( MESH_VERTEX* pVertices, ID3DXPRTBuffer* pLDPRTBuff )
{
    HRESULT hr = S_OK;

    unsigned int dwNumSamples = pLDPRTBuff->GetNumSamples();
    unsigned int dwNumCoeffs = pLDPRTBuff->GetNumCoeffs();
    unsigned int uSampSize = dwNumCoeffs * pLDPRTBuff->GetNumChannels();

    // Fix: the per-channel stack arrays below hold exactly 6 floats; a buffer
    // with more coefficients would overflow them. Reject such buffers.
    if( dwNumCoeffs > 6 )
        return E_FAIL;

    float* pConvData;
    const unsigned int uChanMul = ( pLDPRTBuff->GetNumChannels() == 1 )?0:1;
    V_RETURN( pLDPRTBuff->LockBuffer( 0, dwNumSamples, &pConvData ) );

    for( UINT uVert = 0; uVert < dwNumSamples; uVert++ )
    {
        // Zero-initialized so slots beyond dwNumCoeffs pack as 0 instead of
        // reading uninitialized stack memory (fix).
        float fRCoeffs[6] = {0};
        float fGCoeffs[6] = {0};
        float fBCoeffs[6] = {0};
        for( UINT i = 0; i < dwNumCoeffs; i++ )
        {
            fRCoeffs[i] = pConvData[uVert * uSampSize + i];
            fGCoeffs[i] = pConvData[uVert * uSampSize + i + uChanMul * dwNumCoeffs];
            fBCoeffs[i] = pConvData[uVert * uSampSize + i + 2 * uChanMul * dwNumCoeffs];
        }

        // Pack the coefficients: coeff0..coeff2 carry the first four R/G/B
        // values, coeff3/coeff4 carry the remaining two of each channel.
        pVertices[uVert].coeff0.x = fRCoeffs[0];
        pVertices[uVert].coeff0.y = fRCoeffs[1];
        pVertices[uVert].coeff0.z = fRCoeffs[2];
        pVertices[uVert].coeff0.w = fRCoeffs[3];
        pVertices[uVert].coeff1.x = fGCoeffs[0];
        pVertices[uVert].coeff1.y = fGCoeffs[1];
        pVertices[uVert].coeff1.z = fGCoeffs[2];
        pVertices[uVert].coeff1.w = fGCoeffs[3];
        pVertices[uVert].coeff2.x = fBCoeffs[0];
        pVertices[uVert].coeff2.y = fBCoeffs[1];
        pVertices[uVert].coeff2.z = fBCoeffs[2];
        pVertices[uVert].coeff2.w = fBCoeffs[3];
        pVertices[uVert].coeff3.x = fRCoeffs[4];
        pVertices[uVert].coeff3.y = fRCoeffs[5];
        pVertices[uVert].coeff3.z = fGCoeffs[4];
        pVertices[uVert].coeff3.w = fGCoeffs[5];
        pVertices[uVert].coeff4.x = fBCoeffs[4];
        pVertices[uVert].coeff4.y = fBCoeffs[5];
    }
    pLDPRTBuff->UnlockBuffer();

    return hr;
}
//--------------------------------------------------------------------------------------
// Builds the immutable mesh vertex buffer (m_pMeshVB): reads NumBaseVerts
// FILE_VERTEX records from hFile, copies their index/texcoord into MESH_VERTEX
// records, folds in the LDPRT coefficients, and uploads to the GPU.
HRESULT CMorphTargetManager::CreateVB( ID3D10Device* pd3dDevice, HANDLE hFile, ID3DXPRTBuffer* pLDPRTBuff )
{
    HRESULT hr = S_OK;

    if( m_fileHeader.NumBaseVerts < 1 )
        return E_FAIL;

    // Create space for the VB data
    MESH_VERTEX* pVerts = new MESH_VERTEX[ m_fileHeader.NumBaseVerts ];
    if( !pVerts )
        return E_OUTOFMEMORY;

    // Read in the file verts
    FILE_VERTEX* pFileVerts = new FILE_VERTEX[ m_fileHeader.NumBaseVerts ];
    if( !pFileVerts )
    {
        // Fix: the original leaked pVerts on this path.
        SAFE_DELETE_ARRAY( pVerts );
        return E_OUTOFMEMORY;
    }
    DWORD dwBytesRead = 0;
    if( !ReadFile( hFile, pFileVerts, m_fileHeader.NumBaseVerts * sizeof( FILE_VERTEX ), &dwBytesRead, NULL ) )
    {
        // Fix: the original leaked both staging buffers on a failed read.
        SAFE_DELETE_ARRAY( pFileVerts );
        SAFE_DELETE_ARRAY( pVerts );
        return E_FAIL;
    }

    // Fill in the mesh vertices with the file vertices
    for( UINT i = 0; i < m_fileHeader.NumBaseVerts; i++ )
    {
        pVerts[i].DataIndex = pFileVerts[i].DataIndex;
        pVerts[i].tex = pFileVerts[i].tex;
    }
    SAFE_DELETE_ARRAY( pFileVerts );

    // Set the LDPRT data
    if( FAILED( SetLDPRTData( pVerts, pLDPRTBuff ) ) )
    {
        SAFE_DELETE_ARRAY( pVerts );
        return E_FAIL;
    }

    // Upload the assembled vertices into an immutable VB.
    D3D10_BUFFER_DESC vbdesc =
    {
        m_fileHeader.NumBaseVerts * sizeof( MESH_VERTEX ),
        D3D10_USAGE_IMMUTABLE,
        D3D10_BIND_VERTEX_BUFFER,
        0,
        0
    };
    D3D10_SUBRESOURCE_DATA InitData;
    InitData.pSysMem = pVerts;
    InitData.SysMemPitch = 0;
    InitData.SysMemSlicePitch = 0;
    if( FAILED( pd3dDevice->CreateBuffer( &vbdesc, &InitData, &m_pMeshVB ) ) )
    {
        SAFE_DELETE_ARRAY( pVerts );
        return E_FAIL;
    }
    SAFE_DELETE_ARRAY( pVerts );

    return hr;
}
//--------------------------------------------------------------------------------------
// Builds the immutable mesh index buffer (m_pMeshIB): reads NumBaseIndices
// DWORD indices from hFile and uploads them to the GPU.
HRESULT CMorphTargetManager::CreateIB( ID3D10Device* pd3dDevice, HANDLE hFile )
{
    HRESULT hr = S_OK;

    // Fix: guard against an empty index count, matching CreateVB's check.
    if( m_fileHeader.NumBaseIndices < 1 )
        return E_FAIL;

    // Create space for the index data
    DWORD* pIndices = new DWORD[ m_fileHeader.NumBaseIndices ];
    if( !pIndices )
        return E_OUTOFMEMORY;

    // Read in the indices
    DWORD dwBytesRead = 0;
    if( !ReadFile( hFile, pIndices, m_fileHeader.NumBaseIndices * sizeof( DWORD ), &dwBytesRead, NULL ) )
    {
        SAFE_DELETE_ARRAY( pIndices );
        return E_FAIL;
    }

    // Upload into an immutable IB.
    D3D10_BUFFER_DESC ibdesc =
    {
        m_fileHeader.NumBaseIndices * sizeof( DWORD ),
        D3D10_USAGE_IMMUTABLE,
        D3D10_BIND_INDEX_BUFFER,
        0,
        0
    };
    D3D10_SUBRESOURCE_DATA InitData;
    InitData.pSysMem = pIndices;
    InitData.SysMemPitch = 0;
    InitData.SysMemSlicePitch = 0;
    // Fix: free pIndices before the early return so a failed CreateBuffer no
    // longer leaks it; V_RETURN keeps the DXUT error tracing.
    hr = pd3dDevice->CreateBuffer( &ibdesc, &InitData, &m_pMeshIB );
    SAFE_DELETE_ARRAY( pIndices );
    V_RETURN( hr );

    return hr;
}
//--------------------------------------------------------------------------------------
// Creates the manager's 3-slice float4 texture array (renderable AND sampleable,
// unlike the per-target textures), a matching 3-slice D32 depth texture, and the
// SRV / render-target / depth-stencil views over all three slices.
HRESULT CMorphTargetManager::CreateTextures( ID3D10Device* pd3dDevice, UINT uiXRes, UINT uiYRes )
{
HRESULT hr = S_OK;
// Accumulation texture array: one slice per vertex attribute.
D3D10_TEXTURE2D_DESC dstex;
dstex.Width = uiXRes;
dstex.Height = uiYRes;
dstex.MipLevels = 1;
dstex.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
dstex.SampleDesc.Count = 1;
dstex.SampleDesc.Quality = 0;
dstex.Usage = D3D10_USAGE_DEFAULT;
dstex.BindFlags = D3D10_BIND_RENDER_TARGET | D3D10_BIND_SHADER_RESOURCE;
dstex.CPUAccessFlags = 0;
dstex.MiscFlags = 0;
dstex.ArraySize = 3;
V_RETURN( pd3dDevice->CreateTexture2D( &dstex, NULL, &m_pTexture ) );
// Matching depth texture: same dimensions/array size, reusing dstex.
dstex.Format = DXGI_FORMAT_D32_FLOAT;
dstex.BindFlags = D3D10_BIND_DEPTH_STENCIL;
V_RETURN( pd3dDevice->CreateTexture2D( &dstex, NULL, &m_pDepth ) );
// Create Resource Views
D3D10_SHADER_RESOURCE_VIEW_DESC SRVDesc;
ZeroMemory( &SRVDesc, sizeof( SRVDesc ) );
SRVDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
SRVDesc.ViewDimension = D3D10_SRV_DIMENSION_TEXTURE2DARRAY;
SRVDesc.Texture2DArray.MipLevels = 1;
SRVDesc.Texture2DArray.FirstArraySlice = 0;
SRVDesc.Texture2DArray.ArraySize = 3;
V_RETURN( pd3dDevice->CreateShaderResourceView( m_pTexture, &SRVDesc, &m_pTexRV ) );
// Create Render Target Views
D3D10_RENDER_TARGET_VIEW_DESC DescRT;
DescRT.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
DescRT.ViewDimension = D3D10_RTV_DIMENSION_TEXTURE2DARRAY;
DescRT.Texture2DArray.FirstArraySlice = 0;
DescRT.Texture2DArray.ArraySize = 3;
DescRT.Texture2DArray.MipSlice = 0;
V_RETURN( pd3dDevice->CreateRenderTargetView( m_pTexture, &DescRT, &m_pRTV ) );
// Create Depth stencil view
D3D10_DEPTH_STENCIL_VIEW_DESC DescDS;
DescDS.Format = DXGI_FORMAT_D32_FLOAT;
DescDS.ViewDimension = D3D10_DSV_DIMENSION_TEXTURE2DARRAY;
DescDS.Texture2DArray.FirstArraySlice = 0;
DescDS.Texture2DArray.ArraySize = 3;
DescDS.Texture2DArray.MipSlice = 0;
V_RETURN( pd3dDevice->CreateDepthStencilView( m_pDepth, &DescDS, &m_pDSV ) );
return hr;
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Constructor: null all resource pointers and zero the file header so that
// Destroy() is safe even if Create() was never called.
//--------------------------------------------------------------------------------------
CMorphTargetManager::CMorphTargetManager() : m_pMeshVB( NULL ),
m_pMeshIB( NULL ),
m_pMeshLayout( NULL ),
m_pQuadLayout( NULL ),
m_pTexture( NULL ),
m_pTexRV( NULL ),
m_pRTV( NULL ),
m_pDepth( NULL ),
m_pDSV( NULL ),
m_XRes( 0 ),
m_YRes( 0 ),
m_pMorphTargets( NULL )
{
ZeroMemory( &m_fileHeader, sizeof( MORPH_TARGET_FILE_HEADER ) );
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Destructor: release all D3D resources via Destroy().
//--------------------------------------------------------------------------------------
CMorphTargetManager::~CMorphTargetManager()
{
Destroy();
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Loads the morph-target mesh file plus its LDPRT companion file and builds
// every GPU resource (VB, IB, textures, per-target data).
// Returns S_OK, DXUTERR_MEDIANOTFOUND (mesh file missing), E_OUTOFMEMORY, or
// E_FAIL.  All early-failure paths now release pLDPRTBuffer and close hFile
// (the original leaked the PRT buffer on every failure after it was loaded).
//--------------------------------------------------------------------------------------
HRESULT CMorphTargetManager::Create( ID3D10Device* pd3dDevice, WCHAR* szMeshFile, WCHAR* szLDPRTFile )
{
HRESULT hr = S_OK;
// Both input files must exist before any work is done
if( INVALID_FILE_ATTRIBUTES == GetFileAttributes( szMeshFile ) ||
INVALID_FILE_ATTRIBUTES == GetFileAttributes( szLDPRTFile ) )
return E_FAIL;
// Load the LDPRT buffer (consumed by CreateVB)
ID3DXPRTBuffer* pLDPRTBuffer = NULL;
V_RETURN( D3DXLoadPRTBufferFromFile( szLDPRTFile, &pLDPRTBuffer ) );
// Open the mesh file
HANDLE hFile = CreateFile( szMeshFile, FILE_READ_DATA, FILE_SHARE_READ, NULL, OPEN_EXISTING, 0, NULL );
if( INVALID_HANDLE_VALUE == hFile )
{
SAFE_RELEASE( pLDPRTBuffer );
return DXUTERR_MEDIANOTFOUND;
}
// Read and validate the file header (guard against short reads too)
DWORD dwBytesRead = 0;
if( !ReadFile( hFile, &m_fileHeader, sizeof( MORPH_TARGET_FILE_HEADER ), &dwBytesRead, NULL ) ||
dwBytesRead != sizeof( MORPH_TARGET_FILE_HEADER ) )
{
SAFE_RELEASE( pLDPRTBuffer );
CloseHandle( hFile );
return E_FAIL;
}
// Read in the VB and IB
if( FAILED( CreateVB( pd3dDevice, hFile, pLDPRTBuffer ) ) ||
FAILED( CreateIB( pd3dDevice, hFile ) ) )
{
SAFE_RELEASE( pLDPRTBuffer );
CloseHandle( hFile );
return E_FAIL;
}
// We're done with the LDPRT buffer
SAFE_RELEASE( pLDPRTBuffer );
// Allocate the morph targets
m_pMorphTargets = new CMorphTarget[ m_fileHeader.NumTargets ];
if( !m_pMorphTargets )
{
CloseHandle( hFile );
return E_OUTOFMEMORY;
}
// Choose a square texture resolution big enough to hold every base vertex
float fRes = sqrt( ( float )m_fileHeader.NumBaseVerts );
m_XRes = ( UINT )fRes + 1;
m_YRes = m_XRes;
// Create the render-target / depth textures used for morph blending
if( FAILED( CreateTextures( pd3dDevice, m_XRes, m_YRes ) ) )
{
CloseHandle( hFile );
return E_FAIL;
}
// Target 0 is the base mesh, stored as raw floats
if( FAILED( ( m_pMorphTargets[0].LoadFLOAT( pd3dDevice, hFile, m_XRes, m_YRes ) ) ) )
{
CloseHandle( hFile );
return E_FAIL;
}
// Remaining targets are stored as biased deltas from the base
for( UINT i = 1; i < m_fileHeader.NumTargets; i++ )
{
if( FAILED( ( m_pMorphTargets[i].LoadBIASED( pd3dDevice, hFile, m_XRes, m_YRes ) ) ) )
{
CloseHandle( hFile );
return E_FAIL;
}
}
CloseHandle( hFile );
return hr;
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Releases every D3D resource and deletes the morph-target array.
// NOTE(review): assumes the SAFE_* macros null the members after release, as
// the DXUT versions do — which makes repeated calls safe; confirm if a custom
// definition is in use.
//--------------------------------------------------------------------------------------
void CMorphTargetManager::Destroy()
{
SAFE_RELEASE( m_pMeshVB );
SAFE_RELEASE( m_pMeshIB );
SAFE_RELEASE( m_pMeshLayout );
SAFE_RELEASE( m_pQuadLayout );
SAFE_RELEASE( m_pTexture );
SAFE_RELEASE( m_pTexRV );
SAFE_RELEASE( m_pRTV );
SAFE_RELEASE( m_pDepth );
SAFE_RELEASE( m_pDSV );
SAFE_DELETE_ARRAY( m_pMorphTargets );
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Resets the blended result to the base mesh by applying target 0 at full
// weight (1.0).
//--------------------------------------------------------------------------------------
void CMorphTargetManager::ResetToBase( ID3D10Device* pd3dDevice,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData,
ID3D10EffectScalarVariable* pBlendAmt,
ID3D10EffectVectorVariable* pMaxDeltas )
{
ApplyMorph( pd3dDevice, ( UINT )0, 1.0f, pTechnique, pVertData, pBlendAmt, pMaxDeltas );
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Name-based overload: applies the morph target(s) whose name equals szMorph.
// Deliberately keeps scanning after a match, so duplicate names are all
// applied; does nothing if no target matches.
//--------------------------------------------------------------------------------------
void CMorphTargetManager::ApplyMorph( ID3D10Device* pd3dDevice,
WCHAR* szMorph,
float fAmount,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData,
ID3D10EffectScalarVariable* pBlendAmt,
ID3D10EffectVectorVariable* pMaxDeltas )
{
for( UINT i = 0; i < m_fileHeader.NumTargets; i++ )
{
if( 0 == wcscmp( szMorph, m_pMorphTargets[i].GetName() ) )
ApplyMorph( pd3dDevice, i, fAmount, pTechnique, pVertData, pBlendAmt, pMaxDeltas );
}
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Blends morph target iMorph into the accumulation textures with weight
// fAmount.  Uploads the per-target max position/normal/tangent deltas as a
// 3-vector array (matching the 3 texture slices), then renders the morph quad.
// iMorph is not range-checked; callers must pass a valid index.
//--------------------------------------------------------------------------------------
void CMorphTargetManager::ApplyMorph( ID3D10Device* pd3dDevice,
UINT iMorph,
float fAmount,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData,
ID3D10EffectScalarVariable* pBlendAmt,
ID3D10EffectVectorVariable* pMaxDeltas )
{
pBlendAmt->SetFloat( fAmount );
// delta[0..2] = position / normal / tangent maxima for de-biasing in the shader
D3DXVECTOR4 delta[3];
delta[0] = m_pMorphTargets[ iMorph ].GetMaxPositionDelta();
delta[1] = m_pMorphTargets[ iMorph ].GetMaxNormalDelta();
delta[2] = m_pMorphTargets[ iMorph ].GetMaxTangentDelta();
pMaxDeltas->SetFloatVectorArray( ( float* )delta, 0, 3 );
pd3dDevice->IASetInputLayout( m_pQuadLayout );
m_pMorphTargets[ iMorph ].Apply( pd3dDevice, m_pRTV, m_pDSV, pTechnique, pVertData );
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Draws the morphed mesh: binds the blended vertex-data texture and the base
// (target 0) texture, sets mesh VB/IB state, and issues one indexed draw per
// technique pass.
//--------------------------------------------------------------------------------------
void CMorphTargetManager::Render( ID3D10Device* pd3dDevice,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData,
ID3D10EffectShaderResourceVariable* pVertDataOrig,
ID3D10EffectScalarVariable* pDataTexSize )
{
pVertData->SetResource( m_pTexRV );
// Textures are square, so one dimension describes both
pDataTexSize->SetInt( m_XRes );
// Set the original (base target) data for the wrinkle map
ID3D10ShaderResourceView* pOrigRV = m_pMorphTargets[0].GetTexRV();
pVertDataOrig->SetResource( pOrigRV );
pd3dDevice->IASetInputLayout( m_pMeshLayout );
ID3D10Buffer* pBuffers[1] = { m_pMeshVB };
UINT uStrides = sizeof( MESH_VERTEX );
UINT offsets = 0;
pd3dDevice->IASetVertexBuffers( 0, 1, pBuffers, &uStrides, &offsets );
pd3dDevice->IASetIndexBuffer( m_pMeshIB, DXGI_FORMAT_R32_UINT, 0 );
pd3dDevice->IASetPrimitiveTopology( D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST );
// Draw every pass of the technique
D3D10_TECHNIQUE_DESC techDesc;
pTechnique->GetDesc( &techDesc );
for( UINT p = 0; p < techDesc.Passes; ++p )
{
pTechnique->GetPassByIndex( p )->Apply( 0 );
pd3dDevice->DrawIndexed( m_fileHeader.NumBaseIndices, 0, 0 );
}
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Stores the mesh and full-screen-quad input layouts.  AddRef's both; the
// matching Release happens in Destroy().  Not safe to call twice without an
// intervening Destroy() (the previous layouts would leak).
//--------------------------------------------------------------------------------------
void CMorphTargetManager::SetVertexLayouts( ID3D10InputLayout* pMeshLayout, ID3D10InputLayout* pQuadLayout )
{
m_pMeshLayout = pMeshLayout;
m_pMeshLayout->AddRef();
m_pQuadLayout = pQuadLayout;
m_pQuadLayout->AddRef();
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Renders morph target iTarget's data texture (render-target slice iRT) to the
// current output — a visualization/debug path; the 100.0 blend amount is an
// exaggeration factor for visibility.
//--------------------------------------------------------------------------------------
void CMorphTargetManager::ShowMorphTexture( ID3D10Device* pd3dDevice,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData,
ID3D10EffectScalarVariable* pRT,
ID3D10EffectScalarVariable* pBlendAmt,
int iRT,
int iTarget )
{
pVertData->SetResource( m_pMorphTargets[iTarget].GetTexRV() );
pRT->SetInt( iRT );
pBlendAmt->SetFloat( 100.0f );
pd3dDevice->IASetInputLayout( m_pQuadLayout );
m_pMorphTargets[ iTarget ].RenderToTexture( pd3dDevice, NULL, NULL, pTechnique );
}
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Name-based overload of ShowMorphTexture: finds the target(s) named szMorph
// and forwards to the index-based overload.  No-op when nothing matches.
//--------------------------------------------------------------------------------------
void CMorphTargetManager::ShowMorphTexture( ID3D10Device* pd3dDevice,
ID3D10EffectTechnique* pTechnique,
ID3D10EffectShaderResourceVariable* pVertData,
ID3D10EffectScalarVariable* pRT,
ID3D10EffectScalarVariable* pBlendAmt,
int iRT,
WCHAR* szMorph )
{
for( UINT i = 0; i < m_fileHeader.NumTargets; i++ )
{
if( 0 == wcscmp( szMorph, m_pMorphTargets[i].GetName() ) )
ShowMorphTexture( pd3dDevice, pTechnique, pVertData, pRT, pBlendAmt, iRT, i );
}
}
|
"use strict";
var batchelor = require("./lib/Batchelor");
var Events = require("./lib/Events");
module.exports = {
configure: batchelor.configure,
execute: batchelor.execute,
close: batchelor.stop,
Events: Events
};
|
/// Returns a copy of `arr` with every occurrence of `item` removed,
/// preserving the relative order of the remaining elements.
func removeOccurrences(arr: Array<Int>, item: Int) -> Array<Int> {
    var kept: [Int] = []
    for element in arr where element != item {
        kept.append(element)
    }
    return kept
}
// Demo: strip every 5 from the sample array and print the result.
let arr = [3, 5, 7, 2, 3, 4, 5]
let item = 5
// Output: [3, 7, 2, 3, 4]
print(removeOccurrences(arr: arr, item: item))
<reponame>ustclug-dev/206hub
import { ItemList } from "../libs/type"
import Card from "react-bootstrap/Card"
import Table from "react-bootstrap/Table"
import Link from "next/link"
import { slugify } from "../libs/utils"
import Button from "react-bootstrap/Button"
// Props for the Archive listing card: a heading plus the items to tabulate.
export interface ArchiveProps {
title: string
items: ItemList
}
// Renders a Bootstrap card with a table of items: each row links to the
// item's page (/<collection>/<slug>) and lists comment count, average score,
// and tag links (/tag/<slugified-tag>).
export default function Archive(props: ArchiveProps) {
return (
<Card bg="light" className="shadow-sm">
<Card.Header className="py-3">
<h3 className="mb-0">{props.title}</h3>
</Card.Header>
<Card.Body className="p-0">
<Table hover className="mb-0">
<thead className="nowrap">
<tr>
<th>条目</th>
<th>评论数</th>
<th>平均分</th>
<th>标签</th>
</tr>
</thead>
<tbody>
{props.items.map((item) => {
const url = `/${item.collection.slug}/${item.slug}`
return (
<tr key={url}>
<td>
<Link href={url}>
<a>{item.name}</a>
</Link>
</td>
<td>{item.commentCnt}</td>
<td>{item.averageScore}</td>
<td>
{item.tags.map((tag) => (
<Link key={tag} href={`/tag/${slugify(tag)}`} passHref>
<a className="tagButton">{tag}</a>
</Link>
))}
</td>
</tr>
)
})}
</tbody>
</Table>
</Card.Body>
</Card>
)
}
|
#!/bin/bash
# Run a single dieharder randomness test (-d 206) against built-in generator
# number 37, with a fixed seed (-S) so the run is reproducible.
# NOTE(review): test/generator numbering varies between dieharder versions —
# check `dieharder -l` / `dieharder -g -1` on the target machine.
dieharder -d 206 -g 37 -S 1976792971
|
<filename>src/utils/index.ts
/**
 * Returns a debounced wrapper around `func` that delays invocation until
 * `wait` ms have elapsed since the last call.  When `immediate` is true the
 * function fires on the leading edge instead of the trailing edge.
 */
export const debounce = (func: Function, wait: number, immediate: boolean) => {
  let pending: any = null
  return (...callArgs: any[]) => {
    const onTimerDone = () => {
      pending = null
      // Trailing-edge invocation only when not in immediate mode.
      if (!immediate) func(...callArgs)
    }
    // Leading-edge fires only when no timer is currently pending.
    const fireNow = immediate && !pending
    clearTimeout(pending)
    pending = setTimeout(onTimerDone, wait)
    if (fireNow) func(...callArgs)
  }
}
|
use std::sync::{Arc, Mutex, Condvar};
/// Shared condvar bookkeeping: maps each waiting thread id to its current
/// wait status, behind an `Arc<Mutex<..>>` so signalers and waiters share it.
/// NOTE(review): `HashMap`, `ThreadId` and `CondvarWaitStatus` are resolved
/// elsewhere in this crate; `use std::collections::HashMap` must be in scope.
struct State {
waiters: Arc<Mutex<HashMap<ThreadId, CondvarWaitStatus>>>,
}
impl State {
/// Broadcast-style wakeup: marks every waiter except the caller (`me`) as
/// Broadcast-notified and unblocks its task in the execution state.
fn signal_and_unblock(&self, me: ThreadId) {
let mut waiters = self.waiters.lock().unwrap();
for (tid, status) in waiters.iter_mut() {
if *tid != me {
*status = CondvarWaitStatus::Broadcast(current::clock());
// Note: the task might have been unblocked by a previous signal
ExecutionState::with(|s| s.get_mut(*tid).unblock());
}
}
}
}
#!/bin/sh
# Launch the G-code server (expects gcodeServer.py in the current directory).
python gcodeServer.py
#!/bin/bash
# Batch-render a fixed list of stories through artsy.py, one invocation per
# story directory.  Trailing comments label individual stories.
python3 artsy.py story/1
python3 artsy.py story/3
python3 artsy.py story/2 #covid
python3 artsy.py story/13 #blackhole
python3 artsy.py story/thor
python3 artsy.py story/4 #time passes
python3 artsy.py story/7 #stronger
python3 artsy.py story/8
python3 artsy.py story/9
python3 artsy.py story/10 #glasses
python3 artsy.py story/12
#!/bin/bash
a="one two three four"
# To split $a into words, set IFS to a space, expand it unquoted into an
# array, then restore the original IFS.
OLD_IFS="$IFS"
IFS=" "
arr=($a)   # intentionally unquoted: word-splitting performs the split
IFS="$OLD_IFS"
echo ${arr[0]}" "${arr[2]}
|
#!/bin/bash
# Push the src/app/tests/api-spec subtree to the api-spec remote.
# Usage: $0 [branch]   (defaults to branch 0.1 when no/empty argument given)
BRANCH="${1:-0.1}"
# Quote the expansion so an unusual branch name cannot word-split.
git subtree push --prefix src/app/tests/api-spec api-spec "$BRANCH" --squash
|
import expect from 'expect';
import sinon from 'sinon';
import rewiremock from "rewiremock/webpack";
import { getters } from '../../../../Store/modules/AppState';
// Unit tests for the AppState Vuex getters: each breakpoint getter is probed
// just inside and just outside its width boundary, and the page-visibility
// getter is exercised both directly and with its utility dependency mocked.
describe('Vuex App Module Getters', () => {
describe("checks if on mobile screen", () => {
it('should be a mobile screen', () => {
const state = { window: { width: 480 } };
const result = getters.MOBILE_DEVICE_WIDTH(state);
expect(result).toBe(true);
})
it('should NOT be a mobile screen', () => {
const state = { window: { width: 481 } };
const result = getters.MOBILE_DEVICE_WIDTH(state);
expect(result).toBe(false);
})
})
describe("checks if on a tablet screen", () => {
it("should be a tablet screen", () => {
const state = { window: { width: 577 } };
const result = getters.TABLET_DEVICE_WIDTH(state);
expect(result).toBe(true);
})
it("should NOT be a tablet screen", () => {
const tooSmallState = { window: { width: 576 } };
const tooSmallResult = getters.TABLET_DEVICE_WIDTH(tooSmallState);
expect(tooSmallResult).toBe(false);
const tooLargeState = { window: { width: 992 } };
const tooLargeResult = getters.TABLET_DEVICE_WIDTH(tooLargeState);
expect(tooLargeResult).toBe(false);
})
})
describe("checks if generally on a desktop", () => {
it("should be a general desktop screen", () => {
const state = { window: { width: 992 } };
const result = getters.GENERAL_DESKTOP_WIDTH(state);
expect(result).toBe(true);
})
it("should NOT be a general desktop screen", () => {
const state = { window: { width: 991 } };
const result = getters.GENERAL_DESKTOP_WIDTH(state);
expect(result).toBe(false);
})
})
describe("checks if on mid-sized desktop screen", () => {
it("should be a mid-sized desktop screen", () => {
const state = { window: { width: 992 } };
const result = getters.MID_DESKTOP_DEVICE_WIDTH(state);
expect(result).toBe(true);
})
it("should NOT be a mid-sized desktop screen", () => {
const tooSmallState = { window: { width: 991 } };
const tooSmallResult = getters.MID_DESKTOP_DEVICE_WIDTH(tooSmallState);
expect(tooSmallResult).toBe(false);
const tooLargeState = { window: { width: 1400 } };
const tooLargeResult = getters.MID_DESKTOP_DEVICE_WIDTH(tooLargeState);
expect(tooLargeResult).toBe(false);
})
})
describe("checks if on large desktop screen", () => {
it("should be a large desktop screen", () => {
const state = { window: { width: 1400 } };
const result = getters.LARGE_DESKTOP_WIDTH(state);
expect(result).toBe(true);
})
it("should NOT be a large desktop screen", () => {
const tooSmallState = { window: { width: 1399 } };
const tooSmallResult = getters.LARGE_DESKTOP_WIDTH(tooSmallState);
expect(tooSmallResult).toBe(false);
})
})
describe("checks if page visibility api available", () => {
it("should be available", () => {
const availableState = { websiteVisibility: { hidden: 'hidden', visibilityChange: 'visibilitychange' } };
const availableResult = getters.PAGE_VISIBILITY_READY(availableState);
expect(availableResult).toBe(true);
})
it("should NOT be available", () => {
const notAvailableState = { websiteVisibility: { hidden: 'none', visibilityChange: 'none' } };
const notAvailableResult = getters.PAGE_VISIBILITY_READY(notAvailableState);
expect(notAvailableResult).toBe(false);
})
// Stubs the page_visibility utility via rewiremock to verify the getter
// delegates with exactly the state's hidden/visibilityChange values.
it("calls PageVisAPI Utility Function once with right args", () => {
let isVisApiAvailable = sinon.stub().returns(true);
const appModule = rewiremock.proxy("../../../../Store/modules/AppState", { "../../../../Utility/Functions/page_visibility": { IsVisApiAvailable: isVisApiAvailable } })
rewiremock.enable();
const availableState = { websiteVisibility: { hidden: 'hidden', visibilityChange: 'visibilitychange' } };
appModule.getters.PAGE_VISIBILITY_READY(availableState);
sinon.assert.calledOnceWithExactly(isVisApiAvailable, availableState.websiteVisibility.hidden, availableState.websiteVisibility.visibilityChange);
rewiremock.disable();
})
})
});
// Doxygen-generated search-index data (enum value entries for Application);
// do not edit by hand — regenerate the documentation instead.
var searchData=
[
['info_237',['Info',['../class_application.html#ae10b8e3e8ee614e5b077d5dcf8bf1a7caf197a4781c5a8b3f143ea828f9403172',1,'Application']]],
['input_5fkierunek_238',['Input_Kierunek',['../class_application.html#ae10b8e3e8ee614e5b077d5dcf8bf1a7cacc36ac7b5c79d0e107460ab0f79b7949',1,'Application']]],
['input_5fprowadzacy_239',['Input_Prowadzacy',['../class_application.html#ae10b8e3e8ee614e5b077d5dcf8bf1a7ca17e0eb7d4f5f0171f2477d18f4b34480',1,'Application']]],
['input_5fprzedmiot_240',['Input_Przedmiot',['../class_application.html#ae10b8e3e8ee614e5b077d5dcf8bf1a7cab04c7edf4a8cfb71b388edb6b04b14d7',1,'Application']]],
['input_5fstudent_241',['Input_Student',['../class_application.html#ae10b8e3e8ee614e5b077d5dcf8bf1a7ca3861d037f44686be59fea1b55cc302d5',1,'Application']]],
['input_5fwydzial_242',['Input_Wydzial',['../class_application.html#ae10b8e3e8ee614e5b077d5dcf8bf1a7ca174e4a93cd6a39012a52bb62aa0cef72',1,'Application']]]
];
|
<reponame>nikolabebic95/PIAZadaci<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package beans;
import java.util.ArrayList;
/**
*
* @author Nikola
*/
/**
 * Bean holding the skiers assigned to an instructor.
 * Note: getList/setList expose the live list reference (no defensive copy).
 */
public class InstructorBean {
// Backing list of skiers; never null after construction.
private ArrayList<SkierBean> list = new ArrayList<>();
public ArrayList<SkierBean> getList() {
return list;
}
public void setList(ArrayList<SkierBean> list) {
this.list = list;
}
// Appends a single skier to the instructor's list.
public void add(SkierBean skierBean) {
list.add(skierBean);
}
}
|
/**
 * Removes every element from `arr` in place and returns that same array
 * instance (callers holding other references see it emptied too).
 */
function emptyArray(arr) {
  arr.splice(0, arr.length);
  return arr;
}
package cn.jackq.messenger.audio;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Created on: 5/8/17.
* Creator: <NAME> <<EMAIL>>
*/
public class MessengerAudioPacker {
private static final String TAG = "MessengerAudioPacker";
/**
 * Prepends a little-endian 16-bit sequence number to the remaining bytes of
 * the audio frame and returns the packed buffer.
 * NOTE(review): copies via buffer.array() with buffer.position() as offset —
 * this assumes a heap buffer whose arrayOffset() is 0; confirm callers never
 * pass a sliced/direct buffer.
 */
static ByteBuffer packAudioFrame(int index, ByteBuffer buffer){
int size = 2 + buffer.limit() - buffer.position();
byte[] buf = new byte[size];
ByteBuffer byteBuffer = ByteBuffer.wrap(buf);
byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
byteBuffer.putShort((short) index);
System.arraycopy(buffer.array(), buffer.position(), buf, 2, size - 2);
return ByteBuffer.wrap(buf, 0, size);
}
/**
 * read the payload index (sequential number)
 * and move the position of byte buffer to initial position of data payload
 * @param buffer the packed data buffer with a leading sequential number
 * @return packet index
 */
static int unpackAudioFrame(ByteBuffer buffer){
buffer.order(ByteOrder.LITTLE_ENDIAN);
return (int) buffer.getShort();
}
}
|
import numpy as np
from scipy import optimize


def optimization_function(x):
    """Objective to minimize: sin(x)."""
    return np.sin(x)


# Bounds are only honored by the bounded scalar minimizer; without
# method='bounded' the default Brent method ignores `bounds` entirely.
res = optimize.minimize_scalar(optimization_function, bounds=[-5, 5], method='bounded')
print(res.x)
# Result: a minimizer of sin on [-5, 5] (sin(res.x) == -1), e.g. -pi/2
#!/bin/bash
#
# Copyright (C) 2008-2011,
# LingCloud Team,
# Institute of Computing Technology,
# Chinese Academy of Sciences.
# P.O.Box 2704, 100190, Beijing, China.
#
# http://lingcloud.org
#
# Install dependencies from distribution software source
# Usage: install-dependencies.sh [NODE_TYPE: main|xen] [METHOD: yum|zypper|apt] <OPENNEBULA_DIR>
# Run from the script's own directory so relative paths (dependencies/...) work.
pushd `dirname "$0"`
NODE_TYPE="$1"        # main | xen | nfs | app | general
METHOD="$2"           # yum | zypper | apt (only yum is implemented)
OPENNEBULA_DIR="$3"   # install prefix for OpenNebula (main node only)
# Report an error and interactively ask whether to abort the installation.
#   $1 - error description (optional; a default message is used when empty)
# Returns 0 when the user chooses to continue; exits 1 when they choose to quit.
onerror ()
{
    local _ERRINFO="$1"
    [ "$_ERRINFO" = "" ] && _ERRINFO="no detailed error information"
    echo "Error: $_ERRINFO."
    local _ERRINPUTED=""
    # Re-prompt until a recognizable y/n answer arrives; `case` replaces the
    # old deprecated `[ ... -a ... ]` compound test.
    while true
    do
        echo -n "Do you want to exit (y/n)? "
        read _ERRINPUTED
        case "$_ERRINPUTED" in
            y|Y) exit 1 ;;
            n|N) return 0 ;;
        esac
    done
}
# Detect the machine architecture so the right bundled RPMs are selected.
case "`uname -m`" in
x86_64|amd64) ARCH="x86_64" ;;
*) ARCH="i386" ;;
esac
echo "==== Install dependencies for $NODE_TYPE node by $METHOD ($ARCH) ===="
# Dispatch on node type, then on package manager.  Only the yum path is
# implemented; every failing step funnels through onerror so the operator can
# choose to continue or abort.
case $NODE_TYPE in
main)
case $METHOD in
yum)
# Common toolchain and SSH
echo " * yum for common"
yum install openssh-server openssh-clients gcc gcc-c++ libstdc++ libstdc++-devel flex bison || onerror "yum error"
# Molva
echo " * yum for Molva"
yum install mysql* || onerror "yum error"
# OpenNebula runtime dependencies
echo " * yum for OpenNebula"
yum install mysql* ruby* openssl openssl-devel curl-devel unixODBC unixODBC-devel libxml2-devel || onerror "yum error"
echo " * rpm for xmlrpc-c xmlrpc-c-devel"
rpm -Uhv dependencies/rpms/xmlrpc-c-*.$ARCH.rpm || onerror "rpm error (you should ignore this error if the packages are already installed)"
echo " * yum for scons"
rpm -Uhv dependencies/rpms/scons-*.rpm || onerror "rpm error (you should ignore this error if the packages are already installed)"
# Build sqlite, sqlite3-ruby, and OpenNebula from the bundled tarballs
pushd dependencies/src
tar xzf sqlite-autoconf-*.tar.gz || onerror "tar error"
tar xzf sqlite3-ruby-*.tar.gz || onerror "tar error"
tar xzf opennebula-*.tar.gz || onerror "tar error"
pushd sqlite-autoconf-*/
echo " * src for sqlite"
./configure --prefix=/usr || onerror "configure error"
make || onerror "make error"
make install || onerror "make install error"
popd
pushd sqlite3-ruby-*/
echo " * src for sqlite3-ruby"
ruby setup.rb config || onerror "ruby config error"
ruby setup.rb setup || onerror "ruby setup error"
ruby setup.rb install || onerror "ruby install error"
popd
pushd opennebula-*/
echo " * src for opennebula"
scons mysql=yes || onerror "scons error"
mkdir -p "$OPENNEBULA_DIR" || onerror "mkdir error"
./install.sh -d "$OPENNEBULA_DIR" || onerror "install error"
popd
rm -rf sqlite-autoconf-*/ sqlite3-ruby-*/ opennebula-*/
popd
chkconfig mysqld on
;;
zypper)
echo "unimplemented method"
exit 1
;;
apt)
echo "unimplemented method"
exit 1
;;
*)
echo "bad method"
exit 1
;;
esac
;;
xen)
case $METHOD in
yum)
# Common toolchain and SSH
echo " * yum for common"
yum install openssh-server openssh-clients gcc gcc-c++ libstdc++ libstdc++-devel flex bison || onerror "yum error"
# OpenNebula hypervisor-side dependencies
echo " * yum for OpenNebula (xen)"
yum install ruby* qemu* kvm-qemu-img || onerror "yum error"
;;
zypper)
echo "unimplemented method"
exit 1
;;
apt)
echo "unimplemented method"
exit 1
;;
*)
echo "bad method"
exit 1
;;
esac
;;
nfs)
case $METHOD in
yum)
# Common
echo " * yum for common"
yum install openssh-server openssh-clients || onerror "yum error"
# NFS server tooling
echo " * yum for NFS"
yum install nfs-utils nfs-utils-lib portmap system-config-nfs || onerror "yum error"
# QEMU
echo " * yum for QEMU"
yum install qemu* kvm-qemu-img || onerror "yum error"
;;
zypper)
echo "unimplemented method"
exit 1
;;
apt)
echo "unimplemented method"
exit 1
;;
*)
echo "bad method"
exit 1
;;
esac
;;
app)
case $METHOD in
yum)
# Common
echo " * yum for common"
yum install openssh-server openssh-clients || onerror "yum error"
# QEMU
echo " * yum for QEMU"
yum install qemu* kvm-qemu-img || onerror "yum error"
;;
zypper)
echo "unimplemented method"
exit 1
;;
apt)
echo "unimplemented method"
exit 1
;;
*)
echo "bad method"
exit 1
;;
esac
;;
general)
case $METHOD in
yum)
# Common
echo " * yum for common"
yum install openssh-server openssh-clients || onerror "yum error"
;;
zypper)
echo "unimplemented method"
exit 1
;;
apt)
echo "unimplemented method"
exit 1
;;
*)
echo "bad method"
exit 1
;;
esac
;;
*)
echo "bad node type"
exit 1
;;
esac
popd
exit 0
|
<reponame>guidosantillan01/syder-ideas
import React from 'react';
import { Link } from 'react-router-dom';
// Landing screen: offers two navigation choices — create a new idea
// (/create) or review existing ideas (/dashboard).
const Welcome = () => {
return (
<div className="hero">
<div className="hero-body">
<div className="container is-centered box">
<h1 className="title is-4">I want to...</h1>
<div className="columns is-centered">
<div className="column is-two-fifths">
<Link to="/create">
<button
// autoFocus
className="button is-link is-medium"
type="button"
>
Create new idea
</button>
</Link>
</div>
<div className="column is-two-fifths">
<Link to="/dashboard">
<button className="button is-light is-medium" type="button">
Check my ideas
</button>
</Link>
</div>
</div>
</div>
</div>
</div>
);
};
export default Welcome;
|
<filename>src/utility/exists/exists.utility.spec.ts
/**
* @author <NAME>
*/
import { RocketExists } from './exists.utility';

// Verifies RocketExists's truthiness contract: true/real values exist;
// undefined, null, and false do not.
describe('Rocket Exists Utility:', () => {
// Tests.
it('Should successfully test for the existence of a value.', () => {
const nothing: undefined = undefined;
const nonExistent: null = null;
const trueMe = true;
const someValue = 'yay';
expect(RocketExists(trueMe)).toBeTruthy();
expect(RocketExists(someValue)).toBeTruthy();
expect(RocketExists(nothing)).toBeFalsy();
expect(RocketExists(nonExistent)).toBeFalsy();
// Note: false is treated as "does not exist" by this utility.
expect(RocketExists(false)).toBeFalsy();
});
});
|
<reponame>CurtisLusmore/telstra-purple-signatures<gh_stars>1-10
import React from 'react';
import { shallow } from 'enzyme';
import Button from './Button';
// Snapshot test: shallow-renders Button with representative props and
// compares the tree against the stored snapshot.
it('Renders', () => {
const tree = shallow(
<Button
onClickHandler={() => null}
classBefore={'test before'}
classAfter={'test after'}
textBefore={'text before'}
textAfter={'text after'}
/>
);
expect(tree).toMatchSnapshot();
});
|
import java.util.ArrayList;
import java.util.List;
public class ServiceFilter {
public List<String> filterServices(List<String> services, String pattern) {
List<String> matchedServices = new ArrayList<>();
for (String service : services) {
if (isMatch(service, pattern)) {
matchedServices.add(service);
}
}
return matchedServices;
}
private boolean isMatch(String service, String pattern) {
if (pattern.equals("*")) {
return true;
}
if (pattern.isEmpty()) {
return service.isEmpty();
}
if (pattern.charAt(0) == '*') {
return isMatch(service, pattern.substring(1)) || (!service.isEmpty() && isMatch(service.substring(1), pattern));
}
if (!service.isEmpty() && (pattern.charAt(0) == '.' || pattern.charAt(0) == service.charAt(0))) {
return isMatch(service.substring(1), pattern.substring(1));
}
return false;
}
public static void main(String[] args) {
ServiceFilter serviceFilter = new ServiceFilter();
List<String> services = List.of("glibc.*", "cckit.*", "books.*", "test.*", "*httpecho*");
String pattern = "test.*";
List<String> matchedServices = serviceFilter.filterServices(services, pattern);
System.out.println("Matched Services: " + matchedServices);
}
} |
#!/usr/bin/env bash
# -*- coding:utf-8 -*-
# Author: Donny You(youansheng@gmail.com)
# Generate train & val data for KITTI detection.

export PYTHONPATH='/home/donny/Projects/PytorchCV'

ORI_IMG_DIR='/home/donny/KITTI/image'
ORI_LABEL_DIR='/home/donny/KITTI/label'
SAVE_DIR='/data/DataSet/KITTI'

# Quote every expansion so paths containing spaces cannot word-split.
python kitti_det_generator.py --ori_img_dir "$ORI_IMG_DIR" \
                              --ori_label_dir "$ORI_LABEL_DIR" \
                              --save_dir "$SAVE_DIR" \
                              --val_interval 10
//
// MineVC.h
// allrichstore
//
// Created by 任强宾 on 16/10/27.
// Copyright © 2016年 allrich88. All rights reserved.
//
#import "BaseVC.h"

// View controller for the "Mine" screen, derived from BaseVC.
// NOTE(review): presumably the user/profile tab — confirm against the app's
// tab configuration.
@interface MineVC : BaseVC
@end
|
<gh_stars>1-10
package com.ziggeo;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
/**
 * Thin REST wrapper for the Ziggeo /v1/metaprofiles API; all calls go through
 * the owning application's connection.
 */
public class ZiggeoMetaProfiles {
private Ziggeo application;
public ZiggeoMetaProfiles(Ziggeo application) {
this.application = application;
}
// POST /v1/metaprofiles/ — create a meta profile from the given payload.
public JSONObject create(JSONObject data) throws IOException, JSONException {
return this.application.connect().postJSON("/v1/metaprofiles/", data);
}
// GET /v1/metaprofiles/ — list meta profiles, filtered by the given params.
public JSONObject index(JSONObject data) throws IOException, JSONException {
return this.application.connect().getJSON("/v1/metaprofiles/", data);
}
// GET /v1/metaprofiles/{token} — fetch a single meta profile.
public JSONObject get(String tokenOrKey) throws IOException, JSONException {
return this.application.connect().getJSON("/v1/metaprofiles/" + tokenOrKey + "", null);
}
// DELETE /v1/metaprofiles/{token} — remove a meta profile.
public InputStream delete(String tokenOrKey) throws IOException, JSONException {
return this.application.connect().delete("/v1/metaprofiles/" + tokenOrKey + "", null);
}
}
|
# Evaluate the 1024+0+512-NER/7 checkpoint on the WikiText-103 validation set
# (batch size 1, drop last batch), keeping only nouns/verbs/adjectives in the
# first two thirds of each example and scoring the last element.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-NER/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-NER/7-1024+0+512-N-VB-ADJ-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_and_adjectives_first_two_thirds_full --eval_function last_element_eval
#!/bin/bash
# Copyright 2019 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Rebrand a Grafana install as Knative: rewrite "Grafana - " title strings and
# swap logos/favicons (originals preserved as *.bak via cp -n).
# GRAFANA_DATA=/usr/share/grafana.knative/

# Rewrite single-quoted title strings; read filenames line-by-line (quoted)
# instead of word-splitting a backtick expansion.
find "${GRAFANA_DATA}" -type f -exec grep -l "'Grafana - '" "{}" \; |
while IFS= read -r f
do
ls -l "$f"
vim -c "%s/'Grafana - '/'Knative - '/g|wq" "$f"
done

# Same rewrite for double-quoted title strings.
find "${GRAFANA_DATA}" -type f -exec grep -l '"Grafana - "' "{}" \; |
while IFS= read -r f
do
ls -l "$f"
vim -c '%s/"Grafana - "/"Knative - "/g|wq' "$f"
done

cp -n "${GRAFANA_DATA}/public/img/grafana_icon.svg" "${GRAFANA_DATA}/public/img/grafana_icon.svg.bak"
cp grafana/img/knative.svg "${GRAFANA_DATA}/public/img/grafana_icon.svg" || exit 1
cp -n "${GRAFANA_DATA}/public/img/grafana_com_auth_icon.svg" "${GRAFANA_DATA}/public/img/grafana_com_auth_icon.svg.bak"
cp grafana/img/knative.svg "${GRAFANA_DATA}/public/img/grafana_com_auth_icon.svg" || exit 1
# Fixed: backup name was mistakenly "grafana_net_logo.svg.svg.bak"
cp -n "${GRAFANA_DATA}/public/img/grafana_net_logo.svg" "${GRAFANA_DATA}/public/img/grafana_net_logo.svg.bak"
cp grafana/img/knative.svg "${GRAFANA_DATA}/public/img/grafana_net_logo.svg" || exit 1
cp -n "${GRAFANA_DATA}/public/img/fav32.png" "${GRAFANA_DATA}/public/img/fav32.png.bak"
cp grafana/img/knative.png "${GRAFANA_DATA}/public/img/fav32.png" || exit 1
echo 'OK'
|
#!/bin/sh
# Runs etags recurisvely on all source files in the current directory.
#--languages=-python
# Excludes generated/third-party trees and assembler sources; the script's
# exit status is etags' own.
etags -R --exclude=swig --exclude=trainings --exclude=html --exclude='*.asm'
exit $?
|
import requests
from bs4 import BeautifulSoup

# Fetch the Wikipedia article on Python (network I/O; no error handling —
# requests raises on connection failure only, not on HTTP error status).
url = 'https://en.wikipedia.org/wiki/Python_(programming_language)'
response = requests.get(url)

# Parse the HTML with the stdlib parser backend.
soup = BeautifulSoup(response.text, 'html.parser')

# Extract and print the page's visible text content.
text = soup.get_text()
print(text)
package io.opensphere.wfs.gml311;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import io.opensphere.core.model.Altitude;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.data.geom.AbstractMapGeometrySupport;
import io.opensphere.mantle.data.geom.impl.DefaultMapPolylineGeometrySupport;
/**
* GML SAX handler for LineString features.
*/
public class GmlLinestringHandler extends AbstractGmlGeometryHandler
{
/** The Constant LOGGER. */
private static final Logger LOGGER = Logger.getLogger(GmlLinestringHandler.class);
/** Constant XML tag for list of GML Point Positions. */
protected static final String POSITION_LIST_TAG = "posList";
/** List of translated points. */
private final List<LatLonAlt> myLatLonList = New.list();
/**
 * Instantiates a new SAX handler for GML LineStrings.
 *
 * @param tagName the geometry tag name
 * @param isLatBeforeLon flag indicating position order in points
 */
public GmlLinestringHandler(String tagName, boolean isLatBeforeLon)
{
super(tagName, isLatBeforeLon);
}
/**
 * Builds a terrain-following polyline from the accumulated points.
 */
@Override
public AbstractMapGeometrySupport getGeometry()
{
AbstractMapGeometrySupport mgs = new DefaultMapPolylineGeometrySupport(myLatLonList);
mgs.setFollowTerrain(true, this);
return mgs;
}
/**
 * Protected accessor for the position list.
 *
 * NOTE: Only intended for inherited classes because this returns the live
 * list, not a safe copy.
 *
 * @return the list of positions
 */
protected List<LatLonAlt> getPositionList()
{
return myLatLonList;
}
/**
 * Parses a whitespace-separated posList of alternating coordinates into
 * lat/lon pairs.  A parse failure on EITHER coordinate of a pair now
 * discards the whole pair (previously the error flag was reset per token,
 * so a bad first coordinate followed by a good second one committed a
 * point with a stale/zero value).
 */
@Override
public void handleClosingTag(String tag, String value)
{
if (POSITION_LIST_TAG.equals(tag))
{
boolean isLat = isLatBeforeLong();
int positions = 0;
double lat = 0.0;
double lon = 0.0;
// Tracks a parse failure anywhere within the current pair.
boolean hasError = false;
for (String entry : value.split(" "))
{
if (StringUtils.isNotEmpty(entry)) // ignore extra spaces
{
try
{
if (isLat)
{
lat = Double.valueOf(entry).doubleValue();
}
else
{
lon = Double.valueOf(entry).doubleValue();
// Normalize longitudes given in [0, 360) to [-180, 180)
if (lon > 180.0)
{
lon = lon - 360.0;
}
}
}
catch (NumberFormatException e)
{
hasError = true;
LOGGER.warn("Parse error in posList parsing top element: \"" + entry + "\"", e);
}
isLat = !isLat;
if (++positions == 2)
{
if (!hasError)
{
myLatLonList.add(LatLonAlt.createFromDegrees(lat, lon, Altitude.ReferenceLevel.TERRAIN));
}
positions = 0;
hasError = false;
}
}
}
}
}
@Override
public void handleOpeningTag(String tag)
{
}
}
|
# require './app/models/parsers/edi/etf/find_carrier'
require 'spec_helper'
# Specs for FindCarrier#by_fein: the listener is told whether a carrier with
# the given FEIN exists, and the lookup result (carrier or nil) is returned.
describe Parsers::Edi::FindCarrier do
context 'carrier doesn\'t exist' do
it 'notifies listener of carrier not found by fein' do
listener = double
find_carrier = Parsers::Edi::FindCarrier.new(listener, Parsers::Edi::ImportCache.new)
expect(listener).to receive(:carrier_not_found)
expect(find_carrier.by_fein('4321')).to be_nil
end
end
context 'carrier exists' do
it 'notifies listener of carrier found by fein' do
# Persist a carrier whose profile carries the FEIN under test.
carrier = Carrier.new
carrier.carrier_profiles << CarrierProfile.new(fein: '1234')
carrier.save!
listener = double
find_carrier = Parsers::Edi::FindCarrier.new(listener, Parsers::Edi::ImportCache.new)
expect(listener).to receive(:carrier_found)
expect(find_carrier.by_fein('1234')).to eq carrier
end
end
end
|
<filename>packages/app/lib/apps/auth-routes/index.js
const AuthRoutes = require('./AuthRoutes');

// Dependency-injection wiring (didi-style container, by the look of the
// __init__/['type', ...] shape — confirm against the app's injector):
// __init__ forces eager instantiation of 'authRoutes', and 'type' tells the
// injector to construct AuthRoutes with `new`.
module.exports = {
  __init__: [ 'authRoutes' ],
  authRoutes: [ 'type', AuthRoutes ]
};
package ru.job4j.array;
public class ArrayModifier {
public int[] modifyArray(int[] inputArray) {
int[] modifiedArray = new int[inputArray.length];
for (int i = 0; i < inputArray.length; i++) {
if (inputArray[i] < 0) {
modifiedArray[i] = 0;
} else {
modifiedArray[i] = inputArray[i] * inputArray[i];
}
}
return modifiedArray;
}
public static void main(String[] args) {
ArrayModifier arrayModifier = new ArrayModifier();
int[] inputArray = {3, -1, 4, -2, 0};
int[] modifiedArray = arrayModifier.modifyArray(inputArray);
System.out.print("Input Array: ");
for (int num : inputArray) {
System.out.print(num + " ");
}
System.out.println();
System.out.print("Modified Array: ");
for (int num : modifiedArray) {
System.out.print(num + " ");
}
}
} |
// Custom RxJS operator distinctUntilChanged with shallowEqual comparison.
// Suppresses a value when `comparer` reports it equal to the last *emitted*
// value — `previousValue` is only updated on emission, matching rxjs's
// distinctUntilChanged semantics.
// NOTE(review): on the very first value, `comparer` is called with
// `undefined` as x — the supplied comparer must tolerate that.
const distinctUntilChanged = (comparer: (x: any, y: any) => boolean) => {
  return (source: Observable<any>) =>
    new Observable<any>((observer) => {
      let previousValue: any;
      // Returning the subscription registers it as this Observable's teardown.
      return source.subscribe({
        next(value) {
          if (!comparer(previousValue, value)) {
            observer.next(value);
            previousValue = value;
          }
        },
        error(err) {
          observer.error(err);
        },
        complete() {
          observer.complete();
        },
      });
    });
};
// Higher-order function to set default props for ControlledInput component.
// Explicit props passed by the caller win over the supplied defaults.
const withDefaultProps = <P extends object>(
  ControlledInput: React.ComponentType<P>,
  DEFAULT_PROPS: Partial<P>
): React.ComponentType<P> => {
  const ModifiedControlledInput = (props: P) => {
    const mergedProps = { ...DEFAULT_PROPS, ...props };
    return <ControlledInput {...mergedProps} />;
  };
  // Also expose the defaults via defaultProps for consumers that read them.
  // Fix: the original cast referenced `ControlledInputProps<P>`, a type that
  // is declared nowhere in this file — the value is a Partial<P>.
  (ModifiedControlledInput as any).defaultProps = DEFAULT_PROPS as Partial<P>;
  return ModifiedControlledInput;
};
# Timestamp components baked into backup archive names.
# Fix (ShellCheck SC2155): split assignment from `declare -r` so a failing
# command substitution is not masked by declare's own always-zero status.
date=$(date +%Y%m%d)
declare -r date
time=$(date +%H%M%S)
declare -r time

# NOTE(review): serverTemplate=true when SERVER_TEMPLATE is *unset* reads as
# inverted, but each flag value is paired with its matching serverDirectory
# below, and that pairing is preserved exactly as the original had it.
if [[ -z ${SERVER_TEMPLATE} ]]; then
  declare serverTemplate=true
  declare serverDirectory="${MCPANEL_DIRECTORY}/process/server"
else
  declare serverTemplate=false
  declare serverDirectory="${MCPANEL_DIRECTORY}/server/${SERVER_TEMPLATE}"
fi
# Prints usage/help for the `mcpanel backup` subcommand.
# All output goes through the framework's abs::* styled-output helpers
# (notice/writeln/info/developer are defined by the surrounding toolkit).
function mcpanel::backup::info()
{
  abs::notice "Usage: mcpanel backup ${STYLE_COMMENT}[command]"
  abs::writeln
  abs::writeln "Creates backup for Minecraft server"
  abs::writeln
  abs::writeln "Available commands:"
  abs::info "complete" "Creates a complete server backup"
  abs::info "world" "Creates backup for your world only"
  abs::info "plugins" "Creates backup for plugins"
  abs::info "help" "Shows this message"
  abs::writeln
  abs::developer "hktr92"
}
# Creates an xz-compressed tar backup of a server area plus a sha256 checksum.
# $1 - what to back up: "complete", "plugins" or "world".
# Returns 0 on success, 1 for an unknown target, or the failing command's
# exit status otherwise.
function mcpanel::toolkit::compress()
{
  local backupFor=$1
  local subdirectory="${backupFor}"
  local backupDir="${MCPANEL_DIRECTORY}/backup/${backupFor}"
  local archive="${backupDir}/${backupFor}_${date}_${time}.txz"
  local status

  case "${backupFor}" in
    complete)
      # A complete backup archives the whole server directory.
      subdirectory=
      ;;
    plugins)
      # Plugins live under <server>/plugins, which ${subdirectory} already names.
      ;;
    world)
      # The world directory is whatever `level-name` names in server.properties.
      # (The original branched on ${serverTemplate} here, but both branches ran
      # the identical command -- and `[[ "${serverTemplate}" ]]` was always true
      # anyway, since "true" and "false" are both non-empty strings.)
      subdirectory=$(grep 'level-name=' "${serverDirectory}/server.properties" | cut -d'=' -f2)
      # Bukkit may relocate worlds via settings.world-container.
      local worldContainer
      worldContainer=$(yq r "${serverDirectory}/bukkit.yml" "settings.world-container")
      if [[ "${worldContainer}" != "null" ]]; then
        subdirectory="${worldContainer}/${subdirectory}"
      fi
      ;;
    *)
      abs::error "Invalid directory to compress: ${STYLE_COMMENT}${backupFor}"
      return 1
      ;;
  esac

  abs::notice "Creating backup for ${STYLE_COMMENT}${backupFor}"
  if [[ ! -d "${backupDir}" ]]; then
    abs::writeln "Creating directory for backups"
    mkdir -p "${backupDir}"
    status=$?
    if [[ ${status} -ne 0 ]]; then
      # Fix: capture the failing command's status *before* calling abs::error;
      # the original's `return $?` returned abs::error's status instead.
      abs::error "Unable to create backup directory!"
      return ${status}
    fi
  fi

  abs::writeln "Creating backup, using ${STYLE_COMMENT}xz${STYLE_DEFAULT} compression"
  tar Jcf "${archive}" "${serverDirectory}/${subdirectory}"
  status=$?
  if [[ ${status} -ne 0 ]]; then
    abs::error "Unable to create backup for ${STYLE_COMMENT}${backupFor}"
    return ${status}
  fi

  abs::writeln "Creating archive checksum for integrity checking"
  sha256sum "${archive}" > "${archive}.sha256sum"
  status=$?
  if [[ ${status} -ne 0 ]]; then
    abs::error "Unable to create archive checksum!"
    return ${status}
  fi

  abs::success "Backup for ${backupFor} successfully created!"
  return 0
}
# Entry point for `mcpanel backup <action>`: dispatches known backup targets
# to the compressor and everything else (including "help") to usage output.
function mcpanel::backup::main()
{
  local action=$1

  if [[ "${action}" == "complete" || "${action}" == "plugins" || "${action}" == "world" ]]; then
    mcpanel::toolkit::compress "${action}"
    return $?
  fi

  # "help" and any unrecognized action both fall through to the usage text.
  mcpanel::backup::info
  return 0
}
|
<filename>client-base/src/main/webapp/html/javascript/filter-knetwork.js
/*
* Function to check numberOfNodes in knetwork json's metadata (allGraphData) and if over 3000, delete all nodes/edges
* with conceptDisplay:none and relationDisplay:none from graphJson, and also use their pid to remove same from metadata.
* @returns revised knetwork blob for large knets before launching KnetMaps
*/
/*
 * Function to check numberOfNodes in knetwork json's metadata (allGraphData) and if over 3000, delete all nodes/edges
 * with conceptDisplay:none and relationDisplay:none from graphJson, and also use their pid to remove same from metadata.
 * @returns revised knetwork blob for large knets before launching KnetMaps
 */
function filterKnetworkJson(json_blob) {
  console.log("knetworkJSON received from server/network API.");
  //console.log("knetworkJSON from server..."+ json_blob);
  // SECURITY NOTE(review): the blob is *executed* to materialize the
  // `graphJSON` and `allGraphData` variables it declares. Safe only if the
  // network API is fully trusted — confirm, or parse instead of eval.
  eval(json_blob); // gets the 2 JS vars from it to be available in local scope
  //console.dir(graphJSON); // graphJSON
  //console.dir(allGraphData); // metadata JSON that hold numberOfCOncepts too
  var graphjson2_nodes= [], graphjson2_edges=[], graphJSON2= {}; // 2 empty new jsonArrays and 1 jsonObject
  var retained_ids= [];
  // Only large networks are filtered; small ones pass through unchanged.
  if(allGraphData.ondexmetadata.numberOfConcepts > 3000) {
    console.log("numberOfConcepts in this knetwork= "+ allGraphData.ondexmetadata.numberOfConcepts);
    console.log("filter large knetwork json.");
    // filter out nodes/edges from graphJSON with conceptDisplay/relationDisplay:none, and keep their id's to later filter allGraphData too.
    // for each node in nodes, check conceptDisplay:none and if yes, delete the node, and if no, retain id.
    // for each edge in in edges, check relationDisplay:none and yes, delete the edge and if no, retain id.
    // NOTE(review): node ids and edge ids share the single retained_ids list;
    // if the two id spaces can overlap, a hidden concept could be retained in
    // the metadata via an edge with the same id — confirm ids are globally unique.
    for(var i=0; i < graphJSON.nodes.length; i++) {
      if(graphJSON.nodes[i].data.conceptDisplay === "element") {
        graphjson2_nodes.push(graphJSON.nodes[i]); // insert node in new jsonArray
        retained_ids.push(graphJSON.nodes[i].data.id); // retain ID
      }
    }
    for(var j=0; j < graphJSON.edges.length; j++) {
      if(graphJSON.edges[j].data.relationDisplay === "element") {
        graphjson2_edges.push(graphJSON.edges[j]); // insert edge in new jsonArray
        retained_ids.push(graphJSON.edges[j].data.id); // retain ID
      }
    }
    // make new graphJSON object with only visible nodes/edges.
    graphJSON2= {"nodes": graphjson2_nodes, "edges": graphjson2_edges };
    //console.log("retained_ids to filter allGraphData: "+ retained_ids);
    // now filter metadata json (allGraphData).
    var allGraphData2= {}, omd= {}, agd2_nodes=[], agd2_edges= []; // 3 empty new jsonArrays and 1 jsonObject
    for(var k=0; k < allGraphData.ondexmetadata.concepts.length; k++) {
      if(retained_ids.includes(allGraphData.ondexmetadata.concepts[k].id)) { // insert concept in new jsonArray
        agd2_nodes.push(allGraphData.ondexmetadata.concepts[k]);
      }
    }
    for(var l=0; l < allGraphData.ondexmetadata.relations.length; l++) {
      if(retained_ids.includes(allGraphData.ondexmetadata.relations[l].id)) { // insert relation in new jsonArray
        agd2_edges.push(allGraphData.ondexmetadata.relations[l]);
      }
    }
    // make new allGraphData object with only visible nodes/edges metadata.
    // NOTE(review): numberOfConcepts/numberOfRelations are copied unchanged,
    // so the counts still describe the unfiltered network — confirm KnetMaps
    // tolerates that.
    omd= {"graphName": allGraphData.ondexmetadata.graphName, "concepts": agd2_nodes, "relations": agd2_edges, "numberOfConcepts": allGraphData.ondexmetadata.numberOfConcepts, "numberOfRelations": allGraphData.ondexmetadata.numberOfRelations, "version": allGraphData.ondexmetadata.version };
    allGraphData2= {"ondexmetadata": omd};
    console.log("filtered new knetworkJSON generated.");
    //console.dir(graphJSON2);
    //console.dir(allGraphData2);
    // new filtered output knetwork blob: re-serialize as the same two JS var
    // declarations the caller eval()s downstream.
    json_blob= "var graphJSON= "+ JSON.stringify(graphJSON2) +";\n\n"+"var allGraphData= "+ JSON.stringify(allGraphData2) +";";
    //console.log(json_blob); // new json contents with nested JS vars
  }
  return json_blob;
};
|
import Router from 'koa-router';
import mongoose from 'mongoose';
import userModel from './../models/user.js';

const router = Router();

// NOTE(review): despite the /signin name, this handler *creates and saves*
// a new user document — signup behavior. Confirm the route name/intent.
// SECURITY NOTE(review): credentials arrive in the query string of a GET
// request (visible in server logs / proxies / browser history) and the
// password is stored as received — verify the user model hashes it.
router.get('/signin', async (ctx, next) => {
  var userEntity = new userModel( {name: ctx.query.loginNo, passwd: ctx.query.passwd});
  console.log("user: ", userEntity);
  // Fire-and-forget save: the 200 below is set before (and regardless of)
  // the save outcome.
  userEntity.save((err)=>{
    if(!err)
      console.log('save success');
    else
      console.log('err ', err);
  });
  ctx.status = 200;
});

export default router;
|
<filename>src/constants/AuthActions.js
// Redux action-type constants for the authentication flow.
// Each async operation (login / logout / signup) has a FETCHING_* "request
// in flight" marker plus *_SUCCESSFUL and *_FAIL terminal states.
export const FETCHING_LOGIN = "FETCHING_LOGIN";
export const LOGIN_SUCCESSFUL = "LOGIN_SUCCESSFUL";
export const LOGIN_FAIL = "LOGIN_FAIL";
export const FETCHING_LOGOUT = "FETCHING_LOGOUT";
export const LOGOUT_SUCCESSFUL = "LOGOUT_SUCCESSFUL";
export const LOGOUT_FAIL = "LOGOUT_FAIL";
export const FETCHING_SIGNUP = "FETCHING_SIGNUP";
export const SIGNUP_SUCCESSFUL = "SIGNUP_SUCCESSFUL";
export const SIGNUP_FAIL = "SIGNUP_FAIL";
import numpy as np
import tensorflow as tf
from environment import connect_four # connect_four() creates a new Connect Four game
tf.compat.v1.disable_v2_behavior()
# create our reinforcement learning agent
class Agent:
    """Keras-based agent mapping Connect Four board states to column choices.

    Experiences (state, one-hot action, reward) are buffered and fitted in
    one batch by :meth:`train`.
    """

    def __init__(self, learning_rate):
        """:param learning_rate: Adam optimizer learning rate."""
        self.learning_rate = learning_rate
        self.states = []      # observed board states
        self.q_values = []    # one-hot encodings of the actions taken
        self.rewards = []     # rewards received (buffered but unused by fit)
        self.action_space = range(7)  # one action per board column
        self.build_model()

    def build_model(self):
        """Build and compile the 6x7x1 board -> 7-way softmax policy net."""
        self.model = tf.keras.Sequential([
            tf.keras.layers.Dense(7, input_shape=(6, 7, 1), activation='relu'),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(7, activation='relu'),
            tf.keras.layers.Dense(7, activation='softmax')
        ])
        self.model.compile(
            # Fix: `lr=` is the deprecated alias; the supported keyword is
            # `learning_rate=`.
            optimizer=tf.keras.optimizers.Adam(learning_rate=self.learning_rate),
            loss=tf.keras.losses.mean_squared_error,
            metrics=['accuracy']
        )

    def register_experience(self, state, action, reward):
        """Buffer one (state, action, reward) transition for training."""
        self.states.append(state)
        action_one_hot = np.zeros(7)
        action_one_hot[action] = 1.0
        self.q_values.append(action_one_hot)
        self.rewards.append(reward)

    def train(self):
        """Fit the model on all buffered states/targets in one pass."""
        self.model.fit(np.array(self.states), np.array(self.q_values), verbose=0)

    def predict(self, state):
        """Return the index of the highest-scoring action for `state`."""
        pred = self.model.predict(np.array([state]))[0]
        return np.argmax(pred)
def play_game(agent):
    """Play one full game of Connect Four, training `agent` afterwards.

    :param agent: Agent providing predict/register_experience/train
    :return: total reward accumulated over the game
    """
    env = connect_four()
    state = env.get_state()  # get the initial state of the game
    total_reward = 0
    done = False
    while not done:
        action = agent.predict(state)  # predict an action from the current state
        next_state, reward, done, _ = env.step(action)
        total_reward += reward
        # Fix: record the state the action was *chosen in*. The original
        # overwrote `state` with the post-step state before registering,
        # so every experience paired an action with its successor state.
        agent.register_experience(state, action, reward)
        state = next_state
    agent.train()  # train the agent on the buffered experiences
    return total_reward
// param.rs

/// A named integer parameter.
///
/// Fields are `pub` because sibling modules read them directly
/// (`reg.rs` matches on `param.name`, `main.rs` prints both fields).
/// They were private in the original, which fails to compile from those
/// modules (E0616: field of `Parameter` is private).
pub struct Parameter {
    pub name: String,
    pub value: i32,
}

impl Parameter {
    /// Creates a parameter with the given name and initial value.
    pub fn new(name: String, value: i32) -> Parameter {
        Parameter { name, value }
    }

    /// Overwrites the stored value.
    pub fn update_value(&mut self, new_value: i32) {
        self.value = new_value;
    }
}
// reg.rs
use crate::param::Parameter;
pub struct Register {
name: String,
parameters: Vec<Parameter>,
}
impl Register {
pub fn new(name: String) -> Register {
Register {
name,
parameters: Vec::new(),
}
}
pub fn add_parameter(&mut self, parameter: Parameter) {
self.parameters.push(parameter);
}
pub fn get_parameter(&self, name: &str) -> Option<&Parameter> {
self.parameters.iter().find(|param| param.name == name)
}
}
// main.rs
mod param;
mod reg;
use param::Parameter;
use reg::Register;
fn main() {
let param1 = Parameter::new("param1".to_string(), 10);
let param2 = Parameter::new("param2".to_string(), 20);
let mut reg1 = Register::new("register1".to_string());
reg1.add_parameter(param1);
reg1.add_parameter(param2);
if let Some(param) = reg1.get_parameter("param1") {
println!("Found parameter: {} = {}", param.name, param.value);
}
if let Some(param) = reg1.get_parameter("param2") {
println!("Found parameter: {} = {}", param.name, param.value);
}
if let Some(param) = reg1.get_parameter("param3") {
println!("Found parameter: {} = {}", param.name, param.value);
} else {
println!("Parameter 'param3' not found");
}
let mut param1_ref = reg1.get_parameter("param1").unwrap();
param1_ref.update_value(15);
println!("Updated parameter value: {} = {}", param1_ref.name, param1_ref.value);
} |
<filename>base/src/main/java/com/example/instantapp/model/RelatedEntity.java<gh_stars>1-10
package com.example.instantapp.model;

import io.objectbox.annotation.Entity;
import io.objectbox.annotation.Id;

/**
 * Minimal ObjectBox entity holding only its database identity.
 * The {@code @Id} field is assigned by ObjectBox on first put.
 */
@Entity
public class RelatedEntity {

    @Id
    private long id;

    /** @return the ObjectBox-assigned id (0 until first persisted) */
    public long getId() {
        return id;
    }

    /** @param id the ObjectBox id; normally set by the framework */
    public void setId(long id) {
        this.id = id;
    }
}
|
<gh_stars>1-10
#include <algorithm>
#include <filesystem>
#include <string_view>
#include <spdlog/spdlog.h>
#include <helpers/AsyncFileReader.h>
// Serves static files over uWebSockets responses, with one AsyncFileReader
// cached per URL discovered under `root`.
struct AsyncFileStreamer {

    std::map<std::string_view, AsyncFileReader *> asyncFileReaders;
    std::string root;

    // Scans `root` once up front and builds the URL -> reader cache.
    AsyncFileStreamer(std::string root)
        : root(root) {
        // for all files in this path, init the map of AsyncFileReaders
        spdlog::info("root : {}", root);
        updateRootCache();
    }

    // (Re)populates the cache from the directory tree.
    // NOTE(review): the `new char[]` key buffers and `new AsyncFileReader`
    // objects are never freed — fine if this object lives for the whole
    // process, a leak otherwise. Also, `memcpy` needs <cstring> and the map
    // needs <map>; neither is included here — presumably pulled in
    // transitively. Confirm.
    void updateRootCache() {
        for (auto &p : std::filesystem::recursive_directory_iterator(root)) {
            if (std::filesystem::is_directory(p.path())) {
                continue;
            }
            // Key is the path relative to root, with a leading '/' (generic,
            // i.e. forward-slash, form on all platforms).
            std::string url = "/" + std::filesystem::relative(p.path(), std::filesystem::path(root)).generic_string();
            spdlog::info("url available in root : {}", url);
            char *key = new char[url.length()];
            memcpy(key, url.data(), url.length());
            asyncFileReaders[std::string_view(key, url.length())] = new AsyncFileReader(p.path().string());
        }
    }

    // Looks up `url` (mapping "/" to "/index.html") and streams it to `res`.
    // Unknown URLs are only logged; the response is left untouched.
    template <bool SSL>
    void streamFile(uWS::HttpResponse<SSL> *res, std::string_view url) {
        auto it = url == "/" ? asyncFileReaders.find("/index.html") : asyncFileReaders.find(url);
        if (it == asyncFileReaders.end()) {
            spdlog::info("Did not find url: {}", url);
        } else {
            streamFile(res, it->second);
        }
    }

    // Streams whatever is cached, then asynchronously requests the rest.
    // Re-enters itself from the reader callback and from onWritable until
    // the whole file has been written or the response is aborted.
    template <bool SSL>
    static void streamFile(uWS::HttpResponse<SSL> *res, AsyncFileReader *asyncFileReader) {
        // Peek from cache
        std::string_view chunk = asyncFileReader->peek(res->getWriteOffset());
        if (!chunk.length() || res->tryEnd(chunk, asyncFileReader->getFileSize()).first) {
            // Cache miss, or the cached chunk was fully written but more of
            // the file remains: fetch the next chunk asynchronously.
            if (chunk.length() < asyncFileReader->getFileSize()) {
                asyncFileReader->request(res->getWriteOffset(), [res, asyncFileReader](std::string_view chunk) {
                    // We were aborted for some reason
                    if (!chunk.length()) {
                        res->close();
                    } else {
                        AsyncFileStreamer::streamFile(res, asyncFileReader);
                    }
                });
            }
        } else {
            // We failed writing everything, so let's continue when we can
            res->onWritable([res, asyncFileReader](int offset) {
                AsyncFileStreamer::streamFile(res, asyncFileReader);
                return false;
            })
            ->onAborted([]() {
                spdlog::info("Aborted request");
            });
        }
    }
};
|
#!/bin/bash
# Generate a sequence from m to n, m defaults to 1.
# Prints the numbers space-separated on one line; returns 2 on bad usage.
seq ()
{
  declare -i lo hi i    # integer attribute; declare inside a function is local
  local _SEQ
  case $# in
    1)  seq 1 "$1" ; return $? ;;
    2)  lo=$1 hi=$2
        i=$lo _SEQ=""
        while (( i <= hi )); do
          _SEQ="${_SEQ}$i "
          i=$((i + 1))
        done
        # Fix: strip the *trailing* separator with ${_SEQ% }. The original
        # used ${_SEQ# } (leading strip), which never matched and left a
        # trailing space on every output line.
        echo "${_SEQ% }"
        return 0 ;;
    *)  echo "seq: usage: seq [low] high" 1>&2 ; return 2 ;;
  esac
}
# like the APL `iota' function (or at least how I remember it :-)
# iota N == seq 1 N; any other argument count is a usage error (status 2).
iota()
{
  if [ $# -eq 1 ]; then
    seq 1 "$1"
    return $?
  fi
  echo "iota: usage: iota high" 1>&2
  return 2
}
# "32953 5" "32958 5" "32976 5" "32984 5" "33035 5" "33045 5" "33064 5" "33082 5" "33135 5"
# Third-level (group) fixed-effects FSL analysis: for each contrast below,
# substitute the 19 subject-level cope images into the averse_s19.fsf
# template, write a per-contrast .fsf, and run FEAT on it.
MAINDIR=/mnt/hgfs/data/SocReward.01/Analysis/FSL/HIGHER_LEVEL
MAINOUTPUT=${MAINDIR}/3rd_level_copes/MODEL4-TD_averse_flame_s19
ANALYZED=${MAINOUTPUT}
mkdir -p ${MAINOUTPUT}
# Each list entry is "<contrast-name> <cope-number>"; `set --` splits it.
for LIST in "face-money 1" "money-face 2" "hot-not 3" "hot-neutralf 4" "gain-loss 5" \
"gain-neutralm 6" "pos-neg 7" "not-hot 8" "not-neutralf 9" "loss-gain 10" "loss-neutralm 11" \
"neg-pos 12" "neutralf-hot 13" "neutralf-not 14" "neutralm-gain 15" "neutralm-loss 16"; do
set -- $LIST #parses list
CON_NAME=$1
RUN=$2
OUTPUT=${MAINOUTPUT}/COPE${RUN}_${CON_NAME}
# One second-level cope per subject; the numeric prefix is the subject id.
INPUT01=${MAINDIR}/33754_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT02=${MAINDIR}/33642_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT03=${MAINDIR}/32953_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT04=${MAINDIR}/32958_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT05=${MAINDIR}/32976_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT06=${MAINDIR}/32984_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT07=${MAINDIR}/33035_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT08=${MAINDIR}/33045_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT09=${MAINDIR}/33771_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT10=${MAINDIR}/33082_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT11=${MAINDIR}/33135_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT12=${MAINDIR}/33757_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT13=${MAINDIR}/33302_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT14=${MAINDIR}/33402_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT15=${MAINDIR}/33456_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT16=${MAINDIR}/33467_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT17=${MAINDIR}/33732_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT18=${MAINDIR}/33744_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
INPUT19=${MAINDIR}/33746_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
#INPUT20=${MAINDIR}/33754_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
#INPUT21=${MAINDIR}/33757_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
#INPUT22=${MAINDIR}/33771_2nd_lvl_model4_td_denoised.gfeat/cope${RUN}.feat/stats/cope1.nii.gz
echo $OUTPUT
# Fill the template: @-delimited sed (paths contain '/') replaces the OUTPUT
# and INPUTnn placeholders in averse_s19.fsf.
for i in 'averse_s19.fsf'; do
sed -e 's@OUTPUT@'$OUTPUT'@g' \
-e 's@INPUT01@'$INPUT01'@g' \
-e 's@INPUT02@'$INPUT02'@g' \
-e 's@INPUT03@'$INPUT03'@g' \
-e 's@INPUT04@'$INPUT04'@g' \
-e 's@INPUT05@'$INPUT05'@g' \
-e 's@INPUT06@'$INPUT06'@g' \
-e 's@INPUT07@'$INPUT07'@g' \
-e 's@INPUT08@'$INPUT08'@g' \
-e 's@INPUT09@'$INPUT09'@g' \
-e 's@INPUT10@'$INPUT10'@g' \
-e 's@INPUT11@'$INPUT11'@g' \
-e 's@INPUT12@'$INPUT12'@g' \
-e 's@INPUT13@'$INPUT13'@g' \
-e 's@INPUT14@'$INPUT14'@g' \
-e 's@INPUT15@'$INPUT15'@g' \
-e 's@INPUT16@'$INPUT16'@g' \
-e 's@INPUT17@'$INPUT17'@g' \
-e 's@INPUT18@'$INPUT18'@g' \
-e 's@INPUT19@'$INPUT19'@g' <$i> ${ANALYZED}/3rdLvlFixed_${RUN}_${CON_NAME}.fsf
done
#runs the analysis using the newly created fsf file
feat ${ANALYZED}/3rdLvlFixed_${RUN}_${CON_NAME}.fsf
done
|
#!/usr/bin/env bash
set -e
# For snapshots, please specify the full version (with date and time)
cdist_version="0.1.0-20201002.083956-2"
cdist_path_version="$cdist_version"
# Snapshot versions contain a dash (e.g. 0.1.0-20201002.083956-2) and live
# under a <base>-SNAPSHOT directory in the Maven repo.
# Fix: the original test `[ -n "${cdist_version#*-}" ]` is non-empty even for
# dash-less release versions (an unmatched pattern leaves the string intact),
# so plain releases were wrongly rewritten to -SNAPSHOT paths. Test for the
# dash itself instead.
case "$cdist_version" in
  *-*) cdist_path_version="${cdist_version%%-*}-SNAPSHOT" ;;
esac
url="https://maven.enginehub.org/repo/org/enginehub/crowdin/crowdin-distributor/$cdist_path_version/crowdin-distributor-$cdist_version-bundle.zip"
[ -d ./build ] || mkdir ./build
curl "$url" >./build/cdist.zip
(cd ./build && unzip -o cdist.zip)
# CROWDIN_DISTRIBUTOR_TOKEN is set by CI
export CROWDIN_DISTRIBUTOR_ON_CHANGE="true"
export CROWDIN_DISTRIBUTOR_PROJECT_ID="360697"
export CROWDIN_DISTRIBUTOR_MODULE="worldedit-lang"
# Artifactory & Build Number is set by CI
export CROWDIN_DISTRIBUTOR_OPTS="--enable-preview"
"./build/crowdin-distributor-$cdist_path_version/bin/crowdin-distributor"
|
<filename>api/migrations/20200603153526-bcmiDataLoad.js
'use strict';
let dbm;
let type;
let seed;
const https = require('https');
// register the mine schema/model
const ObjectID = require('mongodb').ObjectID;
const bcmiUrl = 'https://mines.empr.gov.bc.ca'; // prod api url
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 * Stashes the injected handles in module-level variables for later hooks.
 */
exports.setup = function(options, seedLink) {
  dbm = options.dbmigrate;
  type = dbm.dataType;
  seed = seedLink;
};
// Migration: copy type/description/links from BCMI's public API onto the
// matching NRPTI 'Mine' records (matched by exact name in updateMine).
exports.up = async function (db) {
  let mClient;
  try {
    mClient = await db.connection.connect(db.connectionString, { native_parser: true });
    const nrpti = mClient.collection('nrpti');
    // API call to pull data from BCMI
    // could also fetch from NRPTI first: require('../src/models/bcmi/mine').find().then(...);
    // then match to BCMI, just doing it this way so we can match on name, rather then code
    console.log('Fetching all major mines in BCMI...');
    // 30 results for major, 74 for published. Note, published does not return links so if we're supposed to use
    // published, we'll need to do a follow up call to /api/project/bycode/<mineData.code> to get the links
    const publishedMines = await getRequest(bcmiUrl + '/api/projects/published');
    console.log('Located ' + publishedMines.length + ' mines. Batching updates...');
    const promises = [];
    // build up a collection of all requests (detail fetches are sequential;
    // only the NRPTI updates are batched into Promise.all below)
    for (let i = 0; i < publishedMines.length; i++) {
      try {
        // The published endpoint doesn't have links, refresh the mineData object
        let mineData = await getRequest(bcmiUrl + '/api/project/bycode/' + publishedMines[i].code);
        promises.push(updateMine(mineData, nrpti));
      } catch(err) {
        console.error('Could not find ' + publishedMines[i]._id + ' : ' + publishedMines[i].name);
        // dont rethrow, we'll just ignore this one as a failure and check the rest
      }
    }
    // fire off the requests and wait
    let results = await Promise.all(promises);
    let updatedCount = 0;
    let notFoundCount = 0;
    // updateMine resolves {notfound: true, data} for misses; tally both kinds.
    results.forEach(result => {
      if (Object.prototype.hasOwnProperty.call(result, 'notfound')) {
        notFoundCount++;
        console.log('Could not find ' + result.data._id + ' : ' + result.data.name);
      } else {
        updatedCount++;
      }
    });
    // we're done
    console.log('BCMI migration complete.');
    console.log('Of ' + publishedMines.length + ' mines in BCMI, ' + notFoundCount + ' could not be found in NRPTI, and ' + updatedCount + ' were updated.');
  } catch(err) {
    console.error('Error on BCMI dataload: ' + err);
  }
  // NOTE(review): if connect() itself threw, mClient is still undefined here
  // and mClient.close() raises a TypeError — consider guarding.
  mClient.close();
};
// No rollback: exports.up only $sets fields on existing records without
// preserving the previous values, so `down` is a deliberate no-op.
exports.down = function(db) {
  return null;
};

exports._meta = {
  "version": 1
};
/**
 * Copies BCMI fields onto the single NRPTI Mine record whose name matches
 * mineData.name exactly. Zero or multiple matches are skipped and reported
 * back as { notfound: true, data: mineData }.
 *
 * @param {Object} mineData BCMI project payload (name, type, description, externalLinks)
 * @param {Object} nrpti the 'nrpti' MongoDB collection
 * @returns update promise on success, or the notfound marker object
 */
async function updateMine(mineData, nrpti) {
  let nrptiMines = await nrpti.find({ _schemaName: 'Mine', name: mineData.name}).toArray();
  // should have 1 result returned. Any more or less, just ignore this update
  if (nrptiMines.length === 1) {
    let externalLinks = [];
    // format the links from the data — flatten {link} objects to bare URLs
    for(const idx in mineData.externalLinks) {
      const link = mineData.externalLinks[idx];
      externalLinks.push(link.link);
    }
    // NOTE(review): collection.update() is deprecated in newer node drivers
    // in favor of updateOne() — confirm the driver version before changing.
    return nrpti.update({ _id: new ObjectID(nrptiMines[0]._id) }, {
      $set: {
        type: mineData.type,
        summary: mineData.description, // BCMI doesn't have a "summary" attribute
        description: mineData.description,
        links: externalLinks,
        updatedBy: 'NRPTI BCMI Data Migration',
        sourceSystemRef: 'mem-admin'
      }
    });
  }
  return { notfound: true, data: mineData };
}
/**
 * GET `url` over HTTPS and resolve with the JSON-parsed response body.
 * Rejects on non-2xx status, transport error, or unparseable JSON.
 *
 * @param {string} url absolute https:// URL
 * @returns {Promise<*>} the parsed JSON payload
 */
function getRequest(url) {
  return new Promise(function(resolve, reject) {
    const req = https.get(url, function(res) {
      if (res.statusCode < 200 || res.statusCode >= 300) {
        return reject(new Error('statusCode=' + res.statusCode));
      }
      const chunks = [];
      res.on('data', function(chunk) {
        chunks.push(chunk);
      });
      res.on('end', function() {
        let body;
        try {
          body = JSON.parse(Buffer.concat(chunks).toString());
        } catch(e) {
          // Fix: the original fell through to resolve(body) after reject(e);
          // a promise only settles once so it "worked", but the control flow
          // was wrong — return explicitly on parse failure.
          return reject(e);
        }
        resolve(body);
      });
    });
    req.on('error', function(err) {
      reject(err);
    });
    // https.get() calls req.end() itself; no explicit end needed.
  });
}
|
<gh_stars>1-10
package com.github.shaquu;
import com.github.shaquu.logger.Logger;
import com.github.shaquu.networking.IpPort;
import com.github.shaquu.networking.tcp.TCPServer;
import com.github.shaquu.networking.udp.UDPClientServer;
import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
 * Entry point for the torro file-sharing node.
 *
 * Args: [0] debug logging flag, [1] use UDP (true) or TCP (false),
 * [2] own port, [3] shared folder path, [4..] peer ports on localhost.
 * NOTE(review): the example command in the comment below passes only ports,
 * so args[3] there ('10001') would be used as the folder — confirm the
 * intended invocation.
 */
public class Main {
    //for udp with debug
    //java -jar target/torro-1.1-SNAPSHOT.jar true true 10001 10001 10002 10003
    public static void main(String[] args) throws Exception {
        Logger.DEBUG = Boolean.parseBoolean(args[0]);
        boolean udp = Boolean.parseBoolean(args[1]);
        int myPort = Integer.parseInt(args[2]);
        // Probe the port before binding anything heavyweight.
        if (!available(myPort)) {
            System.out.println("Port not available");
            System.exit(0);
        }
        String folder = args[3];
        List<Integer> theirPorts = new ArrayList<>();
        for (int i = 4; i < args.length; i++) {
            theirPorts.add(Integer.parseInt(args[i]));
        }
        try {
            if (udp) {
                UDPClientServer udpClientServer = new UDPClientServer(myPort, folder);
                for (int port : theirPorts) {
                    udpClientServer.addClient(new IpPort(InetAddress.getByName("localhost"), port));
                }
                udpClientServer.start();
                udpClientServer.stop();
            } else {
                TCPServer tcpServer = new TCPServer(myPort, folder);
                // Background connector: keeps retrying every peer port until
                // it connects, resetting the iterator once the list is
                // exhausted. Runs forever (daemonless busy-retry loop).
                new Thread(() -> {
                    List<Integer> toJoin = new ArrayList<>(theirPorts);
                    Iterator<Integer> iterator = toJoin.iterator();
                    while (true) {
                        boolean connected = false;
                        int port = iterator.next();
                        try {
                            connected = tcpServer.connect(port);
                        } catch (IOException e) {
                            //System.out.println("Cannot connect " + e.getLocalizedMessage());
                        }
                        if (connected) {
                            iterator.remove();
                        }
                        try {
                            Thread.sleep(TCPServer.WAIT_TIME * 50);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                        // Restart the scan when the working copy is drained.
                        if (toJoin.size() == 0 || !iterator.hasNext()) {
                            toJoin = new ArrayList<>(theirPorts);
                            iterator = toJoin.iterator();
                        }
                    }
                }).start();
                tcpServer.start();
                tcpServer.stop();
            }
        } catch (SocketException e) {
            e.printStackTrace();
            return;
        }
        System.exit(0);
    }

    /**
     * Checks availability by attempting a UDP bind on the port.
     * NOTE(review): this probes UDP only, yet is also used before starting
     * the TCP server — a busy TCP port would not be detected. Confirm.
     */
    private static boolean available(int port) {
        try {
            DatagramSocket datagramSocket = new DatagramSocket(port);
            datagramSocket.close();
        } catch (SocketException e) {
            return false;
        }
        return true;
    }
}
|
#!/bin/bash

# Strict mode, fail on any error
set -euo pipefail

# Resolve the HDInsight Kafka connection settings; the sourced helper is
# expected to set KAFKA_BROKERS, KAFKA_SASL_MECHANISM,
# KAFKA_SECURITY_PROTOCOL and KAFKA_SASL_JAAS_CONFIG.
source ../components/azure-hdinsight/get-hdinsight-kafka-brokers.sh

# Re-publish the values under the KAFKA_OUT_SEND_* names used downstream.
# NOTE(review): these are plain shell variables, not `export`ed — confirm
# consumers source this script rather than spawn it as a child process.
KAFKA_OUT_SEND_BROKERS=$KAFKA_BROKERS
KAFKA_OUT_SEND_SASL_MECHANISM=$KAFKA_SASL_MECHANISM
KAFKA_OUT_SEND_SECURITY_PROTOCOL=$KAFKA_SECURITY_PROTOCOL
KAFKA_OUT_SEND_JAAS_CONFIG=$KAFKA_SASL_JAAS_CONFIG
|
<reponame>smagill/opensphere-desktop
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 02:04:22 PM MST
//
package net.opengis.gml._311;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for RelatedTimeType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="RelatedTimeType">
* <complexContent>
* <extension base="{http://www.opengis.net/gml}TimePrimitivePropertyType">
* <attribute name="relativePosition">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="Before"/>
* <enumeration value="After"/>
* <enumeration value="Begins"/>
* <enumeration value="Ends"/>
* <enumeration value="During"/>
* <enumeration value="Equals"/>
* <enumeration value="Contains"/>
* <enumeration value="Overlaps"/>
* <enumeration value="Meets"/>
* <enumeration value="OverlappedBy"/>
* <enumeration value="MetBy"/>
* <enumeration value="BegunBy"/>
* <enumeration value="EndedBy"/>
* </restriction>
* </simpleType>
* </attribute>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated class (see header above): edits here are lost on schema
// regeneration, so this class intentionally stays a bare attribute holder.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "RelatedTimeType")
public class RelatedTimeType
    extends TimePrimitivePropertyType
{

    // Temporal relation name restricted by the schema to the enumeration in
    // the class javadoc (Before/After/Begins/...); null when unspecified.
    @XmlAttribute(name = "relativePosition")
    protected String relativePosition;

    /**
     * Gets the value of the relativePosition property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getRelativePosition() {
        return relativePosition;
    }

    /**
     * Sets the value of the relativePosition property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setRelativePosition(String value) {
        this.relativePosition = value;
    }
}
|
#!/bin/bash

# Appends a TLS-secured (gtls driver, x509/name peer auth) TCP forwarding
# action to the rsyslog configuration, shipping logs to remote.system.com:6514.
# NOTE(review): running this twice appends the block twice — no idempotency
# guard. Requires root to write /etc/rsyslog.conf; rsyslog must be restarted
# to pick the change up.
cat >> /etc/rsyslog.conf <<EOF
action(type="omfwd"
protocol="tcp"
Target="remote.system.com"
port="6514"
StreamDriver="gtls"
StreamDriverMode="1"
StreamDriverAuthMode="x509/name"
streamdriver.CheckExtendedKeyPurpose="on")
EOF
|
// Blaze/Meteor helper: reports whether the app is rendered inside an
// <iframe>. The guard keeps a re-execution of this file from redefining the
// function or registering the helper twice.
// NOTE(review): `iframe` must already be declared in an enclosing scope for
// the `=== undefined` test not to throw — confirm where it is declared.
if (iframe === undefined) {
  iframe = function () {
    try {
      // Cross-origin parents make window.top access throw; being unable to
      // compare still means we are framed.
      return window.self !== window.top;
    } catch (e) {
      return true;
    }
  };
  UI.registerHelper('iframe', function () {
    return iframe();
  });
}
|
def std_dev(array):
    """Return the population standard deviation of ``array``.

    Divides by ``len(array)`` (population form, as the original did), so an
    empty input raises ZeroDivisionError — also unchanged.

    :param array: non-empty sequence of numbers
    :return: the population standard deviation as a float
    """
    import math  # local import kept so the enclosing module needs no change

    mean = sum(array) / len(array)
    # math.fsum keeps the accumulation of squared differences numerically stable.
    sum_of_squared_differences = math.fsum((element - mean) ** 2 for element in array)
    return math.sqrt(sum_of_squared_differences / len(array))
<filename>testapp/types.go
package testapp

import "encoding/json"

// Migrated from github/kava/app/genesis.go
// GenesisState represents the genesis state of the blockchain. It is a map from module names to module genesis states.
// Values stay as raw JSON so each module can unmarshal its own state lazily.
type GenesisState map[string]json.RawMessage
def string_to_int(string_number):
    """Convert a decimal string to an int without using int().

    Generalized over the original: an optional leading '+' or '-' sign is
    accepted; plain digit strings behave exactly as before. Invalid
    characters now raise instead of silently producing garbage arithmetic.

    :param string_number: string of an optional sign followed by digits
    :return: the integer value
    :raises ValueError: if a non-digit character is encountered
    """
    sign = 1
    digits = string_number
    if digits and digits[0] in '+-':
        if digits[0] == '-':
            sign = -1
        digits = digits[1:]
    result = 0
    for ch in digits:
        if not '0' <= ch <= '9':
            raise ValueError('invalid digit: %r' % ch)
        # Horner-style accumulation: shift previous digits left, add this one.
        result = result * 10 + (ord(ch) - ord('0'))
    return sign * result


if __name__ == '__main__':
    string_number = "123"
    print(string_to_int(string_number))
/* eslint-disable react/jsx-props-no-spreading */
//* __________________________Layout Wrapper Component_________________________*/
// ⚛ Component wraps up header, headerLinks, footer, and children for Appbar
// TODO: Get react-helmet setup for gql to inject siteMetadata
import React from 'react';
import Helmet from 'react-helmet';
import { graphql } from 'gatsby';
import Header from '../Header/Header.jsx';
import HeaderLinks from '../Header/HeaderLinks.jsx';
import Footer from '../Footer/Footer.jsx';
// import SEO from '../Seo';
import 'assets/scss/material-kit-react.scss?v=1.4.0';
// GraphQL query exposing site metadata (title/description) and the prebuilt
// search index. NOTE(review): nothing in this component consumes the result
// yet (see the commented-out Helmet block below), and Gatsby only executes
// exported queries from *page* components — confirm this is wired up.
const LAYOUT_PAGE_QUERY = graphql`
query layoutPageQuery {
site {
id
siteMetadata {
title
description
}
}
siteSearchIndex {
index
}
}
`;
// Layout wrapper: renders the fixed, scroll-recoloring Header and the page
// children. Footer/SEO/Helmet are stubbed out pending metadata wiring (see
// the TODOs); extra props are forwarded to Header untouched.
export default class Layout extends React.Component {
  render() {
    const { children, ...rest } = this.props;
    const dashboardRoutes = [];
    return (
      <div>
        <Header
          color="transparent"
          routes={dashboardRoutes}
          brand="Citylights Church"
          fixed
          changeColorOnScroll={{
            height: 400,
            color: 'white',
          }}
          {...rest}
        />
        {/* <SEO></SEO> */}
        {/* TODO- Get props setup for query to pass properly */}
        {/* <Helmet>
        <meta name="description" content={data.site.} />
        </Helmet> */}
        {children}
        {/* <Footer /> */}
      </div>
    );
  }
}
|
class Node:
    """Binary-tree node; after conversion, ``left``/``right`` double as the
    previous/next pointers of a doubly linked list."""

    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None


def convert(root):
    """Convert a binary tree to an in-order doubly linked list, in place.

    Returns the head of the list (None for an empty tree). Fix over the
    original: it left ``root.left`` pointing at the *head* of the left
    sub-list (not the predecessor) and set the right sub-list's *tail*
    ``left`` pointer to root (clobbering that tail's real predecessor), so
    only forward traversal worked. Both directions are now consistent.
    """
    if root is None:
        return None
    if root.left:
        left_head = convert(root.left)
        left_tail = calc_tail(left_head)
        # Stitch predecessor <-> root.
        left_tail.right = root
        root.left = left_tail
    if root.right:
        right_head = convert(root.right)
        # Stitch root <-> successor (the right list's HEAD, not its tail).
        right_head.left = root
        root.right = right_head
    return calc_head(root)


def calc_tail(node):
    """Follow ``right`` pointers to the last node of a list."""
    while node.right:
        node = node.right
    return node


def calc_head(node):
    """Follow ``left`` pointers to the first node of a list."""
    while node.left:
        node = node.left
    return node
if __name__ == '__main__':
    # Demo tree:
    #         10
    #        /  \
    #      12    15
    #     /  \   /
    #    25  30 36
    root = Node(10)
    root.left = Node(12)
    root.right = Node(15)
    root.left.left = Node(25)
    root.left.right = Node(30)
    root.right.left = Node(36)
    root = convert(root)
    # Walk the resulting list forward; expected output: 25 12 30 10 36 15
    while root:
        print(root.data, end=" ")
        root = root.right
# Host of the CB-Spider REST API.
RESTSERVER=localhost
# Request a public IP through the 'openstack-config01' connection.
# NOTE(review): the empty JSON body ('{}') relies on server-side defaults —
# confirm against the CB-Spider publicip API.
curl -X POST http://$RESTSERVER:1024/spider/publicip?connection_name=openstack-config01 -H 'Content-Type: application/json' -d '{}'
|
<gh_stars>1-10
var exec = require('child_process').exec;
module.exports = {
status: _status,
};
function _status(wifi_interface, callback) {
var fields = {
hw_addr: /HWaddr\s([^\s]+)/,
inet_addr: /inet addr:([^\s]+)/
};
var output = {
hw_addr: '<unknown>',
inet_addr: '<unknown>'
};
exec("ifconfig " + wifi_interface, function(error, stdout, stderr) {
if (error) return callback(error);
for (var key in fields) {
re = stdout.match(fields[key]);
if (re && re.length > 1) {
output[key] = re[1];
}
}
callback(null, output);
});
}
|
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.CopyOnWriteArrayList;
import games.stendhal.client.listener.FeatureChangeListener;
import games.stendhal.client.listener.RPObjectChangeListener;
import marauroa.common.Logger;
import marauroa.common.game.RPObject;
/**
* The player user context. This class holds/manages the data for the user of
* this client. This is independent of any on-screen representation Entity that,
* while related, serves an entirely different purpose.
*
* Currently this is just a helper class for StendhalClient. Maybe it will be
* directly used by other code later.
*/
public class UserContext implements RPObjectChangeListener {
    /**
     * The logger.
     */
    private static final Logger logger = Logger.getLogger(UserContext.class);

    /** Lazily created singleton instance (see {@link #get()}). */
    private static UserContext instance;

    /**
     * The currently enabled features.
     */
    // NOTE(review): plain HashMap, not synchronized — assumed to be accessed
    // from a single thread; confirm before sharing across threads.
    private HashMap<String, String> features;

    /**
     * The feature change listeners.
     */
    private final List<FeatureChangeListener> featureListeners = new CopyOnWriteArrayList<FeatureChangeListener>();

    /**
     * The admin level.
     */
    private int adminlevel;

    /**
     * The player character's name.
     */
    private String name;

    /**
     * The owned sheep RPObject ID.
     */
    private int sheepID;

    /** The user's own player object, set via {@link #setPlayer(RPObject)}. */
    private RPObject player;

    /**
     * Constructor.
     *
     */
    public UserContext() {
        adminlevel = 0;
        name = null;
        sheepID = 0;
        features = new HashMap<String, String>();
        // Publishes this instance as the singleton; constructing a second
        // UserContext replaces any earlier instance returned by get().
        instance = this;
    }

    //
    // UserContext
    //

    public static UserContext get() {
        if (instance == null) {
            instance = new UserContext();
        }
        return instance;
    }

    /**
     * Add a feature change listener.
     *
     * @param l
     *            The listener.
     */
    public void addFeatureChangeListener(final FeatureChangeListener l) {
        featureListeners.add(l);
    }

    /**
     * Fire feature disabled to all registered listeners.
     *
     * @param name
     *            The name of the feature.
     */
    private void fireFeatureDisabled(final String name) {
        logger.debug("Feature disabled: " + name);

        for (final FeatureChangeListener l : featureListeners) {
            l.featureDisabled(name);
        }
    }

    /**
     * Fire feature enabled to all registered listeners.
     *
     * @param name
     *            The name of the feature.
     * @param value
     *            The optional feature value.
     */
    private void fireFeatureEnabled(final String name, final String value) {
        logger.debug("Feature enabled: " + name + " = " + value);

        for (final FeatureChangeListener l : featureListeners) {
            l.featureEnabled(name, value);
        }
    }

    /**
     * Get the admin level.
     *
     * @return The admin level.
     */
    public int getAdminLevel() {
        return adminlevel;
    }

    /**
     * Get the player character name.
     *
     * @return The player character name.
     */
    public String getName() {
        return name;
    }

    /**
     * Get the player's owned sheep RPObject ID.
     *
     * @return The RPObject ID of the sheep the player owns, or <code>0</code>
     *         if none.
     */
    public int getSheepID() {
        return sheepID;
    }

    /**
     * Determine if the user is an admin.
     *
     * @return <code>true</code> is the user is an admin.
     */
    public boolean isAdmin() {
        return (getAdminLevel() != 0);
    }

    /**
     * Remove a feature change listener.
     *
     * @param listener
     *            The listener.
     */
    void removeFeatureChangeListener(final FeatureChangeListener listener) {
        featureListeners.remove(listener);
    }

    /**
     * A feature object added/changed attribute(s).
     * Only fires the enabled event for features not already present.
     *
     * @param changes
     *            The object changes.
     */
    private void processFeaturesAdded(final Map<String, String> changes) {
        for (final Entry<String, String> entry : changes.entrySet()) {
            if (!features.containsKey(entry.getKey())) {
                features.put(entry.getKey(), entry.getValue());
                fireFeatureEnabled(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * A feature object removed attribute(s).
     * Only fires the disabled event for features that were present.
     *
     * @param changes
     *            The object changes.
     */
    private void processFeaturesRemoved(final Map<String, String> changes) {
        for (final String feature : changes.keySet()) {
            if (features.containsKey(feature)) {
                features.remove(feature);
                fireFeatureDisabled(feature);
            }
        }
    }

    public RPObject getPlayer() {
        return player;
    }

    protected void setPlayer(final RPObject object) {
        /*
         * Ignore no-changes
         */
        if (player != object) {
            player = object;
            // Keep the cached character name in sync with the new object.
            name = object.get("name");
        }
    }

    /**
     * Check whether the given object represents this user's character,
     * compared case-insensitively by name for objects of the "player" class.
     */
    boolean isUser(final RPObject object) {
        if (name == null) {
            return false;
        }
        if (object.getRPClass().subclassOf("player")) {
            return name.equalsIgnoreCase(object.get("name"));
        } else {
            return false;
        }
    }

    //
    // RPObjectChangeListener
    //

    /**
     * An object was added.
     *
     * @param object
     *            The object.
     */
    @Override
    public void onAdded(final RPObject object) {
        if (isUser(object)) {
            if (object.has("adminlevel")) {
                adminlevel = object.getInt("adminlevel");
                // fireAdminLevelChanged(adminlevel);
            }
        }
    }

    /**
     * The object added/changed attribute(s).
     *
     * @param object
     *            The base object.
     * @param changes
     *            The changes.
     */
    @Override
    public void onChangedAdded(final RPObject object, final RPObject changes) {
        if (isUser(object)) {
            if (changes.has("adminlevel")) {
                adminlevel = changes.getInt("adminlevel");
            }

            if (changes.has("name")) {
                name = changes.get("name");
            }

            if (changes.has("sheep")) {
                sheepID = changes.getInt("sheep");
                // fireOwnedSheep(sheepID);
            }

            if (changes.hasMap("features")) {
                processFeaturesAdded(changes.getMap("features"));
            }
        }
    }

    /**
     * An object removed attribute(s).
     *
     * @param object
     *            The base object.
     * @param changes
     *            The changes.
     */
    @Override
    public void onChangedRemoved(final RPObject object, final RPObject changes) {
        if (isUser(object)) {
            // Removed attributes reset the cached state to its defaults.
            if (changes.has("adminlevel")) {
                adminlevel = 0;
            }

            if (changes.has("name")) {
                name = null;
            }

            if (changes.has("sheep")) {
                sheepID = 0;
                // fireOwnedSheep(sheepID);
            }

            if (changes.hasMap("features")) {
                processFeaturesRemoved(changes.getMap("features"));
            }
        }
    }

    /**
     * An object was removed.
     *
     * @param object
     *            The object.
     */
    @Override
    public void onRemoved(final RPObject object) {
        if (isUser(object)) {
            adminlevel = 0;
            name = null;
            sheepID = 0;
        }
    }

    /**
     * A slot object was added.
     *
     * @param object
     *            The container object.
     * @param slotName
     *            The slot name.
     * @param sobject
     *            The slot object.
     */
    @Override
    public void onSlotAdded(final RPObject object, final String slotName,
            final RPObject sobject) {
    }

    /**
     * A slot object added/changed attribute(s).
     *
     * @param object
     *            The base container object.
     * @param slotName
     *            The container's slot name.
     * @param sobject
     *            The slot object.
     * @param schanges
     *            The slot object changes.
     */
    @Override
    public void onSlotChangedAdded(final RPObject object,
            final String slotName, final RPObject sobject,
            final RPObject schanges) {
    }

    /**
     * A slot object removed attribute(s).
     *
     * @param object
     *            The base container object.
     * @param slotName
     *            The container's slot name.
     * @param sobject
     *            The slot object.
     * @param schanges
     *            The slot object changes.
     */
    @Override
    public void onSlotChangedRemoved(final RPObject object,
            final String slotName, final RPObject sobject,
            final RPObject schanges) {
    }

    /**
     * A slot object was removed.
     *
     * @param object
     *            The container object.
     * @param slotName
     *            The slot name.
     * @param sobject
     *            The slot object.
     */
    @Override
    public void onSlotRemoved(final RPObject object, final String slotName,
            final RPObject sobject) {
    }

    public void setName(final String username) {
        name = username;
    }

    /**
     * Checks if the player has a feature.
     *
     * @param name
     *            The feature key.
     * @return <code>true</code> if the feature is present (non-null value).
     */
    public boolean hasFeature(final String name) {
        return features.get(name) != null;
    }
}
|
#include "nmos/node_registration.h"
#include "cpprest/http_client.h"
#include "nmos/api_downgrade.h"
#include "nmos/api_utils.h" // for nmos::type_from_resourceType
#include "nmos/model.h"
#include "nmos/query_utils.h"
#include "nmos/rational.h"
#include "nmos/slog.h"
#include "nmos/thread_utils.h" // for wait_until, reverse_lock_guard
#include "nmos/version.h"
namespace nmos
{
namespace details
{
        // Build the internal subscription resource used to track this node's
        // own registration events (matches all resource paths, not persistent).
        nmos::resource make_node_registration_subscription(const nmos::id& id)
        {
            using web::json::value;

            value data;
            data[nmos::fields::id] = value::string(id);
            data[nmos::fields::max_update_rate_ms] = 0; // add a setting for this?
            data[nmos::fields::persist] = false; // not to be deleted by someone else
            data[nmos::fields::resource_path] = value::string(U("")); // i.e. all resource types
            data[nmos::fields::params] = value::object();
            // generate a websocket url?

            return{ nmos::is04_versions::v1_2, nmos::types::subscription, data, true };
        }
        // Build the grain resource that accumulates resource events for the
        // given subscription; it is seeded with events for the current
        // contents of the model's resources.
        nmos::resource make_node_registration_grain(const nmos::id& id, const nmos::id& subscription_id, const nmos::resources& resources)
        {
            using web::json::value;

            value data;
            data[nmos::fields::id] = value::string(id);
            data[nmos::fields::subscription_id] = value::string(subscription_id);
            data[nmos::fields::message] = details::make_grain(nmos::make_id(), subscription_id, U("/"));
            nmos::fields::message_grain_data(data) = make_resource_events(resources, nmos::is04_versions::v1_2, U(""), value::object());

            return{ nmos::is04_versions::v1_2, nmos::types::grain, data, true };
        }
        // Construct the base URI of the Registration API, without a trailing
        // API version segment (the caller appends one per request).
        web::uri make_registration_uri_with_no_version(const nmos::settings& settings)
        {
            // scheme, host and port should come from the mDNS record for the registry's Registration API (via settings)
            // version should be the highest version supported by both this node and the registry
            return web::uri_builder()
                .set_scheme(U("http"))
                .set_host(nmos::fields::registry_address(settings))
                .set_port(nmos::fields::registration_port(settings))
                .set_path(U("/x-nmos/registration/"))
                .to_uri();
        }
// make a POST or DELETE request on the Registration API specified by the client for the specified resource event
// this could be made asynchronous, returning pplx::task<web::http::http_response>
// however, the logging gateway would need to be passed out of scope to a continuation to perform the response logging
        // make a POST or DELETE request on the Registration API specified by the client for the specified resource event
        // this could be made asynchronous, returning pplx::task<web::http::http_response>
        // however, the logging gateway would need to be passed out of scope to a continuation to perform the response logging
        // The event's "path" is "<resourceType>/<id>"; "pre"/"post" carry the
        // resource state before/after the change (null "post" means deletion).
        void request_registration(web::http::client::http_client& client, const nmos::api_version& registry_version, const web::json::value& event, slog::base_gate& gate)
        {
            const auto& path = event.at(U("path")).as_string();
            const auto& data = event.at(U("post"));
            auto slash = path.find('/'); // assert utility::string_t::npos != slash
            nmos::type type = nmos::type_from_resourceType(path.substr(0, slash));
            nmos::id id = path.substr(slash + 1);

            if (!data.is_null())
            {
                // 'create' or 'update'
                // NOTE(review): 'creation' here means pre == post in the grain
                // event — assumed to indicate no prior registered state; confirm
                // against make_resource_events.
                const bool creation = event.at(U("pre")) == data;
                slog::log<slog::severities::info>(gate, SLOG_FLF) << "Requesting registration " << (creation ? "creation" : "update") << " for " << type.name << ": " << id;

                // a downgrade is required if the registry version is lower than this resource's version
                auto body = web::json::value_of(
                {
                    { U("type"), web::json::value::string(type.name) },
                    { U("data"), nmos::downgrade(nmos::is04_versions::v1_2, type, data, registry_version, registry_version) }
                });

                // block and wait for the response
                // No trailing slash on the URL
                // See https://github.com/AMWA-TV/nmos-discovery-registration/issues/15
                auto response = client.request(web::http::methods::POST, make_api_version(registry_version) + U("/resource"), body).get();

                if (web::http::status_codes::OK == response.status_code())
                    slog::log<slog::severities::more_info>(gate, SLOG_FLF) << "Registration updated for " << type.name << ": " << id;
                else if (web::http::status_codes::Created == response.status_code())
                    slog::log<slog::severities::more_info>(gate, SLOG_FLF) << "Registration created for " << type.name << ": " << id;
                else if (web::http::is_error_status_code(response.status_code()))
                    slog::log<slog::severities::error>(gate, SLOG_FLF) << "Registration " << (creation ? "creation" : "update") << " rejected for " << type.name << ": " << id;
            }
            else
            {
                // 'delete'
                slog::log<slog::severities::info>(gate, SLOG_FLF) << "Requesting registration deletion for " << type.name << ": " << id;

                // block and wait for the response
                auto response = client.request(web::http::methods::DEL, make_api_version(registry_version) + U("/resource/") + path).get();

                if (web::http::status_codes::NoContent == response.status_code())
                    slog::log<slog::severities::more_info>(gate, SLOG_FLF) << "Registration deleted for " << type.name << ": " << id;
                else if (web::http::is_error_status_code(response.status_code()))
                    slog::log<slog::severities::error>(gate, SLOG_FLF) << "Registration deletion rejected for " << type.name << ": " << id;
            }
        }
        // Send a heartbeat (POST /health/nodes/{id}) for this node and log
        // when the registry reports the node as unknown.
        void update_node_health(web::http::client::http_client& client, const nmos::api_version& registry_version, const nmos::id& id, slog::base_gate& gate)
        {
            slog::log<slog::severities::too_much_info>(gate, SLOG_FLF) << "Posting heartbeat for node " << id;

            // block and wait for the response
            auto response = client.request(web::http::methods::POST, make_api_version(registry_version) + U("/health/nodes/") + id).get();

            // Check response to see if re-registration is required!
            if (web::http::status_codes::NotFound == response.status_code())
                slog::log<slog::severities::error>(gate, SLOG_FLF) << "Registration not found for node: " << id;
        }
}
    // Worker thread: watches the model's resources via an internal
    // subscription/grain pair and forwards each change to the registry's
    // Registration API, throttled by the subscription's max_update_rate_ms.
    // Runs until 'shutdown' is signalled through 'condition'.
    void node_registration_thread(nmos::model& model, nmos::mutex& mutex, nmos::condition_variable& condition, bool& shutdown, slog::base_gate& gate)
    {
        using utility::string_t;
        using web::json::value;

        // could start out as a shared/read lock, only upgraded to an exclusive/write lock when a grain in the resources is actually modified
        nmos::write_lock lock(mutex);

        const auto base_uri = details::make_registration_uri_with_no_version(model.settings);
        const auto registry_version = nmos::parse_api_version(nmos::fields::registry_version(model.settings));
        web::http::client::http_client client(base_uri);

        tai most_recent_message{};
        auto earliest_necessary_update = (tai_clock::time_point::max)();

        auto subscription_id = nmos::make_id();
        auto grain_id = nmos::make_id();

        const auto subscription = insert_resource(model.resources, details::make_node_registration_subscription(subscription_id)).first;
        const auto grain = insert_resource(model.resources, details::make_node_registration_grain(grain_id, subscription_id, model.resources)).first;

        for (;;)
        {
            // wait for the thread to be interrupted either because there are resource changes, or because the server is being shut down
            // or because message sending was throttled earlier
            details::wait_until(condition, lock, earliest_necessary_update, [&]{ return shutdown || most_recent_message < most_recent_update(model.resources); });
            if (shutdown) break;
            most_recent_message = most_recent_update(model.resources);

            slog::log<slog::severities::too_much_info>(gate, SLOG_FLF) << "Got notification on node registration thread";

            const auto now = tai_clock::now();

            earliest_necessary_update = (tai_clock::time_point::max)();

            // if the grain has events to send
            if (0 == nmos::fields::message_grain_data(grain->data).size()) continue;

            // throttle messages according to the subscription's max_update_rate_ms
            const auto max_update_rate = std::chrono::milliseconds(nmos::fields::max_update_rate_ms(subscription->data));
            const auto earliest_allowed_update = time_point_from_tai(details::get_grain_timestamp(nmos::fields::message(grain->data))) + max_update_rate;
            if (earliest_allowed_update > now)
            {
                // make sure to send a message as soon as allowed
                if (earliest_allowed_update < earliest_necessary_update)
                {
                    earliest_necessary_update = earliest_allowed_update;
                }
                // just don't do it now!
                continue;
            }

            slog::log<slog::severities::info>(gate, SLOG_FLF) << "Sending " << nmos::fields::message_grain_data(grain->data).size() << " changes to the Registration API";

            value events = value::array();

            // set the grain timestamp
            // steal the events
            // reset the grain for next time
            model.resources.modify(grain, [&most_recent_message, &events, &model](nmos::resource& grain)
            {
                details::set_grain_timestamp(nmos::fields::message(grain.data), most_recent_message);
                using std::swap;
                swap(events, nmos::fields::message_grain_data(grain.data));
                grain.updated = strictly_increasing_update(model.resources);
            });

            // this would be the place to handle e.g. the registration uri in the settings having changed...

            // issue the registration requests, without the lock on the resources and settings
            details::reverse_lock_guard<nmos::write_lock> unlock{ lock };

            for (auto& event : events.as_array())
            {
                // need to implement specified handling of error conditions
                // see https://github.com/AMWA-TV/nmos-discovery-registration/blob/v1.2.x/docs/4.1.%20Behaviour%20-%20Registration.md#error-conditions
                // for the moment, just log http exceptions...
                try
                {
                    details::request_registration(client, registry_version, event, gate);

                    const auto& path = event.at(U("path")).as_string();
                    auto slash = path.find('/'); // assert utility::string_t::npos != slash
                    nmos::type type = nmos::type_from_resourceType(path.substr(0, slash));
                    nmos::id id = path.substr(slash + 1);
                    if (nmos::types::node == type)
                    {
                        // set the health of the node, to trigger the heartbeat thread
                        // (re-acquire a shared lock just for this; the outer write
                        // lock is released by reverse_lock_guard above)
                        nmos::read_lock lock(mutex);
                        set_resource_health(model.resources, id);
                    }
                }
                catch (const web::http::http_exception& e)
                {
                    slog::log<slog::severities::error>(gate, SLOG_FLF) << e.what() << " [" << e.error_code() << "]";
                }
            }
        }
    }
    // Worker thread: periodically posts heartbeats for the self (node)
    // resource at the configured registration heartbeat interval.
    // Runs until 'shutdown' is signalled through 'condition'.
    void node_registration_heartbeat_thread(const nmos::model& model, nmos::mutex& mutex, nmos::condition_variable& condition, bool& shutdown, slog::base_gate& gate)
    {
        // since health is mutable, no need to get an exclusive/write lock
        nmos::read_lock lock(mutex);

        const auto base_uri = details::make_registration_uri_with_no_version(model.settings);
        const auto registry_version = nmos::parse_api_version(nmos::fields::registry_version(model.settings));
        web::http::client::http_client client(base_uri);

        auto resource = nmos::find_self_resource(model.resources);
        auto self_health = model.resources.end() == resource || nmos::health_forever == resource->health ? health_now() : resource->health.load();

        // wait until the next node heartbeat, or the server is being shut down
        while (!condition.wait_until(lock, time_point_from_health(self_health + nmos::fields::registration_heartbeat_interval(model.settings)), [&]{ return shutdown; }))
        {
            auto heartbeat_health = health_now() - nmos::fields::registration_heartbeat_interval(model.settings);
            resource = nmos::find_self_resource(model.resources);
            self_health = model.resources.end() == resource || nmos::health_forever == resource->health ? health_now() : resource->health.load();
            // skip when a heartbeat has been sent recently enough already
            if (self_health > heartbeat_health) continue;

            const auto id = resource->id;

            set_resource_health(model.resources, id);

            // this would be the place to handle e.g. the registration uri in the settings having changed...

            // issue the registration heartbeat, without the lock on the resources and settings
            details::reverse_lock_guard<nmos::read_lock> unlock{ lock };

            // need to implement specified handling of error conditions
            // see https://github.com/AMWA-TV/nmos-discovery-registration/blob/v1.2.x/docs/4.1.%20Behaviour%20-%20Registration.md#error-conditions
            try
            {
                details::update_node_health(client, registry_version, id, gate);
            }
            catch (const web::http::http_exception& e)
            {
                slog::log<slog::severities::error>(gate, SLOG_FLF) << e.what() << " [" << e.error_code() << "]";
            }
        }
    }
}
|
<filename>src/config.js<gh_stars>1-10
// Wallet configuration: the supported coins and the minimal ERC-20 ABI used
// to interact with token contracts.
export default {
  version: '0.1',

  // Coins shown in the wallet. `type` is 'base' (native chain coin) or
  // 'erc20' (token contract); `network` lists the chain networks supported.
  coinList: [
    {
      symbol: 'BTC',
      name: 'Bitcoin',
      desc:
        '<span>比特币,创新的支付网络,一种新的货币。<a href="https://bitcoin.org/zh_CN/" target="_blank">bitcoin.org</a></span>',
      type: 'base',
      network: ['bitcoin'],
    },
    {
      symbol: 'ETH',
      name: 'Ethereum',
      desc:
        '<span>以太坊,智能合约区块链平台。<a href="https://www.ethereum.org/" target="_blank">ethereum.org</a></span>',
      type: 'base',
      network: ['homestead', 'rinkeby', 'ropsten', 'kovan', 'goerli'],
    },
    {
      symbol: 'BTC|TEST',
      // NOTE(review): '<NAME>' looks like a redacted placeholder — restore
      // the intended display name (presumably "Bitcoin Testnet").
      name: '<NAME>',
      desc: '<span>比特币测试网络,开发人员用于测试。',
      type: 'base',
      network: ['testnet'],
    },
    {
      symbol: 'WGT',
      name: 'W3cGroup',
      desc:
        '<span>W3C小组,区块链项目技术社群。<a href="https://w3c.group" target="_blank">w3c.group</a></span>',
      type: 'erc20', // 每个erc20间隔10 (each erc20 is spaced 10 apart)
      // NOTE(review): '<KEY>' is a redacted placeholder — the token's
      // contract address belongs here.
      tokenAddr: '<KEY>',
      start: 10,
      network: ['ropsten'],
    },
  ],

  // Minimal ERC-20 ABI: name/symbol/decimals/totalSupply/balanceOf/allowance,
  // approve/transfer/transferFrom, constructor and the Transfer event.
  // NOTE(review): `transfer` declares no outputs, which deviates from the
  // ERC-20 standard (bool return) — presumably matches the deployed
  // contract; verify before reuse.
  abi: [
    {
      constant: true,
      inputs: [],
      name: 'name',
      outputs: [
        {
          name: '',
          type: 'string',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      constant: false,
      inputs: [
        {
          name: '_spender',
          type: 'address',
        },
        {
          name: '_value',
          type: 'uint256',
        },
      ],
      name: 'approve',
      outputs: [
        {
          name: 'success',
          type: 'bool',
        },
      ],
      payable: false,
      stateMutability: 'nonpayable',
      type: 'function',
    },
    {
      constant: true,
      inputs: [],
      name: 'totalSupply',
      outputs: [
        {
          name: '',
          type: 'uint256',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      constant: false,
      inputs: [
        {
          name: '_from',
          type: 'address',
        },
        {
          name: '_to',
          type: 'address',
        },
        {
          name: '_value',
          type: 'uint256',
        },
      ],
      name: 'transferFrom',
      outputs: [
        {
          name: 'success',
          type: 'bool',
        },
      ],
      payable: false,
      stateMutability: 'nonpayable',
      type: 'function',
    },
    {
      constant: true,
      inputs: [],
      name: 'decimals',
      outputs: [
        {
          name: '',
          type: 'uint8',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      constant: true,
      inputs: [],
      name: 'standard',
      outputs: [
        {
          name: '',
          type: 'string',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      constant: true,
      inputs: [
        {
          name: '',
          type: 'address',
        },
      ],
      name: 'balanceOf',
      outputs: [
        {
          name: '',
          type: 'uint256',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      constant: true,
      inputs: [],
      name: 'symbol',
      outputs: [
        {
          name: '',
          type: 'string',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      constant: false,
      inputs: [
        {
          name: '_to',
          type: 'address',
        },
        {
          name: '_value',
          type: 'uint256',
        },
      ],
      name: 'transfer',
      outputs: [],
      payable: false,
      stateMutability: 'nonpayable',
      type: 'function',
    },
    {
      constant: true,
      inputs: [
        {
          name: '',
          type: 'address',
        },
        {
          name: '',
          type: 'address',
        },
      ],
      name: 'allowance',
      outputs: [
        {
          name: '',
          type: 'uint256',
        },
      ],
      payable: false,
      stateMutability: 'view',
      type: 'function',
    },
    {
      inputs: [],
      payable: false,
      stateMutability: 'nonpayable',
      type: 'constructor',
    },
    {
      anonymous: false,
      inputs: [
        {
          indexed: true,
          name: 'from',
          type: 'address',
        },
        {
          indexed: true,
          name: 'to',
          type: 'address',
        },
        {
          indexed: false,
          name: 'value',
          type: 'uint256',
        },
      ],
      name: 'Transfer',
      type: 'event',
    },
  ],
};
|
/**
 * 0/1 knapsack via dynamic programming.
 *
 * @param {Array<{weight: number, value: number}>} items - candidate items.
 * @param {number} capacity - maximum total weight.
 * @returns {number[]} indices into `items` of the chosen subset, in
 *   descending index order.
 */
function knapsack(items, capacity) {
  const n = items.length;

  // table[i][j] = best value using only the first i items within weight j.
  const table = Array.from({ length: n + 1 }, () => new Array(capacity + 1).fill(0));

  for (let i = 1; i <= n; i++) {
    const { weight, value } = items[i - 1];
    for (let j = 0; j <= capacity; j++) {
      // Too heavy for this capacity: inherit the best without this item.
      // Otherwise take the better of skipping or including it.
      table[i][j] = weight > j
        ? table[i - 1][j]
        : Math.max(table[i - 1][j], table[i - 1][j - weight] + value);
    }
  }

  // Walk back through the table to recover which items were chosen:
  // a value change between rows i-1 and i means item i-1 was taken.
  const chosen = [];
  let i = n;
  let remaining = capacity;
  while (i > 0 && remaining > 0) {
    if (table[i][remaining] !== table[i - 1][remaining]) {
      chosen.push(i - 1);
      remaining -= items[i - 1].weight;
    }
    i -= 1;
  }

  return chosen;
}
# Evaluate the 1024+0+512-STWS checkpoint on the WikiText-103 raw validation
# split, applying the "remove all but nouns and verbs, fill first two thirds"
# augmentation and scoring with last-element evaluation (per the flag names).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-STWS/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-STWS/1024+0+512-N-VB-fill-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_two_thirds_full --eval_function last_element_eval
package com.nolanlawson.keepscore.helper;
import android.content.Context;
import com.nolanlawson.keepscore.R;
/**
* A user's preferred orientation in the settings. In the future we may also add something like "Sensor" or "Automatic"
* @author nolan
*
*/
public enum Orientation {
    // NOTE(review): "Landsdcape" is a typo for "Landscape", but renaming the
    // constant would break existing callers (and any persisted enum names) —
    // kept as-is for compatibility.
    Landsdcape(R.string.CONSTANT_pref_orientation_choice_landscape),
    Portrait(R.string.CONSTANT_pref_orientation_choice_portrait),
    ;

    /** Resource id of the preference string this value corresponds to. */
    private int prefResId;

    private Orientation(int prefResId) {
        this.prefResId = prefResId;
    }

    public int getPrefResId() {
        return prefResId;
    }

    /**
     * Look up the Orientation whose preference string equals {@code str}.
     *
     * @param str the stored preference value to match
     * @param context used to resolve each value's preference string resource
     * @return the matching Orientation, or {@code null} if none matches
     */
    public static Orientation fromString(String str, Context context) {
        for (Orientation orientation : values()) {
            if (context.getString(orientation.getPrefResId()).equals(str)) {
                return orientation;
            }
        }
        return null;
    }
}
|
class Node:
    """A singly linked list node."""

    def __init__(self, value):
        self.value = value
        self.next = None


class LinkedList:
    """Singly linked list supporting ordering comparison.

    Lists compare first by length; equal-length non-empty lists compare by
    their last element's value; otherwise they are equal.
    """

    def __init__(self):
        self.__head = None

    def add_node(self, value):
        """Append ``value`` at the tail of the list (O(n))."""
        new_node = Node(value)
        if self.__head is None:
            self.__head = new_node
        else:
            current = self.__head
            while current.next:
                current = current.next
            current.next = new_node

    def __length_and_last(self):
        """Return (number of nodes, last node or None) in one traversal."""
        count = 0
        last = None
        node = self.__head
        while node is not None:
            count += 1
            last = node
            node = node.next
        return count, last

    def __cmp__(self, other):
        """Three-way compare with another LinkedList; returns -1, 0 or 1.

        Bug fix: the original called the Python-2-only ``cmp`` builtin and
        applied ``len()``/indexing to a Node (which supports neither), and
        never returned a value for the equal case. The comparison contract
        (length first, then last element) is preserved.
        """
        def three_way(a, b):
            # Python 3 replacement for the removed cmp() builtin.
            return (a > b) - (a < b)

        self_len, self_last = self.__length_and_last()
        other_len, other_last = other.__length_and_last()

        len_comp = three_way(self_len, other_len)
        if len_comp != 0:
            return len_comp
        if self_len > 0:
            return three_way(self_last.value, other_last.value)
        return 0
#! /usr/bin/env bash
# Provisioning script template (Handlebars): installs the runtime required by
# the selected deployment language. The {{#has}}...{{/has}} blocks are
# resolved by the template engine before the script is executed.

apt-get update
apt-get install -f

{{#has deployment.language 'NODE'}}
# Node.js 8.x via the NodeSource setup script.
wget -qO- "https://deb.nodesource.com/setup_8.x" | bash -;
apt-get install -y nodejs
{{/has}}

{{#has deployment.language 'PYTHON'}}
apt-get install -y python3-pip
{{/has}}

{{#has deployment.language 'DJANGO'}}
# Fetch Python 2.7.11 sources.
# NOTE(review): --no-check-certificate disables TLS verification — confirm
# this is acceptable for the target environment.
wget --no-check-certificate https://www.python.org/ftp/python/2.7.11/Python-2.7.11.tgz
tar -xzf Python-2.7.11.tgz
{{/has}}

{{#has deployment.language 'SWIFT'}}
# Swift 4.1.2 toolchain for Ubuntu 14.04, installed under /opt/swift.
apt-get install -y libatomic1 libpython2.7 libcurl4-openssl-dev
mkdir /opt/swift
cd /opt/swift
wget --no-check-certificate https://swift.org/builds/swift-4.1.2-release/ubuntu1404/swift-4.1.2-RELEASE/swift-4.1.2-RELEASE-ubuntu14.04.tar.gz
tar -xzf swift-4.1.2-RELEASE-ubuntu14.04.tar.gz
# Add swift to PATH once (guarded by grep), then register its shared
# libraries with the dynamic linker.
if ! grep -q "swift-4.1.2" ~/.profile; then echo "PATH=\"/opt/swift/swift-4.1.2-RELEASE-ubuntu14.04/usr/bin:$PATH\"" >> ~/.profile; fi;
chmod -R 755 /opt/swift/swift-4.1.2-RELEASE-ubuntu14.04/usr/lib/swift/
touch /etc/ld.so.conf.d/swift.conf
ls /opt/swift/swift-4.1.2-RELEASE-ubuntu14.04/usr/lib/swift/linux
if ! grep -q "/opt/swift/swift-4.1.2-RELEASE-ubuntu14.04/usr/lib/swift/linux" /etc/ld.so.conf.d/swift.conf; then echo "/opt/swift/swift-4.1.2-RELEASE-ubuntu14.04/usr/lib/swift/linux" >> /etc/ld.so.conf.d/swift.conf; fi;
ldconfig
cd -
{{/has}}

{{#has deployment.language 'SPRING'}}
# OpenJDK 8 from jessie-backports.
echo "deb http://http.debian.net/debian jessie-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y -t jessie-backports openjdk-8-jre
{{/has}}

{{#has deployment.language 'JAVA'}}
echo "deb http://http.debian.net/debian jessie-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y -t jessie-backports openjdk-8-jre unzip
{{/has}}
|
// SPDX-License-Identifier: MIT
#include "stb_sprintf.h" /* libxml2 */
#include <SDL.h>
#include <tmx.h>
#include "core.h"
static void tmxlib_store_property(tmx_property* property, void* core);
/* Return the first global tile id (firstgid) of the map's first tileset. */
Sint32 get_first_gid(tmx_map* tiled_map)
{
    return (Sint32)tiled_map->ts_head->firstgid;
}
/* Return the first layer in the map's layer list. */
tmx_layer* get_head_layer(tmx_map* tiled_map)
{
    return tiled_map->ly_head;
}
/* Report whether the given layer has the requested layer type. */
SDL_bool is_tiled_layer_of_type(const enum tmx_layer_type tiled_type, tmx_layer* tiled_layer)
{
    return (tiled_type == tiled_layer->type) ? SDL_TRUE : SDL_FALSE;
}
/* Return the first object of an object-group layer, or NULL for any other
 * layer type.
 */
tmx_object* get_head_object(tmx_layer* tiled_layer, core_t* core)
{
    (void)core; /* unused; silences -Wunused-parameter, kept for API symmetry */

    if (is_tiled_layer_of_type(L_OBJGR, tiled_layer))
    {
        return tiled_layer->content.objgr->head;
    }

    return NULL;
}
/* Return the tileset that owns the map's first global tile id. */
tmx_tileset* get_head_tileset(tmx_map* tiled_map)
{
    Sint32 first_gid = get_first_gid(tiled_map);
    return tiled_map->tiles[first_gid]->tileset;
}
/* Return the layer's raw gid array (cast to Sint32*). */
Sint32* get_layer_content(tmx_layer* tiled_layer)
{
    return (Sint32*)tiled_layer->content.gids;
}
/* Return the layer's name. */
const char* get_layer_name(tmx_layer* tiled_layer)
{
    return tiled_layer->name;
}
/* Stub: layer property counting is not implemented; always returns 0. */
Sint32 get_layer_property_count(tmx_layer* tiled_layer)
{
    (void)tiled_layer;
    return 0;
}
/* Identity mapping in this backend: the gid is used directly as local id. */
Sint32 get_local_id(Sint32 gid, tmx_map* tiled_map)
{
    (void)tiled_map;
    return gid;
}
/* Stub: map property counting is not implemented; always returns 0. */
Sint32 get_map_property_count(tmx_map* tiled_map)
{
    (void)tiled_map;
    return 0;
}
/* Return the tile id of the given animation frame of an animated tile.
 * Bug fix: removed an unreachable 'return 0;' after the return statement.
 */
Sint32 get_next_animated_tile_id(Sint32 gid, Sint32 current_frame, tmx_map* tiled_map)
{
    return (Sint32)tiled_map->tiles[gid]->animation[current_frame].tile_id;
}
/* Return the object's name. */
const char* get_object_name(tmx_object* tiled_object)
{
    return tiled_object->name;
}
/* Stub: object property counting is not implemented; always returns 0. */
Sint32 get_object_property_count(tmx_object* tiled_object)
{
    (void)tiled_object;
    return 0;
}
/* Return the object's type string. */
const char* get_object_type_name(tmx_object* tiled_object)
{
    return tiled_object->type;
}
/* Return the tile height (in pixels) of the map's first tileset. */
Sint32 get_tile_height(tmx_map* tiled_map)
{
    Sint32 first_gid = get_first_gid(tiled_map);
    return (Sint32)tiled_map->tiles[first_gid]->tileset->tile_height;
}
/* Write the tile's upper-left pixel position within its tileset image. */
void get_tile_position(Sint32 gid, Sint32* pos_x, Sint32* pos_y, tmx_map* tiled_map)
{
    *pos_x = (Sint32)tiled_map->tiles[gid]->ul_x;
    *pos_y = (Sint32)tiled_map->tiles[gid]->ul_y;
}
/* Stub: tile property counting is not implemented; always returns 0. */
Sint32 get_tile_property_count(tmx_tile* tiled_tile)
{
    (void)tiled_tile;
    return 0;
}
/* Return the tile width (in pixels) of the map's first tileset. */
Sint32 get_tile_width(tmx_map* tiled_map)
{
    Sint32 first_gid = get_first_gid(tiled_map);
    return (Sint32)tiled_map->tiles[first_gid]->tileset->tile_width;
}
/* Build the full path of the tileset image into path_name.
 * NOTE(review): the "E:\\" drive prefix hard-codes a Windows location —
 * confirm this is intentional for this build target.
 */
void set_tileset_path(char* path_name, Sint32 path_length, core_t* core)
{
    Sint32 first_gid = get_first_gid(core->map->handle);
    char ts_path[64] = { 0 };
    size_t ts_path_length = 0;

    cwk_path_get_dirname(core->map->handle->ts_head->source, &ts_path_length);
    /* Clamp to the local buffer (63 characters + NUL terminator). */
    if (63 <= ts_path_length)
    {
        ts_path_length = 63;
    }

    /* The tileset image source is stored relatively to the tileset
     * file but because we only know the location of the tileset
     * file relatively to the map file, we need to adjust the path
     * accordingly. It's a hack, but it works.
     */
    SDL_strlcpy(ts_path, core->map->handle->ts_head->source, ts_path_length + 1);
    stbsp_snprintf(path_name, (Sint32)path_length, "E:\\%s%s%s",
                   core->map->path,
                   ts_path,
                   core->map->handle->tiles[first_gid]->tileset->image->source);
}
/* Return the buffer size needed by set_tileset_path(): map path + tileset
 * source + image source + 1 for the NUL terminator.
 * Fix: use SDL_strlen consistently (was mixed with bare strlen) and cast
 * the size_t results explicitly to avoid implicit narrowing warnings.
 */
Sint32 get_tileset_path_length(core_t* core)
{
    Sint32 path_length = 0;
    Sint32 first_gid = get_first_gid(core->map->handle);
    size_t ts_path_length = SDL_strlen(core->map->handle->ts_head->source);

    path_length += (Sint32)SDL_strlen(core->map->path);
    path_length += (Sint32)SDL_strlen(core->map->handle->tiles[first_gid]->tileset->image->source);
    path_length += (Sint32)ts_path_length + 1;

    return path_length;
}
/* Report whether the gid maps to a tile in the map's tile table. */
SDL_bool is_gid_valid(Sint32 gid, tmx_map* tiled_map)
{
    if (tiled_map->tiles[gid])
    {
        return SDL_TRUE;
    }

    return SDL_FALSE;
}
/* Report whether the tile identified by gid is animated. On success,
 * optionally writes the animation frame count to *animation_length and the
 * first frame's tile id to *id (either pointer may be NULL).
 */
SDL_bool is_tile_animated(Sint32 gid, Sint32* animation_length, Sint32* id, tmx_map* tiled_map)
{
    Sint32 local_id = get_local_id(gid, tiled_map);

    if (tiled_map->tiles[local_id])
    {
        if (tiled_map->tiles[local_id]->animation)
        {
            if (animation_length)
            {
                *animation_length = (Sint32)tiled_map->tiles[local_id]->animation_len;
            }
            if (id)
            {
                *id = (Sint32)tiled_map->tiles[local_id]->animation[0].tile_id;
            }
            return SDL_TRUE;
        }
    }

    return SDL_FALSE;
}
/* djb2 by <NAME>
* http://www.cse.yorku.ca/~oz/hash.html
*/
Uint64 generate_hash(const unsigned char* name)
{
Uint64 hash = 5381;
Uint32 c;
while ((c = *name++))
{
hash = ((hash << 5) + hash) + c;
}
return hash;
}
/* Look up a property by pre-computed name hash: stores the query hash in the
 * core, then iterates all properties; tmxlib_store_property fills the
 * matching core->map->*_property field as a side effect.
 */
void load_property(const Uint64 name_hash, tmx_property* properties, Sint32 property_count, core_t* core)
{
    (void)property_count;
    core->map->hash_query = name_hash;
    tmx_property_foreach(properties, tmxlib_store_property, (void*)core);
}
/* Load a TMX map file into core->map->handle.
 * Returns CORE_WARNING when the file is missing or cannot be parsed,
 * CORE_OK on success.
 */
status_t load_tiled_map(const char* map_file_name, core_t* core)
{
    /* Existence pre-check so a missing file gets a clearer message than
     * the generic tmx parse error. */
    FILE* fp = fopen(map_file_name, "r");

    if (fp)
    {
        fclose(fp);
    }
    else
    {
        dbgprint("%s: %s not found.", FUNCTION_NAME, map_file_name);
        return CORE_WARNING;
    }

    core->map->handle = (tmx_map*)tmx_load(map_file_name);
    if (! core->map->handle)
    {
        dbgprint("%s: %s.", FUNCTION_NAME, tmx_strerr());
        return CORE_WARNING;
    }

    return CORE_OK;
}
/* Strip the TMX flip flags from a gid, leaving the bare global tile id. */
Sint32 remove_gid_flip_bits(Sint32 gid)
{
    return gid & TMX_FLIP_BITS_REMOVAL;
}
/* Stub: always reports SDL_TRUE.
 * NOTE(review): gid/tile/tiled_map are currently ignored and *tile is never
 * written — callers must not rely on it being set. Fix: removed the unused
 * 'local_id' local and silenced the unused-parameter warnings.
 */
SDL_bool tile_has_properties(Sint32 gid, tmx_tile** tile, tmx_map* tiled_map)
{
    (void)gid;
    (void)tile;
    (void)tiled_map;

    return SDL_TRUE;
}
/* Free the loaded TMX map, if any. Safe to call when no map is loaded. */
void unload_tiled_map(core_t* core)
{
    if (core->map->handle)
    {
        tmx_map_free(core->map->handle);
    }
}
/* Translate the core's internal map-loaded flag into an SDL boolean. */
SDL_bool is_map_loaded(core_t* core)
{
    return core->is_map_loaded ? SDL_TRUE : SDL_FALSE;
}
/* Fetch a boolean map property by name hash; SDL_FALSE when no map is
 * loaded or the property is absent (default set before the lookup).
 */
SDL_bool get_boolean_map_property(const Uint64 name_hash, core_t* core)
{
    Sint32 prop_cnt;

    if (! is_map_loaded(core))
    {
        return SDL_FALSE;
    }

    prop_cnt = get_map_property_count(core->map->handle);
    core->map->boolean_property = SDL_FALSE;
    load_property(name_hash, core->map->handle->properties, prop_cnt, core);
    return core->map->boolean_property;
}
/* Looks up a decimal (float) custom property on the map itself.
 * Returns 0.0 when no map is loaded or the property is absent.
 */
double get_decimal_map_property(const Uint64 name_hash, core_t* core)
{
    Sint32 prop_cnt;

    if (! is_map_loaded(core))
    {
        return 0.0;
    }

    prop_cnt = get_map_property_count(core->map->handle);
    /* Reset the slot so a missing property yields the default. */
    core->map->decimal_property = 0.0;
    load_property(name_hash, core->map->handle->properties, prop_cnt, core);
    return core->map->decimal_property;
}
/* Looks up an integer custom property on the map itself.
 * Returns 0 when no map is loaded or the property is absent.
 */
Sint32 get_integer_map_property(const Uint64 name_hash, core_t* core)
{
    Sint32 prop_cnt;

    if (! is_map_loaded(core))
    {
        return 0;
    }

    prop_cnt = get_map_property_count(core->map->handle);
    /* Reset the slot so a missing property yields the default. */
    core->map->integer_property = 0;
    load_property(name_hash, core->map->handle->properties, prop_cnt, core);
    return core->map->integer_property;
}
/* Looks up a string (or file) custom property on the map itself.
 * Returns NULL when no map is loaded or the property is absent.
 * The returned pointer is owned by the TMX handle — do not free it.
 */
const char* get_string_map_property(const Uint64 name_hash, core_t* core)
{
    Sint32 prop_cnt;

    if (! is_map_loaded(core))
    {
        return NULL;
    }

    prop_cnt = get_map_property_count(core->map->handle);
    /* Reset the slot so a missing property yields the default. */
    core->map->string_property = NULL;
    load_property(name_hash, core->map->handle->properties, prop_cnt, core);
    return core->map->string_property;
}
/* Stores the directory portion of map_file_name in core->map->path (used
 * later to resolve relative tileset image paths).
 * Returns CORE_ERROR on allocation failure, CORE_OK otherwise.
 */
status_t load_map_path(const char* map_file_name, core_t* core)
{
    /* Allocate for the full file name; only the dirname part is copied. */
    core->map->path = (char*)calloc(1, (size_t)(strlen(map_file_name) + 1));
    if (! core->map->path)
    {
        dbgprint("%s: error allocating memory.", FUNCTION_NAME);
        return CORE_ERROR;
    }

    /* cwk_path_get_dirname only reports the dirname length; the copy below
     * truncates the file name at that point.
     * NOTE(review): the (size_t*) cast assumes path_length has size_t width —
     * confirm the field's declared type. */
    cwk_path_get_dirname(map_file_name, (size_t*)&(core->map->path_length));
    SDL_strlcpy(core->map->path, map_file_name, core->map->path_length + 1);
    return CORE_OK;
}
/* Loads a BMP file into an SDL texture; magenta (0xff00ff) is treated as the
 * transparent color key.
 * Returns CORE_WARNING when file_name is NULL, CORE_ERROR on load/creation
 * failure, CORE_OK on success. The texture is written to *texture.
 */
status_t load_texture_from_file(const char* file_name, SDL_Texture** texture, core_t* core)
{
    SDL_Surface* surface;

    if (! file_name)
    {
        return CORE_WARNING;
    }

    surface = SDL_LoadBMP(file_name);
    if (NULL == surface)
    {
        dbgprint("Failed to load image: %s", SDL_GetError());
        return CORE_ERROR;
    }

    /* A failed color key is only logged: the texture is still usable, just
     * without transparency. */
    if (0 != SDL_SetColorKey(surface, SDL_TRUE, SDL_MapRGB(surface->format, 0xff, 0x00, 0xff)))
    {
        dbgprint("Failed to set color key for %s: %s", file_name, SDL_GetError());
    }

    *texture = SDL_CreateTextureFromSurface(core->renderer, surface);
    if (NULL == *texture)
    {
        dbgprint("Could not create texture from surface: %s", SDL_GetError());
        SDL_FreeSurface(surface);
        return CORE_ERROR;
    }
    /* The surface is no longer needed once the texture exists. */
    SDL_FreeSurface(surface);

    dbgprint("Loading image from file: %s.", file_name);
    return CORE_OK;
}
/* Resolves the map's tileset image path and loads the image into
 * core->map->tileset_texture.
 * Returns CORE_ERROR on allocation or load failure, CORE_OK on success.
 */
status_t load_tileset(core_t* core)
{
    status_t status = CORE_OK;
    char* image_path = NULL;
    Sint32 path_length = get_tileset_path_length(core);

    image_path = (char*)calloc(1, path_length);
    if (! image_path)
    {
        dbgprint("%s: error allocating memory.", FUNCTION_NAME);
        return CORE_ERROR;
    }

    set_tileset_path(image_path, path_length, core);

    if (CORE_OK != load_texture_from_file(image_path, &core->map->tileset_texture, core))
    {
        dbgprint("%s: Error loading image '%s'.", FUNCTION_NAME, image_path);
        status = CORE_ERROR;
    }

    /* The path buffer is only needed for the load call. */
    free(image_path);
    return status;
}
/* Counts the animated tiles on all visible tile layers and allocates the
 * core->map->animated_tile array accordingly. The array itself is filled
 * later, during rendering.
 * Returns CORE_ERROR on allocation failure, CORE_OK otherwise (including
 * when the map has no animated tiles).
 *
 * FIXES:
 *  - get_layer_content() is now called once per layer instead of once per
 *    tile (it was loop-invariant inside the width/height scan).
 *  - The dbgprint format specifier for the Sint32 count was "%u"; it is now
 *    "%d" to match the signed type.
 */
status_t load_animated_tiles(core_t* core)
{
    tmx_layer* layer = get_head_layer(core->map->handle);
    Sint32 animated_tile_count = 0;
    Sint32 index_height = 0;
    Sint32 index_width = 0;

    while (layer)
    {
        if (is_tiled_layer_of_type(L_LAYER, layer) && layer->visible)
        {
            /* Hoisted: the layer content does not change while scanning a
             * single layer. */
            Sint32* layer_content = get_layer_content(layer);

            for (index_height = 0; index_height < (Sint32)core->map->handle->height; index_height += 1)
            {
                for (index_width = 0; index_width < (Sint32)core->map->handle->width; index_width += 1)
                {
                    Sint32 gid = remove_gid_flip_bits((Sint32)layer_content[(index_height * (Sint32)core->map->handle->width) + index_width]);

                    if (is_tile_animated(gid, NULL, NULL, core->map->handle))
                    {
                        animated_tile_count += 1;
                    }
                }
            }
        }
        layer = layer->next;
    }

    if (0 >= animated_tile_count)
    {
        return CORE_OK;
    }
    else
    {
        core->map->animated_tile = (animated_tile_t*)calloc((size_t)animated_tile_count, sizeof(struct animated_tile));
        if (!core->map->animated_tile)
        {
            dbgprint("%s: error allocating memory.", FUNCTION_NAME);
            return CORE_ERROR;
        }
    }

    dbgprint("Load %d animated tile(s).", animated_tile_count);
    return CORE_OK;
}
/* Lazily creates a 176x208 render-target texture (created once, reused on
 * later calls), enables alpha blending on it, selects it as the current
 * render target and clears it.
 * NOTE(review): 176x208 matches the fixed blit size used in draw_scene —
 * presumably the target device resolution; confirm before changing.
 * Returns CORE_ERROR on any SDL failure (destroying the texture first),
 * CORE_OK on success.
 */
status_t create_and_set_render_target(SDL_Texture** target, core_t* core)
{
    if (! (*target))
    {
        (*target) = SDL_CreateTexture(
            core->renderer,
            SDL_PIXELFORMAT_RGB444,
            SDL_TEXTUREACCESS_TARGET,
            176,
            208);
    }

    if (! (*target))
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        return CORE_ERROR;
    }
    else
    {
        if (0 > SDL_SetTextureBlendMode((*target), SDL_BLENDMODE_BLEND))
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            SDL_DestroyTexture((*target));
            return CORE_ERROR;
        }
    }

    if (0 > SDL_SetRenderTarget(core->renderer, (*target)))
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        SDL_DestroyTexture((*target));
        return CORE_ERROR;
    }

    SDL_RenderClear(core->renderer);
    return CORE_OK;
}
/* Resolves a boolean custom property from an explicit property list
 * (e.g. a layer's or object's properties). Returns SDL_FALSE when absent. */
SDL_bool get_boolean_property(const Uint64 name_hash, tmx_property* properties, Sint32 property_count, core_t* core)
{
    core->map->boolean_property = SDL_FALSE;
    load_property(name_hash, properties, property_count, core);
    return core->map->boolean_property;
}
/* Resolves a decimal custom property from an explicit property list.
 * Returns 0.0 when absent. */
double get_decimal_property(const Uint64 name_hash, tmx_property* properties, Sint32 property_count, core_t* core)
{
    core->map->decimal_property = 0.0;
    load_property(name_hash, properties, property_count, core);
    return core->map->decimal_property;
}
/* Resolves an integer custom property from an explicit property list.
 * Returns 0 when absent. */
int32_t get_integer_property(const Uint64 name_hash, tmx_property* properties, Sint32 property_count, core_t* core)
{
    core->map->integer_property = 0;
    load_property(name_hash, properties, property_count, core);
    return core->map->integer_property;
}
/* Resolves a string custom property from an explicit property list.
 * Returns NULL when absent; the pointer is owned by the TMX handle. */
const char* get_string_property(const Uint64 name_hash, tmx_property* properties, Sint32 property_count, core_t* core)
{
    core->map->string_property = NULL;
    load_property(name_hash, properties, property_count, core);
    return core->map->string_property;
}
/* Renders one map layer level into its render target.
 *
 * level - MAP_LAYER_BG or MAP_LAYER_FG (must be < MAP_LAYER_MAX).
 *
 * The static tile layers are rasterised once into layer_texture[level] and
 * re-used on subsequent frames; animated tiles (background layer only) are
 * re-rendered when enough time has passed for the configured FPS.
 * Returns CORE_OK when no map is loaded; CORE_ERROR on SDL failures.
 */
status_t render_map(Sint32 level, core_t* core)
{
    tmx_layer* layer;
    SDL_bool render_animated_tiles = SDL_FALSE;
    render_layer render_layer = RENDER_MAP_FG;
    Sint32 index;

    if (! core->is_map_loaded)
    {
        return CORE_OK;
    }

    layer = get_head_layer(core->map->handle);

    if (level >= MAP_LAYER_MAX)
    {
        dbgprint("%s: invalid layer level selected.", FUNCTION_NAME);
        return CORE_ERROR;
    }

    if (MAP_LAYER_BG == level)
    {
        render_layer = RENDER_MAP_BG;

        /* Animated tiles only run when an animation FPS is configured. */
        if (0 < core->map->animated_tile_fps)
        {
            render_animated_tiles = SDL_TRUE;
        }
    }

    if (CORE_OK != create_and_set_render_target(&core->map->render_target[render_layer], core))
    {
        return CORE_ERROR;
    }

    // Update and render animated tiles.
    core->map->time_since_last_anim_frame += core->time_since_last_frame;

    if (0 < core->map->animated_tile_index &&
        core->map->time_since_last_anim_frame >= 1000 / (Sint32)(core->map->animated_tile_fps) && render_animated_tiles)
    {
        core->map->time_since_last_anim_frame = 0;

        /* Remark: animated tiles are always rendered in the background
         * layer.
         */
        if (! core->map->animated_tile_texture)
        {
            core->map->animated_tile_texture = SDL_CreateTexture(
                core->renderer,
                SDL_PIXELFORMAT_RGB444,
                SDL_TEXTUREACCESS_TARGET,
                (Sint32)(core->map->width),
                (Sint32)(core->map->height));
        }

        if (! core->map->animated_tile_texture)
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            return CORE_ERROR;
        }

        if (0 > SDL_SetRenderTarget(core->renderer, core->map->animated_tile_texture))
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            return CORE_ERROR;
        }
        SDL_RenderClear(core->renderer);

        /* Blit each animated tile's current frame, then advance it. */
        for (index = 0; core->map->animated_tile_index > index; index += 1)
        {
            /* NOTE(review): `tileset` is assigned below but never used —
             * candidate for removal. */
            tmx_tileset* tileset;
            Sint32 gid = core->map->animated_tile[index].gid;
            Sint32 next_tile_id = 0;
            Sint32 local_id;
            SDL_Rect dst;
            SDL_Rect src;

            local_id = core->map->animated_tile[index].id + 1;
            tileset = get_head_tileset(core->map->handle);
            src.w = dst.w = get_tile_width(core->map->handle);
            src.h = dst.h = get_tile_height(core->map->handle);
            dst.x = core->map->animated_tile[index].dst_x;
            dst.y = core->map->animated_tile[index].dst_y;
            get_tile_position(local_id, (Uint32*)&src.x, (Uint32*)&src.y, core->map->handle);

            if (0 > SDL_RenderCopy(core->renderer, core->map->tileset_texture, &src, &dst))
            {
                dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
                return CORE_ERROR;
            }

            /* Advance the animation, wrapping at its end. */
            core->map->animated_tile[index].current_frame += 1;

            if (core->map->animated_tile[index].current_frame >= core->map->animated_tile[index].animation_length)
            {
                core->map->animated_tile[index].current_frame = 0;
            }

            next_tile_id = get_next_animated_tile_id(gid, core->map->animated_tile[index].current_frame, core->map->handle);
            core->map->animated_tile[index].id = next_tile_id;
        }

        if (0 > SDL_SetRenderTarget(core->renderer, core->map->render_target[render_layer]))
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            return CORE_ERROR;
        }
        SDL_RenderClear(core->renderer);

        if (0 > SDL_SetTextureBlendMode(core->map->animated_tile_texture, SDL_BLENDMODE_BLEND))
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            return CORE_ERROR;
        }
    }

    // Texture has already been rendered.
    if (core->map->layer_texture[level])
    {
        Sint32 render_pos_x = core->map->pos_x - core->camera.pos_x;
        Sint32 render_pos_y = core->map->pos_y - core->camera.pos_y;
        SDL_Rect dst = {
            (Sint32)render_pos_x,
            (Sint32)render_pos_y,
            (Sint32)core->map->width,
            (Sint32)core->map->height
        };

        if (0 > SDL_RenderCopyEx(core->renderer, core->map->layer_texture[level], NULL, &dst, 0, NULL, SDL_FLIP_NONE))
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            return CORE_ERROR;
        }
        /*
        if (render_animated_tiles)
        {
        if (core->map->animated_tile_texture)
        {
        if (0 > SDL_RenderCopyEx(core->renderer, core->map->animated_tile_texture, NULL, &dst, 0, NULL, SDL_FLIP_NONE))
        {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        return CORE_ERROR;
        }
        }
        }
        */
        return CORE_OK;
    }

    // Texture does not yet exist. Render it!
    core->map->layer_texture[level] = SDL_CreateTexture(
        core->renderer,
        SDL_PIXELFORMAT_RGB444,
        SDL_TEXTUREACCESS_TARGET,
        (Sint32)core->map->width,
        (Sint32)core->map->height);

    if (! core->map->layer_texture[level])
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        return CORE_ERROR;
    }

    if (0 > SDL_SetRenderTarget(core->renderer, core->map->layer_texture[level]))
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        return CORE_ERROR;
    }
    SDL_RenderClear(core->renderer);

    /* Rasterise every visible tile layer into the new texture. */
    while (layer)
    {
        SDL_Rect dst;
        SDL_Rect src;

        if (is_tiled_layer_of_type(L_LAYER, layer))
        {
            /* NOTE(review): `prop_cnt` is computed but never used here —
             * candidate for removal. */
            Sint32 prop_cnt = get_layer_property_count(layer);

            if (layer->visible)
            {
                Sint32 index_height;
                Sint32 index_width;

                for (index_height = 0; index_height < (Sint32)core->map->handle->height; index_height += 1)
                {
                    for (index_width = 0; index_width < (Sint32)core->map->handle->width; index_width += 1)
                    {
                        Sint32* layer_content = get_layer_content(layer);
                        Sint32 gid = remove_gid_flip_bits((Sint32)layer_content[(index_height * (Sint32)core->map->handle->width) + index_width]);
                        /* NOTE(review): `local_id` and `tileset` below are
                         * unused in this branch. */
                        Sint32 local_id = gid - get_first_gid(core->map->handle);

                        if (is_gid_valid(gid, core->map->handle))
                        {
                            tmx_tileset* tileset = get_head_tileset(core->map->handle);

                            src.w = dst.w = get_tile_width(core->map->handle);
                            src.h = dst.h = get_tile_height(core->map->handle);
                            dst.x = (Sint32)(index_width * get_tile_width(core->map->handle));
                            dst.y = (Sint32)(index_height * get_tile_height(core->map->handle));
                            get_tile_position(gid, (Uint32*)&src.x, (Uint32*)&src.y, core->map->handle);
                            SDL_RenderCopy(core->renderer, core->map->tileset_texture, &src, &dst);

                            /* Record animated tiles so the animation pass
                             * above knows where to draw them. */
                            if (render_animated_tiles)
                            {
                                Sint32 animation_length = 0;
                                Sint32 id = 0;

                                if (is_tile_animated(gid, &animation_length, &id, core->map->handle))
                                {
                                    core->map->animated_tile[core->map->animated_tile_index].gid = get_local_id(gid, core->map->handle);
                                    core->map->animated_tile[core->map->animated_tile_index].id = id;
                                    core->map->animated_tile[core->map->animated_tile_index].dst_x = dst.x;
                                    core->map->animated_tile[core->map->animated_tile_index].dst_y = dst.y;
                                    core->map->animated_tile[core->map->animated_tile_index].current_frame = 0;
                                    core->map->animated_tile[core->map->animated_tile_index].animation_length = animation_length;
                                    core->map->animated_tile_index += 1;
                                }
                            }
                        }
                    }
                }

                {
                    const char* layer_name = get_layer_name(layer);
                    dbgprint("Render map layer: %s", layer_name);
                }
            }
        }
        layer = layer->next;
    }

    if (0 > SDL_SetRenderTarget(core->renderer, core->map->render_target[render_layer]))
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        return CORE_ERROR;
    }

    if (0 > SDL_SetTextureBlendMode(core->map->layer_texture[level], SDL_BLENDMODE_BLEND))
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
        return CORE_ERROR;
    }
    return CORE_OK;
}
/* Renders every map layer level in order; stops at the first failure and
 * propagates its status. */
status_t render_scene(core_t* core)
{
    Sint32 layer_level;

    for (layer_level = 0; layer_level < MAP_LAYER_MAX; layer_level += 1)
    {
        status_t status = render_map(layer_level, core);

        if (CORE_OK != status)
        {
            return status;
        }
    }
    return CORE_OK;
}
/* Composites all render targets onto the default framebuffer and presents
 * the frame. When no map is loaded, only a black screen is presented.
 */
status_t draw_scene(core_t* core)
{
    SDL_Rect dst;
    Sint32 index;

    /* Switch back to the window's default render target; failure is only
     * logged since presenting may still work. */
    if (0 > SDL_SetRenderTarget(core->renderer, NULL))
    {
        dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
    }

    if (! core->is_map_loaded)
    {
        SDL_SetRenderDrawColor(core->renderer, 0x00, 0x00, 0x00, 0x00);
        SDL_RenderPresent(core->renderer);
        SDL_RenderClear(core->renderer);
        return CORE_OK;
    }

    /* Fixed 176x208 output size — matches the render targets created in
     * create_and_set_render_target. */
    dst.x = 0;
    dst.y = 0;
    dst.w = 176;
    dst.h = 208;

    for (index = 0; index < RENDER_LAYER_MAX; index += 1)
    {
        if (0 > SDL_RenderCopy(core->renderer, core->map->render_target[index], NULL, &dst))
        {
            dbgprint("%s: %s.", FUNCTION_NAME, SDL_GetError());
            return CORE_ERROR;
        }
    }

    SDL_RenderPresent(core->renderer);
    SDL_RenderClear(core->renderer);
    return CORE_OK;
}
/* tmx_property_foreach callback: when a property's name hash matches the
 * current query (core->map->hash_query), its value is copied into the
 * matching core->map->*_property slot for the get_*_property helpers.
 */
static void tmxlib_store_property(tmx_property* property, void* core)
{
    core_t* core_ptr = core;

    if (core_ptr->map->hash_query == generate_hash((const unsigned char*)property->name))
    {
        switch (property->type)
        {
            /* Color and typeless properties are not supported. */
            case PT_COLOR:
            case PT_NONE:
                break;
            case PT_BOOL:
                dbgprint("Loading boolean property '%s': %u", property->name, property->value.boolean);
                core_ptr->map->boolean_property = (SDL_bool)property->value.boolean;
                break;
            /* File properties are exposed through the string slot. */
            case PT_FILE:
                dbgprint("Loading string property '%s': %s", property->name, property->value.file);
                core_ptr->map->string_property = property->value.file;
                break;
            case PT_FLOAT:
                dbgprint("Loading decimal property '%s': %f", property->name, (double)property->value.decimal);
                core_ptr->map->decimal_property = (double)property->value.decimal;
                break;
            case PT_INT:
                dbgprint("Loading integer property '%s': %d", property->name, property->value.integer);
                core_ptr->map->integer_property = property->value.integer;
                break;
            case PT_STRING:
                dbgprint("Loading string property '%s': %s", property->name, property->value.string);
                core_ptr->map->string_property = property->value.string;
                break;
        }
    }
}
|
#!/bin/bash
# Reset the coreutils submodule checkout: remove the stale `.git` gitlink
# files so that `git submodule update` re-creates them cleanly.
#
# FIXES: fail fast on errors (set -euo pipefail); `rm -f --` tolerates
# already-removed files and guards against option-like paths.
set -euo pipefail

# In a submodule checkout, `.git` is a plain file pointing at the parent
# repository's git directory.
rm -f -- coreutils/gnulib/.git coreutils/.git
git submodule update
|
-- Enforce referential integrity: every version_downloads row must reference
-- an existing versions row.
ALTER TABLE version_downloads ADD CONSTRAINT fk_version_downloads_version_id
    FOREIGN KEY (version_id) REFERENCES versions (id);
<gh_stars>10-100
const path = require('path');
const nodeDiskInfo = require('node-disk-info');
const helper = require ("../helper");
const fs = require('fs');
const filesystem = require("./filesystem");
const { ipcRenderer } = require('electron');
const df = require('node-df');
const util = require('util');
const logger = require('../logger');
const dfp = util.promisify(df);
// Drive/device handling for the TonUINO card manager.
let devices = {
    // Folder names reserved by the TonUINO firmware on the SD card.
    tonuino_system_folders: ['mp3', 'advert'],

    // Returns the list of candidate drives for the current platform.
    // NOTE(review): the `callback` parameter is never used — callers appear
    // to consume the returned promise instead; confirm before removing.
    list: async (callback) => {
        if(helper.isWindows()) {
            return await devices.listWindows();
        }
        else {
            return await devices.listWinMac();
        }
    },
    // Lists drives via `df` (used on non-Windows platforms, despite the name).
    // Pseudo filesystems (udev/tmpfs) and small volumes are filtered out.
    listWinMac: async () => {
        let drives = await dfp();
        logger.log(drives);
        let out = [];
        await helper.asyncForEach(drives, async (drive) => {
            // NOTE(review): 1065483 appears to be a minimum size threshold in
            // 1K blocks (~1 GB) — confirm the intended cut-off.
            if(drive.size > 1065483 && drive.filesystem.indexOf('udev') === -1 && drive.filesystem.indexOf('tmpfs') === -1) {
                // df reports sizes in 1K blocks; convert to bytes.
                let faktor = 1024;
                out.push({
                    name: drive.filesystem,
                    path: drive.mount,
                    size: drive.size*faktor,
                    free: drive.available*faktor,
                    busy: drive.used*faktor,
                    size_format: helper.bytesToSize(drive.size*faktor),
                    free_format: helper.bytesToSize(drive.available*faktor),
                    busy_format: helper.bytesToSize(drive.used*faktor)
                });
            }
        });
        return out;
    },
    // Lists drives via node-disk-info (Windows path; the faktor branches show
    // it also handles macOS block sizes).
    // NOTE(review): getDiskInfoSync is called with `await` although its name
    // suggests it is synchronous — harmless, but confirm and drop the await.
    listWindows: async () => {
        const drives = await nodeDiskInfo.getDiskInfoSync();
        let out = [];
        await helper.asyncForEach(drives, async (drive) => {
            // NOTE(review): 1999136 blocks looks like a minimum-size filter —
            // confirm the intended cut-off.
            if(drive.blocks > 1999136 && drive.filesystem.indexOf('udev') === -1 && drive.filesystem.indexOf('tmpfs') === -1) {
                // Use the last path segment of the mount point as display name,
                // falling back to the filesystem identifier.
                let mount_parts = drive.mounted.split('/');
                let name = mount_parts[mount_parts.length-1]+'';
                if(name === '') {
                    name = drive.filesystem;
                }
                // Block size differs per platform: 1 byte on Windows,
                // 512 bytes on macOS, 1K elsewhere.
                let faktor = 1024;
                if(helper.isWindows()) {
                    faktor = 1;
                    name += '/';
                }
                else if(helper.isMac()) {
                    faktor = 512;
                }
                out.push({
                    name: name,
                    path: drive.mounted,
                    size: drive.blocks*faktor,
                    free: drive.available*faktor,
                    busy: drive.used*faktor,
                    size_format: helper.bytesToSize(drive.blocks*faktor),
                    free_format: helper.bytesToSize(drive.available*faktor),
                    busy_format: helper.bytesToSize(drive.used*faktor)
                });
            }
        });
        return out;
    },
    // Scans the root of a drive and classifies each entry as a TonUINO audio
    // folder ("01".."99"), a TonUINO system folder (mp3/advert) or "other".
    // NOTE(review): the `cb` parameter is unused — confirm before removing.
    listAll: async (drive, cb) => {
        let files = await filesystem.list(drive.path);
        let out = {
            tonuino_folder: [],
            tonuino_system: [],
            other: []
        };
        let i = 0;
        await helper.asyncForEach(files, async (file) => {
            i++;
            let folder = {
                name: file,
                artists: [],
                albums: [],
                folder_name: file,
                title: [],
                type: 'other',
                filetype: null,
                path: path.join(drive.path, file),
                image: null
            };
            if(fs.lstatSync(path.join(drive.path, file)).isDirectory()) {
                /*
                 * progress message passed down to the folder scan
                 */
                let status_message = '(' + i + '/' + files.length + ') Lese Ordner ' + file + '';
                folder.filetype = 'folder';
                // TonUINO audio folders are exactly two digits ("01".."99").
                if (file.length === 2) {
                    folder.number = parseInt(file);
                    if (folder.number > 0) {
                        folder.image = await filesystem.getFirstAlbumArtCover(path.join(drive.path, file), file);
                        folder.type = 'tonuino_folder';
                        folder.title = await filesystem.getAllMp3FromFolder(path.join(drive.path, file),{
                            status: true,
                            status_text: status_message
                        });
                        let folder_names = await devices.getNamesFromTracks(folder.title);
                        folder.artists = folder_names.artists;
                        folder.albums = folder_names.albums;
                    }
                }
                if (devices.tonuino_system_folders.indexOf(file) !== -1) {
                    folder.type = 'tonuino_system';
                }
            }
            else {
                folder.filetype = 'file';
            }
            out[folder.type].push(folder);
        });
        return out;
    },
getNamesFromTracks: async (tracks) => {
let artists = [];
let albums = [];
await helper.asyncForEach(tracks, async (track) => {
artists.push(track.artist);
albums.push(track.album);
});
artists = await helper.arrayUnique(artists);
albums = await helper.arrayUnique(albums);
return {
artists: artists,
albums: albums
}
},
purge: async (drive) => {
let files = await filesystem.list(drive.path);
let allowed = ['mp3', 'advert'];
let system_mp3_folder = allowed;
for(let i=1;i<=99;i++) {
allowed.push(('00'+i).slice(-2))
}
await helper.asyncForEach(files, async (file) => {
let current_path = path.join(drive.path, file);
if(fs.lstatSync(current_path).isDirectory()) {
/*
* wenn Ordnername nicht erlaubt ist Verzeichnis rekursiv löschen
*/
if(allowed.indexOf(file) === -1) {
await filesystem.removeAll(current_path);
}
/*
* ansonsten alle Dateien ausser mp3s im Ordner löschen
*/
else {
let mp3s = await filesystem.list(current_path);
await helper.asyncForEach(mp3s, async (mp3) => {
let file_extension = mp3.split('.').pop();
if(file_extension !== 'mp3') {
await filesystem.removeAll(path.join(current_path, mp3));
}
});
/*
* Ordner löschen wenn er leer ist
*/
mp3s = await filesystem.list(current_path);
if(mp3s.length === 0) {
await filesystem.removeAll(current_path);
}
/*
* Mp3s sortieren, wenn kein system Ordner
*/
else if(system_mp3_folder.indexOf(file) === -1){
await filesystem.mp3Sorter(current_path);
}
}
}
/*
* wenn nicht Datei löschen
*/
else {
await fs.unlinkSync(current_path);
}
});
await devices.folderSorter(drive.path);
},
/*
* sortiert Ordner der 1. Ebene 01-99
*/
folderSorter: async (fullpath) => {
let folders = await filesystem.list(fullpath);
folders.sort();
let i = 0;
await helper.asyncForEach(folders, async (folder) => {
/*
* system Ordner ausblenden
*/
if(devices.tonuino_system_folders.indexOf(folder) === -1) {
i++;
let should_foldername = ('00' + i).slice(-2);
if(folder !== should_foldername) {
await fs.renameSync(path.join(fullpath, folder), path.join(fullpath, should_foldername));
}
}
});
return await filesystem.list(fullpath);
}
};
module.exports = devices; |
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ratpack.http.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import ratpack.http.MediaType;
import ratpack.util.internal.BoundedConcurrentHashMap;
import java.nio.charset.Charset;
import java.util.concurrent.ConcurrentMap;
/**
 * Default {@link MediaType} implementation backed by Guava's media type parser.
 * Parsed instances are memoised in a bounded cache keyed by the raw
 * Content-Type string.
 */
public class DefaultMediaType implements MediaType {
  public static final String CHARSET_KEY = "charset";

  // "type/subtype", or null when the header was empty/blank.
  private final String type;
  // Media type parameters (e.g. charset), in declaration order.
  private final ImmutableListMultimap<String, String> params;
  // Normalised string form; empty string when no type was given.
  private final String string;

  private static final int CACHE_SIZE = 1024;
  private static final ConcurrentMap<String, MediaType> CACHE = new BoundedConcurrentHashMap<>(CACHE_SIZE, Runtime.getRuntime().availableProcessors());

  /**
   * Returns a (cached) media type for the given Content-Type header value.
   * A null value is treated as empty.
   */
  public static MediaType get(final String contentType) {
    String trimmed = contentType;
    if (trimmed == null) {
      trimmed = "";
    } else {
      trimmed = trimmed.trim();
    }
    return CACHE.computeIfAbsent(trimmed, DefaultMediaType::new);
  }

  public DefaultMediaType(String value) {
    if (value == null || value.trim().length() == 0) {
      type = null;
      params = ImmutableListMultimap.of();
      string = "";
    } else {
      com.google.common.net.MediaType mediaType = com.google.common.net.MediaType.parse(value.trim());
      if (mediaType != null && mediaType.type() != null) {
        if (mediaType.subtype() != null) {
          type = mediaType.type() + "/" + mediaType.subtype();
        } else {
          type = mediaType.type();
        }
        params = mediaType.parameters();
        string = mediaType.toString();
      } else {
        // Unparseable value: behave like an empty content type.
        type = null;
        params = ImmutableListMultimap.of();
        string = "";
      }
    }
  }

  public String getType() {
    return type;
  }

  public ImmutableListMultimap<String, String> getParams() {
    return params;
  }

  /** Charset parameter as a canonical name, or null when absent. */
  public String getCharset() {
    return getCharset(null);
  }

  /**
   * Charset parameter as a canonical charset name, or {@code defaultCharset}
   * when absent. Throws IllegalStateException when the parameter appears
   * more than once.
   */
  public String getCharset(String defaultCharset) {
    ImmutableList<String> charsetValues = params.get(CHARSET_KEY);
    switch (charsetValues.size()) {
      case 0:
        return defaultCharset;
      case 1:
        return Charset.forName(charsetValues.get(0)).toString();
      default:
        throw new IllegalStateException("Multiple charset values defined: " + charsetValues);
    }
  }

  public boolean isText() {
    return getType() != null && getType().startsWith("text/");
  }

  public boolean isJson() {
    return getType() != null && (getType().equals(APPLICATION_JSON) || getType().endsWith(JSON_SUFFIX));
  }

  public boolean isForm() {
    return getType() != null && getType().equals(APPLICATION_FORM);
  }

  @Override
  public boolean isHtml() {
    return getType() != null && getType().equals(TEXT_HTML);
  }

  public boolean isEmpty() {
    return getType() == null;
  }

  @Override
  public String toString() {
    return string;
  }
}
|
import flask
# FIX: `Flask` itself was never imported, so `Flask(__name__)` raised a
# NameError at startup.
from flask import Flask, request, jsonify

app = Flask(__name__)

# data to be searched
data = {...}


@app.route('/api/search', methods=['GET'])
def search():
    """Search endpoint: looks up the `query` request arg in `data`."""
    query = request.args.get('query')
    result = search_data(query, data)
    return jsonify(result)


def search_data(query, data):
    # TODO: implement a search algorithm here (this is template/skeleton
    # code; it currently returns None).
    ...


if __name__ == '__main__':
    app.run()
<gh_stars>1-10
// 1260. DFS와 BFS
// 2019.05.14
// DFS, BFS
#include<iostream>
#include<queue>
using namespace std;
int arr[1001][1001];  // adjacency matrix; vertices are 1-indexed (n <= 1000)
bool visit[1001];     // per-vertex visited flags, shared by DFS and BFS
// Recursive depth-first search from vertex v over the n-vertex graph in
// `arr`; prints each vertex in visit order. Lower-numbered neighbours are
// explored first.
void DFS(int v, int n)
{
    visit[v] = true;
    cout << v << " ";
    for (int i = 1; i <= n; i++)
    {
        if (arr[v][i] == 1 && visit[i] == false)
        {
            DFS(i, n);
        }
    }
}
// Breadth-first search from vertex v over the n-vertex graph in `arr`;
// prints each vertex in visit order (lower-numbered neighbours first).
// FIX: the neighbour scan now starts at 1 — vertices are 1-indexed, so
// index 0 was never a valid neighbour (inconsistent with DFS, and a wasted
// row-0 probe per dequeued vertex).
void BFS(int v, int n)
{
    queue<int> q;
    visit[v] = true;
    q.push(v);
    cout << v << " ";
    while (!q.empty())
    {
        int temp = q.front();
        q.pop();
        for (int i = 1; i <= n; i++)
        {
            if (arr[temp][i] == 1 && visit[i] == false)
            {
                q.push(i);
                visit[i] = true;
                cout << i << " ";
            }
        }
    }
}
// Reads n (vertices), m (edges) and v (start vertex), builds an undirected
// adjacency matrix, then prints the DFS order followed by the BFS order.
int main(void)
{
    int n, m, v;
    cin >> n >> m >> v;
    while (m > 0)
    {
        m--;
        int a, b;
        cin >> a >> b;
        // undirected edge: set both directions
        arr[a][b] = 1;
        arr[b][a] = 1;
    }
    DFS(v, n);
    cout << endl;
    for (int i = 0; i < 1001; i++)  // reset visited flags before BFS
    {
        visit[i] = false;
    }
    BFS(v, n);
    cout << endl;
    return 0;
}
|
#!/bin/bash
# Convert a Veracode pipeline-scan JSON report (.github/files/results.json)
# into a SARIF 2.1.0 document (veracode-sarif.json) for GitHub code scanning.
#
# FIXES over the previous revision:
#   * The output file is truncated at the start of each run. The old version
#     used `touch` + `>>` only, so re-running appended a second (invalid)
#     document after the stale one.
#   * Deprecated `$[ ... ]` arithmetic replaced by `$(( ... ))`.
#   * Paths are defined once and variables are quoted.
set -u

jq_bin='.github/files/jq-linux64'
report='.github/files/results.json'
out='veracode-sarif.json'

lines=$("$jq_bin" '.results.TestResults.Issues.Issue[].Title' "$report" | wc -l)
i=0

# Start with a fresh output file and the SARIF preamble.
echo '
{
"$schema" : "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
"version" : "2.1.0",
"runs" :
[
{
' > "$out"

while [ "$i" -lt "$lines" ]; do
  # tool descriptor + start of the results array for this issue
  echo '
"tool" : {
"driver" : {
"name" : "Veracode Pipeline Scanner"
}
},
"results" : [ {' >> "$out"

  # message tag: issue title and type
  title=$("$jq_bin" ".results.TestResults.Issues.Issue[$i].Title" "$report" | sed 's/"//g')
  issuetype=$("$jq_bin" ".results.TestResults.Issues.Issue[$i].IssueType" "$report" | sed 's/"//g')
  echo '
"message" : {
"text" : "'"$title"' - '"$issuetype"'"
},
' >> "$out"

  # locations tag: source file, line and function of the finding
  file=$("$jq_bin" ".results.TestResults.Issues.Issue[$i].Files.SourceFile.File" "$report" | sed 's/"//g')
  line=$("$jq_bin" ".results.TestResults.Issues.Issue[$i].Files.SourceFile.Line" "$report" | sed 's/"//g')
  func=$("$jq_bin" ".results.TestResults.Issues.Issue[$i].Files.SourceFile.FunctionName" "$report" | sed 's/"//g')
  echo '
"locations" : [
{
"physicalLocation" : {
"artifactLocation" : {
"uri" : "File: '"$file"' - Line: '"$line"' - Function: '"$func"'"
},
"region" : {
"startLine" : '"$line"',
"startColumn" : 0,
"endColumn" : 0
}
}
}],
' >> "$out"

  # fingerprint stub + end of the results array
  echo '
"partialFingerprints" : {
"primaryLocationLineHash" : "NULL"
}' >> "$out"
  echo '
}]
}' >> "$out"

  i=$((i + 1))

  # open the next run object unless this was the last issue
  if [ "$i" -lt "$lines" ]; then
    echo '
,{' >> "$out"
  fi
done

# close the runs array and the document
echo '
]
}
' >> "$out"
/* eslint-disable no-irregular-whitespace */
/* eslint-disable func-names */
/* eslint-disable max-len */
/* eslint quote-props: ['error', 'consistent'] */
module.exports = {
'COMMAND_EN_US':{
'save_changeset':'Git stash will temporarily store any changes you have made since your last commit.',
'restore_changeset':'Git stash pop or git stash apply will restore the most recently stashed files',
'get_changeset':'Git stash list, lists all stashed changesets',
'remove_changeset':'Git stash drop, will discards the most recently stashed changeset',
'get_history':'Git log lists version history for the current branch, Git log dash dash follow [file] lists version history for a file, including renames',
'get_differences':'Git diff shows file differences not yet staged, Git diff [first-branch]…[second-branch] shows content differences between two branches',
'get_commit':'Git show [commit] outputs metadata and content changes of the specified commit',
'undo_commit':'Git reset [commit] undoes all commits afer [commit], preserving changes locally, git reset dash dash hard [commit] discards all history and changes back to the specified commit',
'get_branch':'Git branch lists all local branches in the current repository',
'add_file':'Git add [file] snapshots the file in preparation for versioning',
'add_commit':'Git commit dash m records file snapshots permanently in version history',
'add_branch':'Git push [alias] [branch] uploads all local branch commits to GitHub',
'get_repository':'Git pull downloads bookmark history and incorporates changes, basically a git fetch and git merge in one!',
'create_repository':'Git init creates a new local repository with the specified name',
'copy_repository':'Git clone [url] downloads a project and its entire version history',
'get_file':'Git status lists all new or modified files to be commited',
'undo_file':'Git reset [file] unstages the file, but preserve its contents',
'create_branch':'Git branch-name creates a new branch, git branch dash b [branch-name] creates a new branch with the specified name and switches your working branch to that branch',
'update_branch':'Git checkout [branch-name] switches to the specified branch and updates the working directory',
'merge_branch':'Git merge [branch] combines the specified branch’s history into the current branch',
'update_file':'Git mv [file-original] [file-renamed] changes the file name and prepares it for commit',
'remove_branch':'Git branch dash d [branch-name] deletes the specified branch locally, git push dash d <remote_name> <branch_name> pushes the delete to remote',
'remove_file':'Git rm [file] deletes the file from the working directory and stages the deletion, git rm dash dash cached [file] removes the file from version control but preserves the file locally',
}
};
|
#! /bin/bash
# Extract every string literal passed to word("...") / word('...') from the
# project's Python and HTML sources (recursively, matches only) and print
# each unique string as a `"key": Null` line — e.g. to seed a translation
# dictionary.
grep --include='*.py' --include='*.html' -Roh -e 'word(u*"[^"]*"[,)]' -e "word(u*'[^']*'[,)]" | sed -e 's/word(u*"\(.*\)"[,)]/"\1": Null/' -e "s/word(u*'\(.*\)'[,)]/\"\1\": Null/" | sort | uniq
|
import { Grid, Tab, Tabs } from '@material-ui/core';
import Paper from '@material-ui/core/Paper';
import React, { useState } from 'react';
import { CustomButton } from 'views/home/components/button/button';
import { AddEmployerModal } from './add/add-employer';
import { DeletedTable } from './components/deleted-table/deleted-table';
import { EmployessTable } from './components/employees-table/employees-table';
import { a11yProps, TabPanel } from './components/tabPanel/tabPanel';
import { Updatedtable } from './components/updated-table/updated-table';
import { EditEmployerModal } from './edit/edit-employer';
import { useStyles } from './home.style';
// Shape of a single employee row shown in the tables.
export interface EmployeeData {
    id: number,
    name: string,
    surname: string,
    dateOfBirth: string,   // ISO date string, e.g. "2011-01-01"
    position: string,
    phoneNumber: string,
    status: string,        // "Active" | "Updated" | "Deleted" (as used in Home)
}
// Employee management screen: three tabs (all / updated / deleted employees)
// plus add/edit modals. Rows are soft-deleted (status flag) so deletion can
// be undone.
export const Home = () => {
    const classes = useStyles();
    const [value, setValue] = React.useState(0);
    const [open, setOpen] = React.useState(false);
    const [openEditmodal, setOpenEditModal] = React.useState(false);

    // Tab switcher.
    const handleChange = (_, newValue) => {
        setValue(newValue);
    };

    const defaultValues = [
        {
            id: 1,
            name: "Eli",
            surname: "Eliyev",
            dateOfBirth: "2011-01-01",
            position: "Developer",
            phoneNumber: "070-666-55-99",
            status: "Active"
        },
        {
            id: 2,
            name: "Eli",
            surname: "Eliyev",
            dateOfBirth: "2018-01-01",
            position: "Developer",
            phoneNumber: "070-666-55-99",
            status: "Active"
        },
        {
            id: 3,
            name: "Eli",
            surname: "Eliyev",
            dateOfBirth: "2020-01-01",
            position: "Developer",
            phoneNumber: "070-666-55-99",
            status: "Active"
        },
        {
            id: 4,
            name: "Veli",
            surname: "Israyilov",
            dateOfBirth: "2011-01-01",
            position: "Developer",
            phoneNumber: "070-666-55-99",
            status: "Active"
        }
    ]

    const [employers, setEmployers] = useState<EmployeeData[]>(defaultValues)
    const [selRow, setSelRow] = useState<EmployeeData | any>({})
    const [deletedEmployers, setDeletedEmployers] = useState<EmployeeData[] | any>([])
    const [updatedEmployers, setUpdatedEmployers] = useState<EmployeeData[] | any>([])

    // Soft-deletes an employee: flags the row "Deleted" and records it in
    // deletedEmployers so the action can be undone.
    // FIX: use map() — the original abused filter() as a mutate-and-keep-all
    // loop (it worked only because the callback returned a truthy row).
    const remove = (selId: number): void => {
        // ADD deletedRow To DELETED []
        const delRow = employers.find(e => e.id === selId)
        setDeletedEmployers([...deletedEmployers, delRow])
        // Change status in deletedRow and updated Employers[]
        const resultEmployers = employers.map(e => {
            if (e.id === selId) {
                e.status = "Deleted";
            }
            return e;
        })
        setEmployers(resultEmployers)
    }

    // Applies an edit: flags the row "Updated" and records/replaces it in
    // updatedEmployers.
    const update = (updatedEmp: EmployeeData): void => {
        // Change status in updRow and updated Employers[]
        const resultEmployers = employers.map(e => {
            if (e.id === updatedEmp.id) {
                e.status = "Updated";
            }
            return e;
        })
        setEmployers(resultEmployers)
        // ADD to D UPDATES[]
        const employerExist = updatedEmployers.find(e => e.id === updatedEmp.id);
        if (employerExist) {
            const updEmployers = updatedEmployers.filter(e => e.id !== updatedEmp.id)
            setUpdatedEmployers([...updEmployers, updatedEmp])
        } else {
            setUpdatedEmployers([...updatedEmployers, updatedEmp])
        }
    }

    // Reverts a soft delete: removes the row from deletedEmployers and flags
    // it "Active" again.
    const undo = (selId: number): void => {
        // Delete deletedRow from DELETED []
        const delEmployers = deletedEmployers.filter(e => e.id !== selId)
        setDeletedEmployers(delEmployers)
        // Change status in undoRow and updated Employers[]
        const resultEmployers = employers.map(e => {
            if (e.id === selId) {
                e.status = "Active";
            }
            return e;
        })
        setEmployers(resultEmployers)
    }

    // Returns the highest existing employee id (0 when the list is empty) so
    // a newly added employee gets a unique id.
    // FIX: the original only compared adjacent elements (idS[i] < idS[i + 1]),
    // which returns a wrong maximum for inputs such as [9, 1, 2].
    const findMaxValue = (): number => {
        const idS = employers.map((e) => e.id);
        return idS.length > 0 ? Math.max(...idS) : 0;
    }

    return (
        <div className={classes.container}>
            <Grid container>
                <Grid item xs={12} className={classes.navbar}>
                    <Paper square >
                        <Tabs
                            value={value}
                            indicatorColor="primary"
                            textColor="primary"
                            onChange={handleChange}
                        >
                            <Tab label="Employers" {...a11yProps(0)} />
                            <Tab label="Updated Employers" {...a11yProps(1)} />
                            <Tab label="Deleted Employers" {...a11yProps(2)} />
                        </Tabs>
                    </Paper>
                </Grid>
                <Grid item xs={12} className={classes.newEmployer}>
                    <CustomButton
                        title="Qeydiyyatdan kec"
                        backcolor="blue"
                        color="white"
                        func={() => {
                            setOpen(true)
                        }}
                    />
                </Grid>
                <Grid item xs={12} className={classes.tables}>
                    <TabPanel value={value} index={0} >
                        <EmployessTable employers={employers} removeMethod={remove} setOpenEditModal={setOpenEditModal} setSelRow={setSelRow} />
                    </TabPanel>
                    <TabPanel value={value} index={1} >
                        <Updatedtable employers={updatedEmployers} />
                    </TabPanel>
                    <TabPanel value={value} index={2} >
                        <DeletedTable employers={deletedEmployers} undoMethod={undo} />
                    </TabPanel>
                </Grid>
            </Grid>
            <AddEmployerModal open={open} setOpen={setOpen} maxEmpId={findMaxValue()} employers={employers} setEmployers={setEmployers} />
            <EditEmployerModal open={openEditmodal} setOpen={setOpenEditModal} updateMethod={update} selectedRow={selRow} />
        </div>
    );
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.