text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Cross-compile space-cloud for linux/darwin/windows, zip each binary, and
# upload both versioned and "latest" archives to the spaceuptech download API.
# Requires: go, zip, curl, and $JWT_TOKEN in the environment.
set -euo pipefail

# Build linux first and derive the build version from the binary's -v output.
GOOS=linux GOARCH=amd64 go build -ldflags '-s -w -extldflags "-static"' .
BUILD_VERSION=$(./space-cloud -v | cut -f3 -d ' ')
# -p: do not fail if the staging directories already exist from a prior run.
mkdir -p linux windows darwin
zip space-cloud.zip space-cloud
mv ./space-cloud.zip ./linux/
cp ./linux/space-cloud.zip "./linux/space-cloud_v${BUILD_VERSION}.zip"
rm space-cloud
GOOS=darwin GOARCH=amd64 go build -ldflags '-s -w -extldflags "-static"' .
zip space-cloud.zip space-cloud
mv ./space-cloud.zip ./darwin/
cp ./darwin/space-cloud.zip "./darwin/space-cloud_v${BUILD_VERSION}.zip"
rm space-cloud
GOOS=windows GOARCH=amd64 go build -ldflags '-s -w -extldflags "-static"' .
zip space-cloud.zip space-cloud.exe
mv ./space-cloud.zip ./windows/
cp ./windows/space-cloud.zip "./windows/space-cloud_v${BUILD_VERSION}.zip"
rm space-cloud.exe
# echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
# docker push spaceuptech/space-cloud:latest
# Upload as versioned build
curl -H "Authorization: Bearer $JWT_TOKEN" -F "file=@./darwin/space-cloud_v${BUILD_VERSION}.zip" -F 'fileType=file' -F 'makeAll=false' -F 'path=/darwin' https://spaceuptech.com/v1/api/downloads/files
curl -H "Authorization: Bearer $JWT_TOKEN" -F "file=@./windows/space-cloud_v${BUILD_VERSION}.zip" -F 'fileType=file' -F 'makeAll=false' -F 'path=/windows' https://spaceuptech.com/v1/api/downloads/files
curl -H "Authorization: Bearer $JWT_TOKEN" -F "file=@./linux/space-cloud_v${BUILD_VERSION}.zip" -F 'fileType=file' -F 'makeAll=false' -F 'path=/linux' https://spaceuptech.com/v1/api/downloads/files
# Upload as latest build
curl -H "Authorization: Bearer $JWT_TOKEN" -F 'file=@./darwin/space-cloud.zip' -F 'fileType=file' -F 'makeAll=false' -F 'path=/darwin' https://spaceuptech.com/v1/api/downloads/files
curl -H "Authorization: Bearer $JWT_TOKEN" -F 'file=@./windows/space-cloud.zip' -F 'fileType=file' -F 'makeAll=false' -F 'path=/windows' https://spaceuptech.com/v1/api/downloads/files
curl -H "Authorization: Bearer $JWT_TOKEN" -F 'file=@./linux/space-cloud.zip' -F 'fileType=file' -F 'makeAll=false' -F 'path=/linux' https://spaceuptech.com/v1/api/downloads/files
|
#!/bin/bash
# Copyright 2020 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is mostly copied from the hack/verify-vendor.sh script located in k8s.io/kubernetes
#
# Verifies that go.mod and vendor/ are in sync: re-vendors a pristine copy of
# the tree in a temp dir and diffs it against the working tree.
set -o errexit
set -o nounset
set -o pipefail
# ${BASH_SOURCE[0]} (indexed) is the ShellCheck-clean form of ${BASH_SOURCE}.
source "$(dirname "${BASH_SOURCE[0]}")/lib/init.sh"
DESCHEDULER_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
mkdir -p "${DESCHEDULER_ROOT}/_tmp"
_tmpdir="$(mktemp -d "${DESCHEDULER_ROOT}/_tmp/kube-vendor.XXXXXX")"
# KEEP_TMP=true preserves the scratch dir for post-mortem inspection.
if [[ -z ${KEEP_TMP:-} ]]; then
KEEP_TMP=false
fi
function cleanup {
# make go module dirs writeable (module cache files are read-only)
chmod -R +w "${_tmpdir}"
if [ "${KEEP_TMP}" == "true" ]; then
echo "Leaving ${_tmpdir} for you to examine or copy. Please delete it manually when finished. (rm -rf ${_tmpdir})"
else
echo "Removing ${_tmpdir}"
rm -rf "${_tmpdir}"
fi
}
trap "cleanup" EXIT
_deschedulertmp="${_tmpdir}"
mkdir -p "${_deschedulertmp}"
# Export a pristine copy of the committed tree (git write-tree includes the index).
git archive --format=tar --prefix=descheduler/ "$(git write-tree)" | (cd "${_deschedulertmp}" && tar xf -)
_deschedulertmp="${_deschedulertmp}/descheduler"
pushd "${_deschedulertmp}" > /dev/null 2>&1
# Destroy deps in the copy of the kube tree
rm -rf ./vendor
# Recreate the vendor tree using the nice clean set we just downloaded
hack/update-vendor.sh
popd > /dev/null 2>&1
ret=0
pushd "${DESCHEDULER_ROOT}" > /dev/null 2>&1
# Test for diffs; GoVersion lines differ per-machine and are ignored.
if ! _out="$(diff -Naupr --ignore-matching-lines='^\s*\"GoVersion\":' go.mod "${_deschedulertmp}/go.mod")"; then
echo "Your go.mod file is different:" >&2
echo "${_out}" >&2
echo "Vendor Verify failed." >&2
echo "If you're seeing this locally, run the below command to fix your go.mod:" >&2
echo "hack/update-vendor.sh" >&2
ret=1
fi
if ! _out="$(diff -Naupr -x "BUILD" -x "AUTHORS*" -x "CONTRIBUTORS*" vendor "${_deschedulertmp}/vendor")"; then
echo "Your vendored results are different:" >&2
echo "${_out}" >&2
echo "Vendor Verify failed." >&2
echo "${_out}" > vendordiff.patch
echo "If you're seeing this locally, run the below command to fix your directories:" >&2
echo "hack/update-vendor.sh" >&2
ret=1
fi
popd > /dev/null 2>&1
if [[ ${ret} -gt 0 ]]; then
exit ${ret}
fi
echo "Vendor Verified."
|
// Copyright (c) 2021, Microsoft Corporation, <NAME>
// Licensed under the MIT License.
package main
import (
"bufio"
"bytes"
"crypto/rand"
"encoding/json"
"errors"
"flag"
"fmt"
"math/big"
"net/http"
"os"
"regexp"
"strings"
"github.com/seh-msft/cfg"
"github.com/seh-msft/openapi"
)
// Result indicates the type of result of a lookup
type Result int
const (
something Result = iota // Something was found (1+ results)
nothing // Nothing was found
fuzzing // The caller should invoke contextual fuzzing
)
// RequestStrings is a table of HTTP requests to emit
// For serialization
type RequestStrings struct {
Requests []string
}
// Request represents an HTTP request and associated meta-information.
type Request struct {
*http.Request // HTTP request
Method *openapi.Method // Method related to our request
}
// Command-line flags plus the shared stderr writer; all set up in main().
var (
auth = flag.String("auth", "", "'Authorization: Bearer' header token value")
apiName = flag.String("api", "", "OpenAPI JSON file to parse")
dbName = flag.String("db", "", "key=value database to read identifiers from")
chatty = flag.Bool("D", false, "verbose logging output")
printReqs = flag.Bool("printreqs", false, "log HTTP bodies")
strict = flag.Bool("strict", false, "if a value can't be filled, fail")
proto = flag.String("proto", "https", "HTTP protocol to use")
outName = flag.String("o", "-", "file name to write output to")
allBodies = flag.Bool("allbodies", false, "force writing a body for ALL requests")
port = flag.String("listen", "", "TCP port to listen on for HTTP (if any)")
cert = flag.String("cert", "", "Certificate (if listening HTTPS)")
key = flag.String("key", "", "Private key (if listening HTTPS)")
noReplay = flag.Bool("noreplay", false, "Do not replay built requests")
ado = flag.Bool("ado", false, "Use ADO output mode for replay results")
ignoreMethods = flag.String("ignoremethods", "", "HTTP methods to not build (PUT,PATCH)")
noAuth = flag.Bool("noauth", false, "Strip Authorization: and Cookie: headers")
target = flag.String("target", "", "Hostname to force target replay to")
stderr *bufio.Writer // buffered diagnostics sink; initialized in main()
)
// Generator is a tool to generate HTTP requests from an OpenAPI specification.
// main parses flags and runs in one of two modes: as an HTTP service
// (-listen), or as a one-shot generator that loads the OpenAPI spec and
// identifier database, builds requests, and either emits or replays them.
func main() {
	flag.Parse()
	stderr = bufio.NewWriter(os.Stderr)
	defer stderr.Flush()
	// TODO - output file flag
	var out *bufio.Writer = bufio.NewWriter(os.Stdout)
	defer out.Flush()
	// Generator As A Service
	// TODO - propagate flags as setting defaults for listener?
	if *port != "" {
		// TLS needs both halves of the keypair; reject a partial pair.
		if (*cert != "" || *key != "") && (*cert == "" || *key == "") {
			fatal("err: if using TLS, both -key and -cert must be provided")
		}
		listen(*port, *cert, *key)
		return
	}
	// TODO - 'Cookie:' header
	if (*auth == "" && !*noAuth) || *apiName == "" || *dbName == "" {
		fatal("err: must supply all of -auth, -api, and -db ")
	}
	f, err := os.Open(*apiName)
	if err != nil {
		fatal("err: could not open API file →", err)
	}
	api, err := openapi.Parse(f)
	if err != nil {
		fatal("err: could not parse API →", err)
	}
	// Override target
	if *target != "" {
		api.Servers = []openapi.Server{{URL: *target}}
	}
	// Remove ignored methods from every path's method table.
	if len(*ignoreMethods) > 0 {
		for _, method := range strings.Split(*ignoreMethods, ",") {
			// Hoisted out of the inner loop: invariant per method name.
			down := strings.ToLower(method)
			for _, methods := range api.Paths {
				delete(methods, down)
			}
		}
	}
	db := ingestDb(*dbName)
	// Insert authorization
	// TODO - Make cleaner as per https://github.com/seh-msft/cfg/issues/1
	if !*noAuth {
		// TODO - this might need to be stubbed for after to permit api path building
		db.Records = append(db.Records, &cfg.Record{Tuples: []*cfg.Tuple{{Attributes: []*cfg.Attribute{{Name: "Authorization", Value: "Bearer " + *auth}}}}})
	}
	db.BuildMap()
	requests, missing, totalPossible, err := generate(api, db)
	if err != nil {
		fatal("fatal: generation failed ⇒ ", err)
	}
	chat(fmt.Sprintf("Built %d/%d requests (%.0f%%)\n", len(requests), totalPossible, 100*(float64(len(requests))/float64(totalPossible))))
	chat(fmt.Sprintf("Parameters missed: %v\n", missing))
	// If we don't replay, emit built requests
	if *noReplay {
		enc := json.NewEncoder(out)
		// Fixed: the Encode error was previously discarded silently.
		if err := enc.Encode(requests2strings(requests)); err != nil {
			fatal("err: could not encode requests →", err)
		}
		return
	}
	// Replay each built request and collect its response.
	results := make(map[*Request]*Response)
	for _, request := range requests {
		resp := replay(request.Request, nil)
		results[request] = &resp
	}
	// Validate responses against the spec.
	// Fixed: this error was previously ignored and then overwritten below.
	sus, ok, err := validate(results)
	if err != nil {
		fatal("err: validation failed →", err)
	}
	// Emit ADO format
	if *ado {
		printADO(out, requests, missing, sus, ok)
		return
	}
	// Emit as JSON by default
	if err := printJSON(out, requests, missing, sus, ok); err != nil {
		fatal("err: could not marshal requests →", err)
	}
	out.Flush()
}
// requests2strings flattens built requests into their printable string forms
// for serialization, optionally echoing each one when -printreqs is set.
func requests2strings(requests []*Request) RequestStrings {
	var rendered []string
	for _, req := range requests {
		rendered = append(rendered, prettyRequest(req.Request))
		if *printReqs {
			emit(prettyRequest(req.Request) + "\n\n")
		}
	}
	return RequestStrings{Requests: rendered}
}
// Do generation step, all we need is an api and a db.
// For every path/method pair in the API, fill required path, query, header,
// and body parameters from the identifier database. Returns the built
// requests, a count of missing parameter names, the total number of
// path/method pairs considered, and an error (only fatal under -strict).
func generate(api openapi.API, db cfg.Cfg) ([]*Request, map[string]uint64, uint64, error) {
failed := make(map[string]error)
var requests []*Request
totalPossible := uint64(0)
missing := make(map[string]uint64)
// "/foo/bar", map["get"]Method{}
for path, methods := range api.Paths {
chat(path + ":\n")
// "get", Method{}
methods:
for httpMethod, method := range methods {
totalPossible++
// TODO - openapi parse "requestBody" for POST, etc.
chat("\t" + httpMethod + ":\n")
chat("\t\t" + method.Summary + "\n\n")
// Were all the parameters filled from the db?
var paths, queries, headers []openapi.Parameter
var body bytes.Buffer
// Scan parameters for where they will be substituted in the request to build
// Parameter.In = "path", "query", or "header"
for _, param := range method.Parameters {
if !param.Required {
// TODO - attempt to fill non-required parameters
// Might be non-trivial
continue
}
switch strings.ToLower(param.In) {
case "path":
paths = append(paths, param)
case "query":
queries = append(queries, param)
case "header":
headers = append(headers, param)
}
chat("\t\t" + param.In + " — " + param.Name + "\n")
}
// Insert path parameters
// TODO - build URL/request for each server if multiple servers exist
if len(api.Servers) < 1 {
return nil, nil, 0, errors.New("err: need at least one server to call, none provided")
}
fullPath := *proto + api.Servers[0].URL + path
for _, parameter := range paths {
values, r := lookup(db, parameter.Name, path, api.Info.Title)
switch r {
case something:
// Substitute the OpenAPI "{name}" placeholder with the first db value.
apiForm := fmt.Sprintf(`{%s}`, parameter.Name)
// TODO - sequencing
fullPath = strings.ReplaceAll(fullPath, apiForm, values[0])
case nothing:
if *strict {
return nil, nil, 0, errors.New("err: could not find path parameter →" + parameter.Name)
}
// Non-strict: record the miss and skip this whole method.
missing[parameter.Name]++
failed[path] = errors.New(fmt.Sprint("could not find query parameters → ", parameter))
continue methods
case fuzzing:
// TODO - fuzz - maybe should remove this 'feature' skeleton
default:
}
}
// Build body, if required
if method.RequestBody.Required || *allBodies {
// TODO - break out different formats
ref := method.RequestBody.Content["application/json"]["schema"].Ref
// We get #/components/schemas/ as a prefix sometimes
refLess := strings.TrimPrefix(ref, "#/components/schemas/")
found := false
var target openapi.Type
// Find our definition by ref
search:
// All types in the schema table
for typeName, t := range api.Components["schemas"] {
// Properties are elements in the body
for _, property := range t.Properties {
schema := property.Items
if schema.Ref == ref || schema.Ref == refLess || typeName == ref || typeName == refLess {
// We found our type ref
target = t
found = true
break search
}
}
}
// Start constructing JSON for the body
// TODO - an actual recursive object builder?
// "object" could trigger a new map[] level
obj := make(map[string]string)
if found {
// We know the scheme, fill all we can
for name, property := range target.Properties {
// Fill values we know
values, r := lookup(db, name, path, api.Info.Title)
switch r {
case something:
// TODO - sequencing?
obj[name] = values[0]
case nothing:
fallthrough
case fuzzing:
// No db value — fall back to a randomly generated property.
obj = randProperty(obj, name, property)
}
}
} else {
// Unknown scheme - let object be {}
// TODO - strict mode fatal?
}
enc := json.NewEncoder(&body)
enc.Encode(obj)
}
// Generate request structure
httpReq, err := http.NewRequest(strings.ToUpper(httpMethod), fullPath, &body)
if err != nil {
if *strict {
return nil, nil, 0, errors.New("err: could not build request → " + err.Error())
}
failed[path] = err
continue methods
}
// Insert query parameters
vals := httpReq.URL.Query()
for _, parameter := range queries {
values, r := lookup(db, parameter.Name, path, api.Info.Title)
switch r {
case something:
// TODO - sequencing/fuzzing?
vals[parameter.Name] = []string{values[0]}
case nothing:
if *strict {
return nil, nil, 0, errors.New("err: could not find query parameter → " + parameter.Name)
}
missing[parameter.Name]++
failed[path] = errors.New(fmt.Sprint("could not find query parameters → ", parameter))
continue methods
case fuzzing:
// TODO - fuzzing?
}
}
httpReq.URL.RawQuery = vals.Encode()
// Override HTTP headers
for _, parameter := range headers {
values, r := lookup(db, parameter.Name, path, api.Info.Title)
switch r {
case something:
// TODO - sequencing
httpReq.Header[parameter.Name] = []string{values[0]}
case nothing:
if *strict {
return nil, nil, 0, errors.New("err: could not find header parameter → " + parameter.Name)
}
missing[parameter.Name]++
failed[path] = errors.New(fmt.Sprint("could not find header parameter - ", parameter))
continue methods
case fuzzing:
// TODO - fuzzing?
}
}
requests = append(requests, &Request{httpReq, &method})
}
chat("\n")
}
// NOTE(review): `failed` is populated but never returned or logged — confirm
// whether it should be surfaced to the caller.
return requests, missing, totalPossible, nil
}
// Lookup an identifier name for a given path in a given API
// Return the set of values which are usable and an 'ok' indicator
// Path should be in the original OpenAPI {someId} form
// Evaluation order per record is: 'disallow' rules, then 'permit' rules,
// then enumerated 'values', then the identifier's primary value. The
// returned Result is fuzzing only when a 'fuzz' property is set and no
// concrete value was produced first.
func lookup(c cfg.Cfg, name, path, title string) ([]string, Result) {
//chat("≡ lookup ⇒ ", name, path, title)
var out []string
// hasRegex reports whether a tuple carries a "regex" marker attribute.
hasRegex := func(tuple *cfg.Tuple) bool {
_, has := tuple.Map["regex"]
return has
}
// The attributes for record 'name' with the tuple 'name'
primaryAttributes, ok := c.Map[name][name]
if !ok {
return out, nothing
}
primaryValue, hasValue := primaryAttributes[name]
if hasValue {
// Only true if we contain at least one element
hasValue = len(primaryValue) > 0
}
// Get properties for record 'name'
properties, hasProperties := c.Map[name]["properties"]
// Short circuit if 'name' has no rules and no enumerated values
_, hasDisallows := c.Map[name]["disallow"]
_, hasPermits := c.Map[name]["permit"]
_, hasEnums := c.Map[name]["values"]
if !hasValue && !hasEnums {
// Value omitted for this identifier
// TODO - maybe a flag to handle this case?
return out, nothing
}
if !hasDisallows && !hasPermits && !hasEnums && hasValue {
// Just the value
return primaryValue, something
}
// Records are identified by the identifier name
records, ok := c.Lookup(name)
if !ok {
return out, nothing
}
fuzz := false
// Determine if the identifier is valid
// We do costly lookups here to guarantee ordering of 'permit', 'disallow', and 'values'
// As they are ordered and maps play with ordering
recordSearch:
for _, record := range records {
// Sees if the tuple set has a matching attribute
match := func(tuples []*cfg.Tuple) bool {
for _, tuple := range tuples {
attributes := tuple.Attributes
// Strip 'except' or 'permit'
if len(attributes) > 1 {
attributes = attributes[1:]
}
// Valid determines if a given attribute entry and our name/path/title are compatible
valid := func(value, other string) bool {
return value == other
}
// Use regex to test equality if requested
if len(attributes) > 1 && hasRegex(tuple) {
valid = func(value, other string) bool {
regex, err := regexp.Compile(value)
if err != nil {
fatal(`err: could not compile regex "`+value+`" →`, err)
}
return regex.MatchString(other)
}
// Strip 'regex'
attributes = attributes[1:]
}
result := false
// Search attributes in the tuple
// All attributes in a tuple must match (AND semantics); unknown
// attribute names are skipped rather than failing the tuple.
searchAttributes:
for _, attr := range attributes {
test := ""
switch attr.Name {
case "title":
test = title
case "path":
test = path
default:
// Unknown keyword
// Skip
continue searchAttributes
}
if valid(attr.Value, test) {
// Valid and we had an invalid result
result = true
} else {
// Invalid and result was true
// A rule in the tuple was violated
result = false
break searchAttributes
}
}
if result {
return true
}
}
// Do not match by default
return false
}
exceptions, ok := record.Lookup("disallow")
if ok && match(exceptions) {
// We are an exception
continue recordSearch
}
constraints, ok := record.Lookup("permit")
if ok && !match(constraints) {
// We are not in scope
continue recordSearch
}
// Populate properties
if hasProperties {
if _, hasFuzz := properties["fuzz"]; hasFuzz {
fuzz = true
}
}
// Search for enumerated values - ordered
values, ok := record.Lookup("values")
var vals []string
// Build table of enumerated values
if ok {
for _, tuple := range values {
attributes := tuple.Attributes
if len(attributes) > 1 {
// First attribute is the 'values' keyword itself; skip it.
for _, v := range attributes[1:] {
vals = append(vals, v.Name)
}
}
}
}
// Insert an enumerated value if any was supplied, short circuit
if len(vals) > 0 {
if fuzz {
// Select at random
// crypto/rand is used for uniformity; no seeding required.
index, err := rand.Int(rand.Reader, big.NewInt(int64(len(vals))))
if err != nil {
fatal("err: could not rng for value fuzz -", err)
}
// One, single, randomly selected, value
// TODO - just shuffle and append?
out = append(out, vals[int(index.Int64())])
continue recordSearch
}
// All values, in order
out = append(out, vals...)
continue recordSearch
}
// Insert the primary value for this identifier
if !fuzz && len(primaryValue) > 0 {
out = append(out, primaryValue...)
continue recordSearch
}
// TODO - fuzzing?
}
r := nothing
if fuzz {
r = fuzzing
} else if len(out) > 0 {
r = something
}
return out, r
}
|
#!/bin/bash
# Copy Docker container json logs into a timestamped ./logs/<epoch>/ directory.
# Requires sudo (the log files under /var/lib/docker are root-readable only).
set -euo pipefail

time=$(date +%s)
mkdir -p "./logs/$time"
echo "Logging into ./logs/$time"

# Iterate NUL-delimited so filenames with spaces/newlines are handled safely.
# (Original used `for file in \`sudo find ...\`` which word-splits.)
while IFS= read -r -d '' file; do
  filename=$(basename -- "$file")
  # sudo only on the read; the redirection runs as the invoking user on purpose
  # so the copies land owned by us in ./logs.
  sudo cat "$file" > "./logs/$time/$filename"
  echo "Logged ./logs/$time/$filename"
done < <(sudo find /var/lib/docker/containers -name '*json.log' -print0)

# Owner is parameterizable; defaults to the original hard-coded user.
sudo chown -R "${LOG_OWNER:-kevin}" "./logs/$time"
|
#!/bin/bash
#
# Extract test with specific file arguments. Follow the same ordering in argv
# as in the archive. Just check the file listing first, do not check the
# contents.
# (Fixed: the original shebang was "#/bin/bash" — missing '!' — so the kernel
# ignored it and the script ran under whatever shell invoked it.)

source "$configvar"
cd "$tmpdir" || exit 1

# Pick the 1st, 3rd, 7th and last entries of the newline-separated list.
first=$(echo "$inputfiles" | head -1)
third=$(echo "$inputfiles" | head -3 | tail -1)
seventh=$(echo "$inputfiles" | head -7 | tail -1)
last=$(echo "$inputfiles" | tail -1)

tmp2=tmp-$(basename "$0")
# ';' instead of '&&' so a failed echo can't skip the exit.
mkdir "$tmp2" || { echo "mkdir failed"; exit 1; }
cd "$tmp2" || exit 1

# $MYTAR is left unquoted on purpose — it may carry extra arguments.
$MYTAR -x -v -f "../$tarfile" "$first" "$third" "$seventh" "$last"
(($? == 0)) || exit 1
|
#!/bin/bash -l
#SBATCH --output=/mnt/lustre/users/%u/%j.out
#SBATCH --mem=30000
#SBATCH --job-name=gpu
#SBATCH --gres=gpu
#SBATCH --constraint=v100
#SBATCH --time=4-0:00
# (Fixed: "--constrain" only worked via getopt_long prefix abbreviation;
# the documented option name is --constraint.)

# Activate the Python 2 environment used by the clustering code.
conda activate py2
#module load libs/cuda

# Pin to the first visible GPU and launch the IID clustering run on YT_BB.
export CUDA_VISIBLE_DEVICES=0 && python -m code.scripts.cluster.YT_BB_script --model_ind 212 --arch ClusterNet5gTwoHead --mode IID --dataset YT_BB --dataset_root "/users/k1763920/yt_bb_small" --out_root "/users/k1763920/out/" --gt_k 10 --output_k_A 70 --output_k_B 10 --lamb 1.0 --lr 0.0001 --num_epochs 100 --batch_sz 660 --num_dataloaders 3 --num_sub_heads 5 --input_sz 32 --rand_crop --rand_crop_sz 20 --head_A_first --head_B_epochs 2 --base_frame 0 --base_interval 1 --base_num 10 --interval 2 --frame_increment --train_partition 'train' --test_partition 'test' --assignment_partition 'train' --test_on_all_frame
|
# Delete mailbox emails older than roughly one month.
import datetime

# Cutoff: anything sent before this is considered expired.
# NOTE: datetime.timedelta has no "month" unit (the original
# `timedelta(1 month)` was a syntax error); 30 days approximates a month.
time_limit = datetime.datetime.now() - datetime.timedelta(days=30)

# Iterate over a snapshot so removing entries does not skip elements
# while the underlying mailbox is being mutated.
for email in list(mailbox):
    # NOTE(review): assumes email.sent_date is a naive datetime comparable
    # to datetime.now() — confirm against the mailbox library in use.
    if email.sent_date < time_limit:
        mailbox.remove(email)
public static void main(String[] args) {
int[] numbers = {25, 50, 75, 100, 125};
int smallestNumberIndex = 0;
for(int i=1; i<numbers.length; i++){
if(numbers[i] < numbers[smallestNumberIndex]){
smallestNumberIndex = i;
}
}
System.out.println("Index of smallest number: "+smallestNumberIndex);
} |
<reponame>IjzerenHein/ttvflash
/* @flow */
import { observable, runInAction } from 'mobx';
import type { IObservableValue, IObservableArray } from 'mobx';
import { TTAppAPI } from './TTAppAPI';
import { TTAppTeam } from './TTAppTeam';
import moment from 'moment';
import { TTAppEventStream } from './TTAppEventStream';
import type { Club, Match } from './types';
// const CLUB_ID = '1057';
const CLUB_ID = '1088'; // Flash
// Central mobx store for the TTApp scoreboard: holds the club, its teams,
// and an event stream, and exposes derived match/update information.
export class TTAppStore {
_api = new TTAppAPI();
_eventStream = new TTAppEventStream();
_isEnabled: IObservableValue<boolean> = observable.box(false);
_club: IObservableValue<Club> = observable.box({});
_teams: IObservableArray<TTAppTeam> = observable.array([]);
_groups: IObservableValue<any> = observable.box(undefined);
_lastUpdated: IObservableValue<?Date> = observable.box(undefined);
get isEnabled(): boolean {
return this._isEnabled.get();
}
get club(): Club {
return this._club.get();
}
get teams(): IObservableArray<TTAppTeam> {
return this._teams;
}
get eventStream(): TTAppEventStream {
return this._eventStream;
}
// Collect each team's match for the week at `weekOffset` from the API's
// current date, filtered by youth/senior; returns undefined when no team
// has a match that week.
getMatchesForWeek(
weekOffset: number = 0,
isYouth: boolean = false,
): Array<{
team: TTAppTeam,
match: Match,
isLive: boolean,
}> | void {
const date = moment(this._api.currentDate).add(weekOffset, 'week');
const result = this.teams
.filter(team => team.isYouthTeam === isYouth)
.map(team => {
const match = team.getMatchForWeek(date);
return {
team,
match,
isLive: match ? team.isMatchLive(match) : undefined,
};
})
.filter(({ match }) => match);
//.sort((a, b) => a.match.playtime.localeCompare(b.match.playtime));
// $$FlowFixMe
return result.length ? result : undefined;
}
// Most recent update time across the store itself and all teams.
get lastUpdated(): ?Date {
let lastUpdated = this._lastUpdated.get();
if (!lastUpdated) return undefined;
this.teams.forEach(team => {
const lu = team.lastUpdated;
// $FlowFixMe
if (lu.getTime() > lastUpdated.getTime()) lastUpdated = lu;
});
return lastUpdated;
}
get currentDate(): ?Date {
return this._api.currentDate;
}
// Log in, fetch the club's groups/teams, build and init a TTAppTeam per
// team entry, then commit everything to observables in one action.
async init(clubId: string = CLUB_ID) {
await this._api.login();
const { club, groups } = await this._api.getTeams(clubId);
const teams = [];
for (let i = 0; i < groups.length; i++) {
const group = groups[i];
for (let j = 0; j < group.teams.length; j++) {
const teamInfo = group.teams[j];
const team = new TTAppTeam({
api: this._api,
group,
team: teamInfo,
eventStream: this._eventStream,
});
teams.push(team);
team.init();
}
}
teams.sort((a, b) => a.teamNumber - b.teamNumber);
runInAction(() => {
this._lastUpdated.set(new Date());
this._club.set(club);
this._teams.replace(teams);
});
}
// Tear down all teams and reset observable state to its initial values.
async cleanup() {
this.teams.forEach(team => team.cleanup());
runInAction(() => {
this._lastUpdated.set(undefined);
this._club.set({});
this._teams.replace([]);
});
}
}
|
def reverse_string(string):
    """Return `string` with its characters in reverse order."""
    # Slicing is O(n); the original prepend-in-a-loop built a new string per
    # character, which is O(n^2).
    return string[::-1]

result = reverse_string("Hello World!")
print(result)
#!/usr/bin/env bash
set -x

# Run the Unity test suite for $TEST_PLATFORM and map Unity's exit codes to a
# human-readable summary. The script's own exit code is Unity's exit code.
echo "Testing for $TEST_PLATFORM"

# ${UNITY_EXECUTABLE:-...} stays unquoted on purpose: the xvfb-run fallback
# must word-split into a command plus its arguments.
${UNITY_EXECUTABLE:-xvfb-run --auto-servernum --server-args='-screen 0 640x480x24' /opt/Unity/Editor/Unity} \
  -projectPath "$(pwd)/$PROJECT_PATH" \
  -runTests \
  -testPlatform "$TEST_PLATFORM" \
  -testResults "$(pwd)/$TEST_PLATFORM-results.xml" \
  -logFile \
  -batchmode

UNITY_EXIT_CODE=$?

if [ $UNITY_EXIT_CODE -eq 0 ]; then
  echo "Run succeeded, no failures occurred"
elif [ $UNITY_EXIT_CODE -eq 2 ]; then
  echo "Run succeeded, some tests failed"
elif [ $UNITY_EXIT_CODE -eq 3 ]; then
  echo "Run failure (other failure)"
else
  echo "Unexpected exit code $UNITY_EXIT_CODE"
fi

grep test-run "$(pwd)/$TEST_PLATFORM-results.xml" | grep Passed

# Fixed: previously exited with the undefined $UNITY_TEST_EXIT_CODE, which is
# always empty → `exit` reused the last command's status and masked failures.
exit $UNITY_EXIT_CODE
#!/usr/bin/env bash
# Run the bookstore performance script in a k6 container on the host network.
cd "$(dirname "$0")" || exit 1
#docker run -i loadimpact/k6 run --vus 5 --duration 20s - <script.js
docker run -i --network host loadimpact/k6 run - </home/mike/IdeaProjects/packt-ktor/bookstore/script/performance/script.js
# Build the LaTeX glossary: sort dissertation.glo using style dissertation.ist,
# writing the result to dissertation.gls and the transcript to dissertation.glg.
makeindex dissertation.glo -s dissertation.ist -t dissertation.glg -o dissertation.gls
|
#!/bin/bash
# Demonstrate Istio end-user JWT auth: call the ingress gateway with and
# without a bearer token, printing only the HTTP status code of each call.
set -euo pipefail

# Split declaration from command substitution so a failing minikube/kubectl
# is not masked by `export` (ShellCheck SC2155).
INGRESS_HOST=$(minikube ip)
INGRESS_PORT=$(kubectl -n istio-system get service istio-ingressgateway -o jsonpath='{.spec.ports[?(@.name=="http2")].nodePort}')
GATEWAY_URL="$INGRESS_HOST:$INGRESS_PORT"
export INGRESS_HOST INGRESS_PORT GATEWAY_URL
echo "Gateway: $GATEWAY_URL"

echo "1. Without JWT"
curl "$GATEWAY_URL/headers" -s -o /dev/null -w "%{http_code}\n"

echo "2. With JWT"
# Demo token published by the Istio project for the auth samples.
TOKEN=$(curl https://raw.githubusercontent.com/istio/istio/release-1.0/security/tools/jwt/samples/demo.jwt -s)
curl --header "Authorization: Bearer $TOKEN" "$GATEWAY_URL/headers" -s -o /dev/null -w "%{http_code}\n"
|
<reponame>shrishankit/prisma
package com.prisma.shared.models
import com.prisma.deploy.specutils.DeploySpecBase
import com.prisma.shared.schema_dsl.SchemaDsl
import org.scalatest.{FlatSpec, Matchers}
// Regression test: resolving a relation's related field must succeed even when
// both sides of the relation use the same field name ("field" here).
class ModelsSpec extends FlatSpec with Matchers with DeploySpecBase {
"a related field" should "be found when the related fields have the same name" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Model1 {
| id: ID! @id
| field: Model2 @relation(link: INLINE)
|}
|
|type Model2 {
| id: ID! @id
| field: Model1
|}
""".stripMargin
}
// Touch relatedField on every relation field; an ambiguous lookup would throw.
project.schema.allRelationFields.foreach { rf =>
rf.relatedField // let's see whether this blows up
}
}
}
|
<reponame>cereallarceny/gradient-path
// Shared default precision (number of decimal places) — presumably applied
// when rounding sampled path values; confirm at call sites.
export const DEFAULT_PRECISION = 2;
|
#!/bin/bash
# Configure nginx TLS at container start: either wire in an existing cert
# pair, or install certbot and obtain/renew a LetsEncrypt certificate, then
# enable the HTTPS parts of the nginx config and reload.
set -e

NGINX_CONF_FILE=/etc/nginx/conf.d/reverse-proxy.conf
LETSENCRYPT_CRON_FILE=/etc/cron.d/letsencrypt-cron

if [ -n "$HTTPS_CERT_PUBLIC_PATH" ] && [ -n "$HTTPS_CERT_PRIVATE_PATH" ]; then
  # Existing certificate. Set it into config file.
  echo "Valid certificate detected. Configuring nginx..."
  sed -i 's|${SSL_CERTIFICATE}|'"$HTTPS_CERT_PUBLIC_PATH"'|g' "$NGINX_CONF_FILE"
  sed -i 's|${SSL_CERTIFICATE_KEY}|'"$HTTPS_CERT_PRIVATE_PATH"'|g' "$NGINX_CONF_FILE"
elif [ -n "$HTTPS_LETSENCRYPT_EMAIL" ]; then
  # Install and Generate/Renew LetsEncrypt certificate.
  echo "Installing LetsEncrypt..."
  echo 'deb http://ftp.debian.org/debian jessie-backports main' | tee /etc/apt/sources.list.d/backports.list
  apt-get update -y
  apt-get install -y --no-install-recommends certbot
  echo "LetsEncrypt installation is finished."
  if [ ! -d "/etc/letsencrypt/live/$WEB_DOMAIN" ]; then
    echo "Start LetsEncrypt certificate generation for '$WEB_DOMAIN' (email: '$HTTPS_LETSENCRYPT_EMAIL')..."
    if [ "$SUPPORTS_WWW_SUBDOMAIN" = True ]; then
      letsencrypt certonly \
        --webroot -w "/var/www/$WEB_DOMAIN" \
        -d "$WEB_DOMAIN" -d "www.$WEB_DOMAIN" \
        --email "$HTTPS_LETSENCRYPT_EMAIL" \
        --agree-tos \
        --non-interactive \
        --keep-until-expiring
    else
      letsencrypt certonly \
        --webroot -w "/var/www/$WEB_DOMAIN" \
        -d "$WEB_DOMAIN" \
        --email "$HTTPS_LETSENCRYPT_EMAIL" \
        --agree-tos \
        --non-interactive \
        --keep-until-expiring
    fi
    echo "LetsEncrypt certificate is generated."
  else
    echo "Trying to renew LetsEncrypt certificate..."
    letsencrypt renew
  fi
  sed -i 's|${SSL_CERTIFICATE}|/etc/letsencrypt/live/'"$WEB_DOMAIN"'/fullchain.pem|g' "$NGINX_CONF_FILE"
  sed -i 's|${SSL_CERTIFICATE_KEY}|/etc/letsencrypt/live/'"$WEB_DOMAIN"'/privkey.pem|g' "$NGINX_CONF_FILE"
  # Set letsencrypt renew crontab
  echo "Setting up cron job for LetsEncrypt renew procedure..."
  apt-get install -y --no-install-recommends cron
  rm -f /etc/cron.d/certbot # remove default certbot cron-job
  printf "0 1 * * * letsencrypt renew --post-hook \"nginx -s reload\"\n" > "$LETSENCRYPT_CRON_FILE"
  chmod 0644 "$LETSENCRYPT_CRON_FILE"
  crontab "$LETSENCRYPT_CRON_FILE"
  echo "Cron job for LetsEncrypt renew procedure is set up."
else
  echo "Invalid HTTPS settings"
  exit 1
fi

# Generate dhparams if missing.
# Fixed: the check used to test /etc/nginx/dhparams (with an 's') while the
# file is generated at /etc/nginx/dhparam, so dhparams were regenerated —
# an expensive operation — on every single start.
if [ ! -f "/etc/nginx/dhparam" ]; then
  echo "Generating dhparams with length '$HTTPS_DHPARAM_LEN'..."
  openssl dhparam -out /etc/nginx/dhparam "$HTTPS_DHPARAM_LEN"
  echo "dhparams with length '$HTTPS_DHPARAM_LEN' is generated."
fi

# Patch nginx conf file for HTTPS: drop the "80; # " placeholders, then
# uncomment the remaining "# "-prefixed TLS directives.
sed -i 's|80; # ||g' "$NGINX_CONF_FILE"
sed -i 's|# ||g' "$NGINX_CONF_FILE"

# Update nginx configuration
nginx -s reload
|
#!/usr/bin/env bash
# Theme inspired on:
# - Ronacher's dotfiles (mitsuhikos) - http://github.com/mitsuhiko/dotfiles/tree/master/bash/
# - Glenbot - http://theglenbot.com/custom-bash-shell-for-development/
# - My extravagant zsh - http://stevelosh.com/blog/2010/02/my-extravagant-zsh-prompt/
# - Monokai colors - http://monokai.nl/blog/2006/07/15/textmate-color-theme/
# - Bash_it modern theme
#
# by Rana Amrit Parth<ramrit9@gmaiil.com>
# For the real Monokai colors you should add these to your .XDefaults or
# terminal configuration:
#! ----------------------------------------------------------- TERMINAL COLORS
#! monokai - http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
#*background: #272822
#*foreground: #E2DA6E
#*color0: black
#! mild red
#*color1: #CD0000
#! light green
#*color2: #A5E02D
#! orange (yellow)
#*color3: #FB951F
#! "dark" blue
#*color4: #076BCC
#! hot pink
#*color5: #F6266C
#! cyan
#*color6: #64D9ED
#! gray
#*color7: #E5E5E5
# ----------------------------------------------------------------- DEF COLOR
# ANSI escape sequences; the literal "\e[" forms are interpreted later inside
# PS1 / by echo -e, not at assignment time.
RCol='\e[0m' # Text Reset
# Regular
Bla='\e[0;30m';
Red='\e[0;31m';
Gre='\e[0;32m';
Yel='\e[0;33m';
Blu='\e[0;34m';
Pur='\e[0;35m';
Cya='\e[0;36m';
Whi='\e[0;37m';
# Bold
BBla='\e[1;30m';
BRed='\e[1;31m';
BYel='\e[1;33m';
BGre='\e[1;32m';
BBlu='\e[1;34m';
BPur='\e[1;35m';
BCya='\e[1;36m';
BWhi='\e[1;37m';
# High Intensity
IBla='\e[0;90m';
IRed='\e[0;91m';
IGre='\e[0;92m';
IYel='\e[0;93m';
IBlu='\e[0;94m';
IPur='\e[0;95m';
ICya='\e[0;96m';
IWhi='\e[0;97m';
# ----------------------------------------------------------------- COLOR CONF
# Semantic roles mapped onto the palette above; themes tweak these, not the
# raw color codes.
D_DEFAULT_COLOR="${Whi}"
D_INTERMEDIATE_COLOR="${BWhi}"
D_USER_COLOR="${Yel}"
D_SUPERUSER_COLOR="${Red}"
D_MACHINE_COLOR="${IYel}"
D_DIR_COLOR="${Gre}"
D_GIT_COLOR="${BBlu}"
D_SCM_COLOR="${BYel}"
D_BRANCH_COLOR="${BYel}"
D_CHANGES_COLOR="${Whi}"
D_CMDFAIL_COLOR="${Red}"
D_VIMSHELL_COLOR="${Cya}"
# ------------------------------------------------------------------ FUNCTIONS
# xterm-alikes get the cwd mirrored into the window title via this escape.
case $TERM in
xterm*)
TITLEBAR="\033]0;\w\007"
;;
*)
TITLEBAR=""
;;
esac
# Print an "on vim shell" marker when this shell was spawned from Vim's
# :shell command (Vim exports VIMRUNTIME into child processes).
is_vim_shell() {
  if [ -n "$VIMRUNTIME" ]; then
    echo "${D_INTERMEDIATE_COLOR}on ${D_VIMSHELL_COLOR}\
vim shell${D_DEFAULT_COLOR} "
  fi
}
# Emit "exited <code>" when the previously executed command returned
# non-zero. Must be the first thing evaluated so $? is still intact.
mitsuhikos_lastcommandfailed() {
  local status=$?
  if [ "$status" -ne 0 ]; then
    echo "${D_INTERMEDIATE_COLOR}exited ${D_CMDFAIL_COLOR}\
$status ${D_DEFAULT_COLOR}"
  fi
}
# vcprompt-based SCM status segment (used instead of the oh-my-bash default).
# Prints nothing unless VCPROMPT_EXECUTABLE points at a vcprompt binary.
demula_vcprompt() {
  if [ -n "$VCPROMPT_EXECUTABLE" ]; then
    local D_VCPROMPT_FORMAT="on ${D_SCM_COLOR}%s${D_INTERMEDIATE_COLOR}:\
${D_BRANCH_COLOR}%b %r ${D_CHANGES_COLOR}%m%u ${D_DEFAULT_COLOR}"
    "$VCPROMPT_EXECUTABLE" -f "$D_VCPROMPT_FORMAT"
  fi
}
# Call battery_charge only when the oh-my-bash battery plugin is installed,
# so the prompt does not error out on machines without it.
safe_battery_charge() {
  local plugin="${OSH}/plugins/battery/battery.plugin.sh"
  if [ -e "$plugin" ]; then
    battery_charge
  fi
}
# Print "<prefix><branch>[flags]" for the current git repo, or nothing when
# not inside one. Flags: + staged, ! unstaged, ? untracked, $ stashed.
# Fixed: the original tested command *output* via `if ! $(git diff ...)`
# (ShellCheck SC2091) — it only worked because an empty expansion preserves
# the substitution's exit status. The commands are now run directly.
prompt_git() {
  local s='';
  local branchName='';
  # Check if the current directory is in a Git repository.
  if git rev-parse --is-inside-work-tree &>/dev/null; then
    # check if the current directory is in .git before running git checks
    if [ "$(git rev-parse --is-inside-git-dir 2> /dev/null)" == 'false' ]; then
      # Ensure the index is up to date.
      git update-index --really-refresh -q &>/dev/null;
      # Check for uncommitted changes in the index.
      if ! git diff --quiet --ignore-submodules --cached; then
        s+='+';
      fi;
      # Check for unstaged changes.
      if ! git diff-files --quiet --ignore-submodules --; then
        s+='!';
      fi;
      # Check for untracked files.
      if [ -n "$(git ls-files --others --exclude-standard)" ]; then
        s+='?';
      fi;
      # Check for stashed files.
      if git rev-parse --verify refs/stash &>/dev/null; then
        s+='$';
      fi;
    fi;
    # Get the short symbolic ref.
    # If HEAD isn't a symbolic ref, get the short SHA for the latest commit
    # Otherwise, just give up.
    branchName="$(git symbolic-ref --quiet --short HEAD 2> /dev/null || \
      git rev-parse --short HEAD 2> /dev/null || \
      echo '(unknown)')";
    [ -n "${s}" ] && s=" [${s}]";
    echo -e "${1}${branchName}${Cya}${s}";
  else
    return;
  fi;
}
# -------------------------------------------------------------- PROMPT OUTPUT
# Assemble PS1/PS2. On Linux the battery segment is painted at the right edge
# using save/restore-cursor escapes; elsewhere it is appended inline.
prompt() {
# NOTE(review): `local VAR=$(cmd)` masks the command's exit status (SC2155);
# harmless here since only the output is used.
local LAST_COMMAND_FAILED=$(mitsuhikos_lastcommandfailed)
local SAVE_CURSOR='\033[s'
local RESTORE_CURSOR='\033[u'
local MOVE_CURSOR_RIGHTMOST='\033[500C'
local MOVE_CURSOR_5_LEFT='\033[5D'
if [ $(uname) = "Linux" ];
then
PS1="${TITLEBAR}
${SAVE_CURSOR}${MOVE_CURSOR_RIGHTMOST}${MOVE_CURSOR_5_LEFT}\
$(safe_battery_charge)${RESTORE_CURSOR}\
${D_USER_COLOR}\u ${D_INTERMEDIATE_COLOR}\
at ${D_MACHINE_COLOR}\h ${D_INTERMEDIATE_COLOR}\
in ${D_DIR_COLOR}\w ${D_INTERMEDIATE_COLOR}\
$(prompt_git "$D_INTERMEDIATE_COLOR on $D_GIT_COLOR")\
${LAST_COMMAND_FAILED}\
$(demula_vcprompt)\
$(is_vim_shell)
${D_INTERMEDIATE_COLOR}$ ${D_DEFAULT_COLOR}"
else
PS1="${TITLEBAR}
${D_USER_COLOR}\u ${D_INTERMEDIATE_COLOR}\
at ${D_MACHINE_COLOR}\h ${D_INTERMEDIATE_COLOR}\
in ${D_DIR_COLOR}\w ${D_INTERMEDIATE_COLOR}\
$(prompt_git "$D_INTERMEDIATE_COLOR on $D_GIT_COLOR")\
${LAST_COMMAND_FAILED}\
$(demula_vcprompt)\
$(is_vim_shell)\
$(safe_battery_charge)
${D_INTERMEDIATE_COLOR}$ ${D_DEFAULT_COLOR}"
fi
PS2="${D_INTERMEDIATE_COLOR}$ ${D_DEFAULT_COLOR}"
}
# Runs prompt (this bypasses oh-my-bash $PROMPT setting)
_omb_util_add_prompt_command prompt
|
// This demo shows a simple real-world use case scenario with draggable elements being moved into a container.
// A single draggable item. The v-draggable.move directive (registered
// elsewhere in this demo bundle) uses `content` as the drag payload.
const Demo3DraggableElement = {
  template: `
    <div v-draggable.move="content" class="draggableContainer">
      {{ content }}
    </div>
  `,
  props: ['content']
};
// Drop target: accepts dragged items and re-renders each one as a draggable
// child, so items can be dragged back out or re-ordered within the zone.
const Demo3DropZone = {
  components: {
    Demo3DraggableElement
  },
  template: `
    <div v-droppable @v-drag-drop="onDrop" class="droppableContainer">
      <span v-if="droppedItems.length === 0">Drop items here</span>
      <!-- Render elements that have already been dropped -->
      <demo3-draggable-element
        v-else
        v-for="item in droppedItems" :key="item"
        :content="item"
      >
      </demo3-draggable-element>
    </div>
  `,
  props: ['droppedItems'],
  methods: {
    // Append the dropped item; if it was already present, move it to the
    // end instead — this is what enables re-sorting via drag & drop.
    onDrop(item) {
      const existingIndex = this.droppedItems.indexOf(item);
      if (existingIndex !== -1) {
        this.droppedItems.splice(existingIndex, 1);
      }
      this.droppedItems.push(item);
    }
  }
};
// Demo 3 root component: items start in the pool and stop rendering there
// once dropped (droppedItems is passed to Demo3DropZone as a prop and
// mutated there, so isItemDraggable sees the updates).
window.demos.Demo3 = {
  components: {
    Demo3DraggableElement,
    Demo3DropZone
  },
  template: `
    <div>
      <h4>Demo 3: Real-world usage</h4>
      <!-- Elements that have not been dropped yet -->
      <demo3-draggable-element
        v-for="item in draggableItems" :key="item"
        v-if="isItemDraggable(item)"
        :content="item"
      >
      </demo3-draggable-element>
      <demo3-drop-zone :dropped-items="droppedItems"></demo3-drop-zone>
    </div>
  `,
  data() {
    return {
      draggableItems: [1, 2, 3],
      droppedItems: []
    };
  },
  methods: {
    // An item may still be dragged from the pool while it has not been
    // dropped into the zone yet.
    isItemDraggable(item) {
      return !this.droppedItems.includes(item);
    }
  }
};
package blatt2;
import java.util.InputMismatchException;
import java.util.Random;
import java.util.Scanner;
/**
* Created by data on 01.11.17.
*/
/**
 * Console dice game "Lucky Seven": the player bets a stake on a field
 * (2..12), two dice are rolled, and the payout depends on how close the
 * roll is to the chosen field (see {@link #eval}).
 */
public class LustigeSieben {

    private static Scanner s = new Scanner(System.in);
    // Hoisted: creating a new Random per roll is wasteful and can reduce
    // randomness when called in quick succession.
    private static final Random RNG = new Random();

    public static void main(String[] args) {
        int cash = 100;
        while (true) {
            System.out.println("enter the fields you want to pick, then the stake you want so set on it");
            int field = inputInRange(2, 13);
            int stake = inputInRange(1, cash + 1);
            cash -= stake;
            int roll = roll();
            System.out.printf("you put %d dollars on %d and rolled a %d\n", stake, field, roll);
            cash += eval(roll, field, stake);
            if (cash <= 0) {
                System.out.println("you're out of cash, goodbye.");
                // BUG FIX: without this break the game continued with zero
                // cash, and inputInRange(1, 1) could never be satisfied —
                // the player was stuck in an endless re-prompt loop.
                break;
            }
            System.out.printf("you currently have %d dollars, do you want to continue playing?\n"
                    + "enter 0 - if not, any other digit otherwise\n", cash);
            if (inputInRange(0, Integer.MAX_VALUE) == 0) {
                System.out.println("don't spend it all in one place ;)");
                break;
            }
        }
    }

    /** Rolls two six-sided dice and returns their sum (2..12). */
    private static int roll() {
        return RNG.nextInt(6) + RNG.nextInt(6) + 2;
    }

    /**
     * Reads an int from stdin until it lies in [low, high); re-prompts on
     * non-integer input or out-of-range values.
     */
    private static int inputInRange(int low, int high) {
        int res = 0;
        try {
            res = s.nextInt();
        } catch (InputMismatchException e) {
            s.next(); // discard the offending token before retrying
            System.out.println("the input value is not an integer, please try again");
            return inputInRange(low, high);
        }
        if (res < low || res >= high) {
            System.out
                    .printf("the input value is not in the range %d -> %d enter a new value\n", low, high);
            return inputInRange(low, high);
        }
        return res;
    }

    /**
     * Payout rules: triple the stake for a seven on field seven, double for
     * an exact hit, stake back for any seven or when roll and field fall in
     * the same half (2-6 or 8-12); otherwise the stake is lost.
     */
    private static int eval(int res, int field, int stake) {
        if (res == 7 && field == 7) {
            return stake * 3;
        }
        if (res == field) {
            return stake * 2;
        }
        if (res == 7) {
            return stake;
        }
        if (sameRange(2, 6, res, field) || sameRange(8, 12, res, field)) {
            return stake;
        }
        return 0;
    }

    /** True when both values lie inside [low, high]. */
    private static boolean sameRange(int low, int high, int val1, int val2) {
        return (low <= val1 && val1 <= high) && (low <= val2 && val2 <= high);
    }
}
|
'use strict';
// Test double for navigator.mozIccManager: exposes exactly one ICC whose id
// is derived from the current timestamp.
(function() {
  var handlers = {};
  var cardId = Date.now();
  var cardIds = [cardId];

  var card = {
    addEventListener: function() {},
    getCardLockRetryCount: function() {
      return 4;
    },
    unlockCardLock: function(options) {
      // Mimics a DOMRequest: assigning onsuccess immediately invokes the
      // callback with `result` already set to true.
      return {
        set onsuccess(callback) {
          this.result = true;
          callback.call(this);
        }
      };
    }
  };

  function setCardProperty(property, newState) {
    card[property] = newState;
  }

  function addListener(evtName, func) {
    (handlers[evtName] = handlers[evtName] || []).push(func);
  }

  function removeListener(evtName, func) {
    var list = handlers[evtName];
    if (!list) {
      return;
    }
    var at = list.indexOf(func);
    if (at !== -1) {
      list.splice(at, 1);
    }
  }

  // Only the single mock ICC is resolvable; anything else yields null.
  function lookupIcc(iccId) {
    return iccId === cardId ? card : null;
  }

  window.MockNavigatorMozIccManager = {
    iccIds: cardIds,
    getIccById: lookupIcc,
    setProperty: setCardProperty,
    addEventListener: addListener,
    removeEventListener: removeListener
  };
})();
|
# Follow (-f) the logs of the `integration` container in the running
# stock-service Camel K pod (pod name resolved via the integration label).
oc logs $(oc get pod -l=camel.apache.org/integration=stock-service -o jsonpath='{.items[0].metadata.name}') -c integration -f
<html>
<head>
<title>Add Student Form</title>
</head>
<body>
<h2>Add Student Form</h2>
<form action="addStudent.php" method="post">
Name: <input type="text" name="name"><br>
Age: <input type="text" name="age"><br>
<input type="submit" value="Add Student">
</form>
</body>
</html>
<?php
// BUG FIX: the opening tag was "<php" (invalid), so this whole block was
// sent to the browser as literal text instead of being executed.
if (isset($_POST['name']) && isset($_POST['age'])) {
    $name = $_POST['name'];
    $age = $_POST['age'];
    // Save the record via a prepared statement (parameters are bound, so
    // user input cannot inject SQL).
    // NOTE(review): $db is assumed to be a connected PDO instance provided
    // by an include not shown here — confirm.
    $sql = "INSERT INTO Students (name, age) VALUES (?, ?)";
    $stmt = $db->prepare($sql);
    $stmt->execute([$name, $age]);
    echo "Student added successfully!";
}
?>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package TabularFunctions;
import java.util.ArrayList;
/**
*
* @author Will_and_Sara
*/
/**
 * A tabular function whose y-values carry uncertainty: ordinates are
 * distributions, and callers sample them at a given probability
 * (presumably a quantile of the uncertainty distribution — confirm with
 * the implementations).
 */
public interface ISampleWithUncertainty extends IWriteToXML {
    /** Y value at {@code x}, sampled at the given probability. */
    public double GetYFromX(double x, double probability);
    /** All y ordinates, each sampled at the given probability. */
    public ArrayList<Double> GetYValues(double probability);
    /** The underlying distribution of each y ordinate. */
    public ArrayList<Distributions.ContinuousDistribution> GetYDistributions();
    /** A deterministic curve obtained by sampling every ordinate at {@code probability}. */
    public ISampleDeterministically CurveSample(double probability);
}
|
#!/usr/bin/env bash
# Bootstrap a macOS development machine via Homebrew: CLI tools first,
# then GUI applications as casks.
sudo chown -R "$(whoami)" /usr/local/bin

# Command-line formulae.
formulae=(
  wget git coreutils vim tmux fzf pygments ack watchman postgresql
)
for formula in "${formulae[@]}"; do
  brew install "$formula"
done

# GUI applications.
# FIX: `brew cask install` was removed in Homebrew 2.6 (Dec 2020);
# `brew install --cask` is the supported equivalent.
casks=(
  iterm2 vlc brave-browser firefox spotify the-unarchiver slack postman
  karabiner-elements qbittorrent android-studio visual-studio-code
  sublime-text folx zoomus whatsapp android-file-transfer discord
)
for cask in "${casks[@]}"; do
  brew install --cask "$cask"
done
|
-- Adults (age 25 and over), youngest first.
SELECT first_name, last_name, age
FROM users
WHERE age >= 25
ORDER BY age ASC;
package entities.ai;
import render.Model;
import entities.Entity;
/**
 * AI behaviour that flaps an entity's two wing models back and forth.
 * Flap speed scales with the entity's motion magnitude (halved, and halved
 * again while the entity is on cooldown); the flap direction flips once
 * either wing reaches the maximum deflection.
 */
public class EntityAIMoveWings extends EntityAI {

    // Current flap direction; toggled each time the wings hit max deflection.
    protected boolean wing;

    public EntityAIMoveWings(Entity target) {
        super(target);
    }

    @Override
    public void update() {
        Model wing1 = (Model) getMethod("wing1");
        Model wing2 = (Model) getMethod("wing2");
        // SIMPLIFICATION: Math.hypot(x, y) is sqrt(x*x + y*y) and is
        // sign-independent, so the original abs-via-ternary wrappers around
        // motionX/motionY were redundant.
        double mx = Math.hypot(entity.motionX, entity.motionY) / 2;
        if (entity.cooldown > 0) {
            mx /= 2;
        }
        if (wing) {
            wing1.rotation += mx;
            wing2.rotation -= mx;
        } else {
            wing1.rotation -= mx;
            wing2.rotation += mx;
        }
        // Maximum wing deflection before the flap direction reverses.
        int dis = 10;
        if (wing1.rotation > dis || wing2.rotation > dis) {
            // Clamp both wings to the extreme position, then reverse.
            if (wing) {
                wing1.rotation = dis;
                wing2.rotation = -dis;
            } else {
                wing1.rotation = -dis;
                wing2.rotation = dis;
            }
            wing = !wing;
        }
    }
}
|
package io.github.rcarlosdasilva.weixin.model.response.media;
import com.google.gson.annotations.SerializedName;
/** Response payload for uploading mass-send (群发) media to the WeChat API. */
public class MediaAddMassResponse {

    @SerializedName("media_id")
    private String mediaId;
    private String type;
    @SerializedName("created_at")
    private long createAt;
    private String url;

    /**
     * Unique identifier obtained after uploading the media file / news message.
     *
     * @return media id
     */
    public String getMediaId() {
        return mediaId;
    }

    /**
     * Media file type: image, voice, video, thumb, or news.
     *
     * @return type
     */
    public String getType() {
        return type;
    }

    /**
     * Upload timestamp of the media file.
     *
     * @return time
     */
    public long getCreateAt() {
        return createAt;
    }

    /**
     * URL of the uploaded image; may be embedded into news messages for
     * subsequent mass sending.
     *
     * @return url
     */
    public String getUrl() {
        return url;
    }
}
|
package rs.aidev.jengine;
import java.awt.Color;
import java.awt.Graphics;
/** Sprite that renders a running integer score at its (x, y) position. */
public class Score extends Sprite {

    // Current score value drawn by paint().
    private int score = 0;

    public Score(Scene scene, double x, double y, int width, int height) {
        super(scene, x, y, width, height);
    }

    /** Increments the score by one. */
    public void add() {
        add(1);
    }

    /** Increments the score by {@code num}. */
    public void add(int num) {
        score += num;
    }

    @Override
    public void paint(Graphics g) {
        g.setColor(Color.BLACK);
        g.drawString(Integer.toString(score), (int) x, (int) y);
    }

    @Override
    public void update() {
        // Score has no per-frame behaviour.
    }

    @Override
    public String toString() {
        return "Score";
    }
}
|
import os
import re
import SCons.Builder
import SCons.Scanner
import SCons.Script
###################
# the DSP builder #
###################
# Matches Faust `import("file");` statements; used to discover .dsp dependencies.
INCLUDE_RE = re.compile(r'import\s*\(\s*"([^"]+)"\s*\)\s*;', re.M)
def dsp_source_scanner(node, env, path):
    """Scan source files for imported files in `path'."""
    contents = node.get_contents()
    # PY3 FIX: SCons' get_contents() returns bytes under Python 3, which
    # would make the str-pattern INCLUDE_RE.findall() raise TypeError.
    # Decode defensively so the scanner works on both Python 2 and 3.
    if isinstance(contents, bytes):
        contents = contents.decode('utf-8', 'replace')
    includes = [env.File(i) for i in INCLUDE_RE.findall(contents)]
    # Search relative to the scanned file first, then the configured FAUST_PATH.
    path = [node.Dir('.').path] + list(path)
    deps = [env.FindFile(str(f), path) for f in includes]
    return deps
def dsp_target_scanner(node, env, path):
    """Search for architecture file in `path'."""
    # FAUST_GET_ARCH is expanded through the construction environment so it
    # can be customized per build.
    arch = env.subst('${FAUST_GET_ARCH}')
    return [env.FindFile(arch, path)]
# Recursive source scanner: follows import("...") chains through FAUST_PATH.
dsp_src_scanner = SCons.Scanner.Scanner(
    function = dsp_source_scanner,
    recursive = True,
    path_function = SCons.Scanner.FindPathDirs('FAUST_PATH')
)
# Target scanner: makes the generated file depend on the architecture file.
dsp_tgt_scanner = SCons.Scanner.Scanner(
    function = dsp_target_scanner,
    path_function = SCons.Scanner.FindPathDirs('FAUST_PATH')
)
# Output suffix per faust backend language (-lang).
faust_lang_suffix_map = {
    "cpp": ".cpp",
    "c": ".c",
    "java": ".jar",
    "js": ".js",
    "llvm": ".ll",
    "fir": ".fir", # TODO: not sure about this
}
# The "-lang" flag only exists in faust 2.x; the inline expression is
# evaluated by SCons variable substitution at build time.
faust_action = '$FAUST_FAUST \
${FAUST_FLAGS} \
${FAUST_VERSION >= "2" and "-lang $FAUST_LANG" or ""} \
-a ${FAUST_GET_ARCH} \
-o $TARGET $SOURCE'
# Compile a .dsp source into the language selected by FAUST_LANG.
dsp = SCons.Builder.Builder(
    action = faust_action,
    suffix = lambda env,srcs: faust_lang_suffix_map[env['FAUST_LANG']],
    src_suffix = '.dsp',
    source_scanner = dsp_src_scanner,
    target_scanner = dsp_tgt_scanner
)
###################
# the XML builder #
###################
# Generate the .dsp.xml description: faust writes `<source>.xml` next to the
# input, so the second action moves it to the requested target.
xml = SCons.Builder.Builder(
    action = ['$FAUST_FAUST ${FAUST_FLAGS} -o /dev/null -xml $SOURCE',
              SCons.Script.Move('$TARGET', '${SOURCE}.xml')],
    suffix = '.dsp.xml',
    src_suffix = '.dsp',
    single_source = True,
    source_scanner = dsp_src_scanner
)
###################
# the SVG builder #
###################
# Render block diagrams: faust emits a `<source>-svg` directory, which is
# moved into place (hence the Dir target factory).
svg = SCons.Builder.Builder(
    action = ['$FAUST_FAUST ${FAUST_FLAGS} -o /dev/null -svg $SOURCE',
              SCons.Script.Move('$TARGET', '${SOURCE.base}-svg')],
    suffix = lambda env,srcs: "-svg",
    src_suffix = '.dsp',
    single_source = True,
    source_scanner = dsp_src_scanner,
    target_factory = SCons.Script.Dir
)
##############################
# the supercollider builders #
##############################
# SuperCollider class file generated from one or more .dsp.xml descriptions.
sc = SCons.Builder.Builder(
    action = '$FAUST2SC_FAUST2SC --lang=sclang --prefix="${FAUST2SC_PREFIX}" -o $TARGET $SOURCES',
    suffix = '.sc',
    src_suffix = '.dsp.xml',
    source_scanner = dsp_src_scanner,
    multi = True
)
# Haskell module generated from the same descriptions.
hs = SCons.Builder.Builder(
    action = '$FAUST2SC_FAUST2SC --lang=haskell --prefix="${FAUST2SC_HASKELL_MODULE}" -o $TARGET $SOURCES',
    suffix = '.hs',
    src_suffix = '.dsp.xml',
    source_scanner = dsp_src_scanner,
    multi = True
)
|
// source: tsamb/rts-tower-defence
// View: all DOM reads/writes for the game, kept out of the game logic.
// The only state held here is currentDisplayInfoBuilding (for the info
// panel refresh each tick).
var View = (function() {
  return {
    init: function() {
      this.renderBuildingButtons(BuildingsList);
    },

    ///// Event listeners /////
    // One delegated click handler per buildable building button; the index
    // is passed as jQuery event data to game.chooseBuilding.
    setBuildListeners: function(buildingsList, game) {
      this.setBuildKeyListeners(buildingsList, game);
      for (var i = 0; i < buildingsList.length; i++) {
        $(".building-container").on("click", "#new-building-" + i, i, game.chooseBuilding.bind(game));
      }
    },
    setBuildKeyListeners: function(buildingsList, game) {
      $(document).keyup(game.chooseBuildingFromKey.bind(game));
    },
    setCanvasClickListeners: function(board) {
      $("canvas").on("click", board.handleClicks.bind(board));
    },
    enablePauseButton: function(pauseFunction) {
      $("#pause").on("click", pauseFunction);
    },

    ///// DOM manipulation: updates /////
    updateTimer: function(seconds) {
      $("#timer").text(seconds);
    },
    displayResources: function(resources) {
      $("#matter-display").text("Matter: " + resources.matter);
      $("#energy-display").text("Energy: " + resources.energy);
      $("#net-matter-flow").text("MATTER | income: " + resources.matterIncome + " | expenses: " + resources.matterExpenses);
      $("#net-energy-flow").text("ENERGY | income: " + resources.energyIncome + " | expenses: " + resources.energyExpenses);
    },
    displayStatusMessage: function(message) {
      var para = "<p>" + message + "</p>";
      $(para).prependTo("#status-messages").hide().slideDown();
    },
    updateScore: function(enemies, buildings) {
      $("#enemies-destroyed").text(enemies);
      $("#buildings-destroyed").text(buildings);
    },
    // Show `building` in the info panel, or clear the panel when called
    // with a falsy argument; remembers the building so updateDisplayInfo
    // can refresh the panel later.
    displayInfo: function(building) {
      var buildingToDisplay;
      if (building) {
        this.currentDisplayInfoBuilding = building;
        buildingToDisplay = this.buildingInfoTemplate(building);
      } else {
        this.currentDisplayInfoBuilding = undefined;
        buildingToDisplay = "";
      }
      $("#info-panel-data").html(buildingToDisplay);
    },
    updateDisplayInfo: function() {
      if (this.currentDisplayInfoBuilding) {
        $("#info-panel-data").html(this.buildingInfoTemplate(this.currentDisplayInfoBuilding));
      }
    },

    ///// DOM manipulation: append/show/hide /////
    initialStartPrompt: function(pauseFunction) {
      $("#start-prompt").show();
      $("#start-prompt button").on('click', function() {
        $("#start-prompt").hide();
        pauseFunction();
      });
    },
    // Returns the raw <canvas> DOM element (index [0] unwraps jQuery).
    appendCanvas: function(width, height) {
      return $("<canvas width='" + width + "' height='" + height + "'></canvas>").appendTo("#canvas-container")[0];
    },
    renderBuildingButtons: function(buildingsList) {
      for (var i = 0; i < buildingsList.length; i++) {
        if (buildingsList[i].buildable) {
          $("#build-menu").append(this.buildingsTemplate(buildingsList[i], i));
        }
      }
    },
    displayGameOver: function(enemyStats, buildingStats, time) {
      var gameOverMessage = "<p>You destroyed " + enemyStats.numDestroyed + " enemies and dealt " + enemyStats.totalDamageDealt + " total damage.</p>" +
        "<p>Enemies destroyed " + buildingStats.numDestroyed + " of your buildings and dealt " + buildingStats.totalDamageDealt + " total damage.</p>" +
        "<p>You survived for " + time + " seconds.</p>";
      $("#game-over-message").append(gameOverMessage);
      $("#game-over-message").show();
    },

    ///// DOM manipulation: highlighting/user feedback /////
    highlightBuildingByElement: function(element) {
      this.deselectBuilding();
      $(element).addClass("selected-building");
    },
    highlightBuildingById: function(id) {
      this.deselectBuilding();
      $("#building-container-" + id).addClass("selected-building");
    },
    deselectBuilding: function() {
      $(".building-container").removeClass("selected-building");
    },

    ///// HTML templates /////
    // Card for one buildable building; only whitelisted attributes are
    // rendered into the table.
    buildingsTemplate: function(building, buildingIndex) {
      var attrWhitelist = ["name", "matterCost", "energyCost", "size"];
      var htmlString = "<div class='building-container' id='building-container-" + buildingIndex + "'><table>";
      for (var attr in building) {
        if (attrWhitelist.indexOf(attr) >= 0) {
          htmlString += "<tr>";
          htmlString += "<td>" + attr + ": </td>";
          if (attr === "size") {
            htmlString += "<td>" + building[attr].x + " x " + building[attr].y + "</td>";
          } else {
            htmlString += "<td>" + building[attr] + "</td>";
          }
          htmlString += "</tr>";
        }
      }
      htmlString += "</table><button id='new-building-" + buildingIndex + "'>Build " + building.name + "</button></div>";
      return htmlString;
    },
    // BUG FIX: the original emitted a duplicated "<tr>" before the
    // Experience row ("</tr><tr><tr><td>Experience..."), producing
    // malformed table markup.
    buildingInfoTemplate: function(building) {
      return ["<table>",
        "<tr>",
        "<td>Name:</td>",
        "<td>", building.name, "</td>",
        "</tr>",
        "<tr>",
        "<td>Hit points:</td>",
        "<td>", building.hp, " / ", building.maxHp, "</td>",
        "</tr>",
        "<tr>",
        "<td>Experience:</td>",
        "<td>", building.xp, " / ", building.xpForNextLevel(), "</td>",
        "</tr>",
        "<tr>",
        "<td>Level:</td>",
        "<td>", building.level, "</td>",
        "</tr>",
        "<tr>",
        "<td>Matter production:</td>",
        "<td>", building.matterProduction, "</td>",
        "</tr>",
        "<tr>",
        "<td>Energy production:</td>",
        "<td>", building.energyProduction, "</td>",
        "</tr>",
        "</table>"].join("");
    }
  };
})();
|
// Index renders a colon separated list enclosed by square brackets
// Use for array / slice indexes and definitions
package main
import (
"fmt"
. "github.com/dave/jennifer/jen"
)
// main demonstrates jennifer's Index renderer (see the package comment:
// Index renders a colon-separated list in square brackets).
func main() {
	// An empty Index() renders a slice type: `var a []string`.
	c := Var().Id("a").Index().String()
	fmt.Printf("%#v\n", c)
	// Two items render a slice expression — presumably `a := b[0:1]`.
	c = Id("a").Op(":=").Id("b").Index(Lit(0), Lit(1))
	fmt.Printf("%#v\n", c)
	// Empty() leaves the upper bound blank — presumably `a := b[1:]`.
	c = Id("a").Op(":=").Id("b").Index(Lit(1), Empty())
	fmt.Printf("%#v\n", c)
}
// Babel runtime helpers emitted by @babel/preset-env (spread / object-spread
// transforms). Auto-generated — do not edit by hand.
function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); }
function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
// Color utilities: parsing, grouping by hue/lightness and formatting.
// Colors are objects with string `rgb`/`hsl` fields until parseColorStrings
// converts them to numeric triples (hsl = [hue 0-360, sat %, lightness %]).
window.modules.Colors = (({
  Utils: {
    getNumbers
  }
}) => {
  const {
    abs
  } = Math;
  /*
   * For each color in colorList, check if there's an equivalent color
   * with a different name.
   * Eliminate the non-preferred colors that have equivalents.
   * A color can be thought "preferred", if it has `alternativeName` field in it.
   * e.g. Eliminates fuchsia and aqua in favor of magenta and cyan, respectively.
   */
  const removeAlternativeColors = colorList => {
    // (unused `index` parameter removed from the map callback)
    return colorList.map(color => {
      const equivalent = colorList.find(c => c !== color && c.hex === color.hex);
      if (!equivalent || color.alternativeName === equivalent.name) {
        return color;
      }
      return null;
    }).filter(c => !!c);
  };
  // Parse "rgb(r,g,b)" / "hsl(h,s%,l%)" strings into numeric triples.
  const parseColorStrings = color => _objectSpread(_objectSpread({}, color), {}, {
    rgb: color.rgb.match(/rgb\((\d+),(\d+),(\d+)\)/).slice(1).map(Number),
    hsl: color.hsl.match(/hsl\((.*),(.*)%,(.*)%\)/).slice(1).map(Number)
  });
  // Zero saturation means a shade of grey.
  const isMonochrome = color => color.hsl[1] === 0;
  const isNonMonochrome = color => !isMonochrome(color);
  // Keep colors whose hue is within `tolerance` of `hue`, widening the
  // tolerance one degree at a time until something matches.
  const filterColorsByHue = (colorList, hue, tolerance) => {
    // BUG FIX: with an empty colorList (or once tolerance exceeds the whole
    // hue circle) the original recursed without bound; bail out instead.
    if (!colorList.length || tolerance > 360) {
      return {
        list: colorList,
        tolerance
      };
    }
    const colors = colorList.filter(color => abs(hue - color.hsl[0]) < tolerance);
    if (colors.length) {
      return {
        list: colors,
        tolerance
      };
    }
    return filterColorsByHue(colorList, hue, tolerance + 1);
  };
  // Bucket colors into lightness bands of width `tolerance` (darkest band
  // first); values exactly on a band edge go to the darker band; empty
  // bands are dropped.
  const groupColorsByLightness = (colorList, tolerance) => {
    return getNumbers(100 / tolerance + 1).map(t => colorList.filter(color => {
      const difference = 100 - color.hsl[2] - t * tolerance;
      const differenceLimit = tolerance / 2;
      if (abs(difference) === differenceLimit) {
        return difference > 0;
      }
      return abs(difference) < differenceLimit;
    })).filter(group => !!group.length);
  };
  // Pipeline: select mono/non-mono colors, sort by saturation, filter by
  // hue (starting at tolerance.min, widened as needed), group by lightness.
  const groupColors = ({
    colorList,
    hue,
    tolerance,
    mono
  }) => {
    const baseColors = colorList.filter(mono ? isMonochrome : isNonMonochrome);
    const sortedColors = _toConsumableArray(baseColors).sort((a, b) => a.hsl[1] - b.hsl[1]);
    const colorsFilteredByHue = filterColorsByHue(sortedColors, hue, tolerance.min);
    const lightnessGroups = groupColorsByLightness(colorsFilteredByHue.list, tolerance.min);
    return {
      list: lightnessGroups,
      tolerance: colorsFilteredByHue.tolerance
    };
  };
  const formatRGB = rgb => `rgb(${rgb.join(', ')})`;
  const formatHSL = hsl => `hsl(${hsl.map((_, i) => i === 0 ? _ : `${_}%`).join(', ')})`;
  return {
    removeAlternativeColors,
    parseColorStrings,
    isMonochrome,
    isNonMonochrome,
    filterColorsByHue,
    groupColorsByLightness,
    groupColors,
    formatRGB,
    formatHSL
  };
})(window.modules);
package com.blinkbox.books.config
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite, Matchers}
@RunWith(classOf[JUnitRunner])
@RunWith(classOf[JUnitRunner])
class ConfigurationTests extends FunSuite with BeforeAndAfterEach with Matchers {
  // Minimal concrete instance of the Configuration trait under test.
  class Configured extends Configuration

  // Snapshot of the process environment, restored after every test.
  val originalEnvironment = System.getenv()

  override def afterEach(): Unit = {
    setEnv(originalEnvironment)
    System.clearProperty("testing.conf.test")
  }

  test("Loads configuration normally when no environment variable is set") {
    val config = loadTestConfig
    config.getString("application.conf.test") should be("application")
    config.getString("reference.conf.test") should be("reference")
  }

  test("Loads config from classpath URL specified in CONFIG_URL") {
    setConfigUrl(Some("classpath:///testing.conf"))
    val config = loadTestConfig
    config.getString("application.conf.test") should be("application")
    config.getString("testing.conf.test") should be("testing")
    config.getString("reference.conf.test") should be("reference")
  }

  test("Loads config from file URL specified in CONFIG_URL") {
    setConfigUrl(Some(resourceFile("testing.conf")))
    val config = loadTestConfig
    config.getString("application.conf.test") should be("application")
    config.getString("testing.conf.test") should be("testing")
    config.getString("reference.conf.test") should be("reference")
  }

  // Precedence expected: CONFIG_URL file > application.conf > reference.conf.
  test("Config files are merged with the external one taking precedence") {
    setConfigUrl(Some("classpath:///testing.conf"))
    val config = loadTestConfig
    config.getString("key1") should be("testing")
    config.getString("key2") should be("application")
    config.getString("key3") should be("reference")
  }

  test("External config can be overridden by system properties") {
    setConfigUrl(Some("classpath:///testing.conf"))
    System.setProperty("testing.conf.test", "overridden!")
    val config = loadTestConfig
    config.getString("testing.conf.test") should be("overridden!")
  }

  test("GetConfigObjectOption") {
    setConfigUrl(Some(resourceFile("testing.conf")))
    val config = loadTestConfig
    val map = config.getConfigObjectOption("map").get
    assert( "value1" == map.get("key1").unwrapped())
    assert( "value2" == map.get("key2").unwrapped())
  }

  // TODO: Could test HTTP loading using URLStreamHandlerFactory, but is it worth the effort?

  // Injects (or clears) CONFIG_URL into the mutated process environment.
  private def setConfigUrl(url: Option[String]) = {
    val newEnv = new java.util.HashMap[String, String](System.getenv())
    url.foreach(newEnv.put("CONFIG_URL", _))
    setEnv(newEnv)
  }

  // dirty dirty hack to allow setting environment variables
  // Reflects into the unmodifiable map backing System.getenv; relies on
  // JDK internals and may break under stronger module encapsulation on
  // newer JVMs — confirm on the target JDK.
  private def setEnv(newEnv: java.util.Map[String, String]): Unit = {
    val classes = classOf[java.util.Collections].getDeclaredClasses
    val cl = classes.filter(_.getName == "java.util.Collections$UnmodifiableMap").head
    val field = cl.getDeclaredField("m")
    field.setAccessible(true)
    val map = field.get(System.getenv).asInstanceOf[java.util.Map[String, String]]
    map.clear()
    map.putAll(newEnv)
  }

  private def resourceFile(filename: String) = getClass.getClassLoader.getResource(filename).toString

  private def loadTestConfig = {
    ConfigFactory.invalidateCaches() // ensure we're loading afresh
    new Configured().config
  }
}
|
#pragma once
/*
** Copyright (C) 2013 Aldebaran Robotics
** See COPYING for the license
*/
// NOTE(review): both #pragma once and classic include guards are present;
// redundant but harmless.
#ifndef _QIPYTHON_PYSESSION_HPP_
#define _QIPYTHON_PYSESSION_HPP_
#include <boost/python.hpp>
#include <qimessaging/session.hpp>
#include <qipython/api.hpp>
namespace qi {
namespace py {
// Creates the Python-side wrapper object for an existing qi Session.
QIPYTHON_API boost::python::object makePySession(const SessionPtr& ses);
// Registers the session bindings with the Python interpreter
// (implementation elsewhere — presumably called during module init).
void export_pysession();
}
}
#endif // _QIPYTHON_PYSESSION_HPP_
|
#!/bin/bash
# Increment a version string using Semantic Versioning (SemVer) terminology.
# Usage: bump [-M] [-m] [-p] major.minor.patch
#   -M  bump major (resets minor and patch to 0)
#   -m  bump minor (resets patch to 0)
#   -p  bump patch
major=
minor=
patch=

# Parse command line options.
while getopts ":Mmp" Option; do
  case $Option in
    M) major=true ;;
    m) minor=true ;;
    p) patch=true ;;
  esac
done
shift $((OPTIND - 1))

version=$1

# Build array from version string.
# FIX: `read -a` splits only on '.', whereas the original unquoted
# `a=( ${version//./ } )` was also subject to glob expansion (e.g. "1.*.3").
IFS='.' read -r -a a <<< "$version"

# If version string is missing or has the wrong number of members, show usage message.
if [ "${#a[@]}" -ne 3 ]; then
  echo "usage: $(basename "$0") [-Mmp] major.minor.patch"
  exit 1
fi

# Increment version numbers as requested.
# FIX: quoted -n tests (the original unquoted `[ ! -z $major ]` only worked
# by accident when the variable was unset), and arithmetic assignment
# instead of `(( a[0]++ ))` whose exit status is non-zero when the old
# value is 0.
if [ -n "$major" ]; then
  a[0]=$((a[0] + 1))
  a[1]=0
  a[2]=0
fi
if [ -n "$minor" ]; then
  a[1]=$((a[1] + 1))
  a[2]=0
fi
if [ -n "$patch" ]; then
  a[2]=$((a[2] + 1))
fi

echo "${a[0]}.${a[1]}.${a[2]}"
|
import { Sequelize } from 'sequelize';
export const sequelize: Sequelize = new Sequelize({
database: process.env.DB_NAME,
username: process.env.DB_USER,
password: <PASSWORD>,
host: process.env.DB_HOST,
port: +process.env.DB_PORT!,
// @ts-ignore
dialect: 'postgres',
dialectOptions: {
ssl: {
require: true,
rejectUnauthorized: false,
},
},
pool: {
max: 5,
min: 0,
acquire: 30000,
idle: 10000,
},
});
// Verify database connectivity and sync the models. Fire-and-forget:
// failures are logged, not propagated to the caller.
export const databaseGenerate = () => {
  sequelize
    .authenticate()
    // FIX: message typo "batabase" -> "database".
    .then(() => console.log('✅ Connection to database is successfully set on port ' + process.env.DB_PORT))
    .catch(err => console.log(err));
  // Sync db. FIX: with { force: false } tables are NOT dropped, so the old
  // "Drop and re-sync db." message was wrong; also log sync failures
  // instead of leaving an unhandled promise rejection.
  sequelize
    .sync({ force: false })
    .then(() => {
      console.log('✅ Database synced.');
    })
    .catch(err => console.log(err));
};
|
#!/bin/bash
# Smoke test against a local zenboot instance: trigger the sanitycheck job
# via REST, then poll the returned callback URL until the job reports
# RUNNING and finally SUCCESS. Helper asserts come from the sourced file.
. sanitycheck_functions.sh

NODENAME=127.0.0.1
PORT=8080
URL="http://${NODENAME}:${PORT}/zenboot/rest/sanitycheck"

test_setup

# First call it to verify that this job get created
assert_http_code "$URL" 201 "POST" "Accept: text/xml" "sanitycheck:sanitycheck"

echo "# Make a second call "
# FIX: $(...) instead of backticks, and reuse $URL instead of a second
# hard-coded copy of the endpoint.
RETURNVALUE=$(curl -sL --write-out '%{http_code}' --request POST --max-time 5 --basic --user 'sanitycheck:sanitycheck' -H 'Accept: text/xml' "$URL")
echo "RETURNVALUE:-----------------------------------------------------"
echo "$RETURNVALUE"
echo "END--------------------------------------------------------------"
# Extract the <referral> element: the callback URL for polling job status.
# ($RETURNVALUE stays unquoted inside the pipeline on purpose, so a
# multi-line response is collapsed to one line before sed runs.)
CALLBACK=$(echo $RETURNVALUE | sed "s/.*<referral>//" | sed "s/<\/referral>.*//")
echo "# CALLBACK is $CALLBACK"

sleep 1
assert_http_response "$CALLBACK" "RUNNING" "-H 'Accept: text/xml' --user sanitycheck:sanitycheck"
sleep 7
assert_http_response "$CALLBACK" "SUCCESS" "-H 'Accept: text/xml' --user sanitycheck:sanitycheck"

test_teardown
|
// source: dutinmeow/library — utility/fast-pow.hpp
#pragma region fast_pow
#ifndef FAST_POW_HPP
#define FAST_POW_HPP
// Exponentiation by squaring: computes a^b in O(log b) multiplications.
// b == 0 yields the multiplicative identity (1), matching the original.
template<typename T, typename U>
T fast_pow(T a, U b) {
    T result = 1;
    while (b) {
        if (b & 1) {
            result = result * a;
        }
        a = a * a;
        b /= 2;
    }
    return result;
}
// Modular exponentiation by squaring: computes a^b mod `mod`.
// BUG FIX: the original for-init expression was `a % mod`, which computed
// the remainder and discarded it — `a` entered the loop un-reduced, so
// `a * a` could overflow for large bases. Reduce `a` with `%=` first.
template<typename T, typename U, typename S>
T fast_pow(T a, U b, const S &mod) {
    T ret = 1;
    a %= mod;
    for (; b; b /= 2) {
        if (b & 1)
            ret = ret * a % mod;
        a = a * a % mod;
    }
    return ret;
}
#endif
#pragma endregion fast_pow |
#!/bin/sh
# Initramfs init: assemble a read-only root overlaid with a writable layer,
# then hand off to the real init (see mount_and_boot below).
# Enable strict shell mode
# NOTE(review): `pipefail` is a bash/ksh extension; a strict POSIX /bin/sh
# (e.g. dash) will reject this set line — confirm which shell the image
# actually provides.
set -euo pipefail
PATH=/sbin:/bin:/usr/sbin:/usr/bin

# Tool paths and defaults; the ROOT_* values below may be overridden from
# the kernel command line (parsed in read_args).
MOUNT="/bin/mount"
UMOUNT="/bin/umount"
INIT="/sbin/init"
ROOT_ROINIT="/sbin/init"
ROOT_MOUNT="/mnt"
ROOT_RODEVICE=""
ROOT_RWDEVICE=""
ROOT_ROMOUNT="/media/rfs/ro"
ROOT_RWMOUNT="/media/rfs/rw"
ROOT_RWRESET="no"
ROOT_ROFSTYPE=""
ROOT_ROMOUNTOPTIONS="bind"
ROOT_ROMOUNTOPTIONS_DEVICE="noatime,nodiratime"
ROOT_RWFSTYPE=""
ROOT_RWMOUNTOPTIONS="rw,noatime tmpfs"
ROOT_RWMOUNTOPTIONS_DEVICE="rw,noatime"
# Mount the pseudo filesystems (/proc, /sys) and make sure /dev exists
# (devtmpfs is only mounted when nothing is mounted on /dev yet).
early_setup() {
    mkdir -p /proc
    mkdir -p /sys
    $MOUNT -t proc proc /proc
    $MOUNT -t sysfs sysfs /sys
    grep -w "/dev" /proc/mounts >/dev/null || $MOUNT -t devtmpfs none /dev
}
# Parse the kernel command line (or a pre-set $CMDLINE) and populate the
# ROOT_* / INIT globals. Unknown arguments are silently ignored.
read_args() {
    [ -z "${CMDLINE+x}" ] && CMDLINE=`cat /proc/cmdline`
    for arg in $CMDLINE; do
        # Set optarg to option parameter, and '' if no parameter was
        # given
        optarg=`expr "x$arg" : 'x[^=]*=\(.*\)' || echo ''`
        case $arg in
            root=*)
                ROOT_RODEVICE=$optarg ;;
            rootfstype=*)
                ROOT_ROFSTYPE="$optarg"
                # Preload the filesystem module so the later mount cannot
                # fail on a missing driver; failure is logged, not fatal.
                modprobe $optarg 2> /dev/null || \
                    log "Could not load $optarg module";;
            rootinit=*)
                ROOT_ROINIT=$optarg ;;
            rootoptions=*)
                ROOT_ROMOUNTOPTIONS_DEVICE="$optarg" ;;
            rootrw=*)
                ROOT_RWDEVICE=$optarg ;;
            rootrwfstype=*)
                ROOT_RWFSTYPE="$optarg"
                modprobe $optarg 2> /dev/null || \
                    log "Could not load $optarg module";;
            rootrwreset=*)
                ROOT_RWRESET=$optarg ;;
            rootrwoptions=*)
                ROOT_RWMOUNTOPTIONS_DEVICE="$optarg" ;;
            init=*)
                INIT=$optarg ;;
        esac
    done
}
# Report an unrecoverable error on the console and drop to an interactive
# shell. Never returns (exec replaces this process).
fatal() {
    echo "rorootfs-overlay: $1" >$CONSOLE
    echo >$CONSOLE
    exec sh
}
# Write a tagged progress/warning message to the console.
log() {
    echo "rorootfs-overlay: $1" >$CONSOLE
}
# Boot sequence: pseudo filesystems first, then parse the kernel command
# line. CONSOLE may already be inherited from the environment.
early_setup
[ -z "${CONSOLE+x}" ] && CONSOLE="/dev/console"
read_args
# Assemble the overlay root: mount the read-only lower and read-write upper
# file systems, combine them with overlayfs (aufs as fallback), move the
# virtual file systems inside, and exec the real init via chroot.
# Never returns on success; drops to a shell via fatal() on failure.
mount_and_boot() {
    mkdir -p $ROOT_MOUNT $ROOT_ROMOUNT $ROOT_RWMOUNT

    # Build mount options for read only root file system.
    # If no read-only device was specified via kernel command line, use
    # current root file system via bind mount.
    ROOT_ROMOUNTPARAMS_BIND="-o ${ROOT_ROMOUNTOPTIONS} /"
    if [ -n "${ROOT_RODEVICE}" ]; then
        ROOT_ROMOUNTPARAMS="-o ${ROOT_ROMOUNTOPTIONS_DEVICE} $ROOT_RODEVICE"
        if [ -n "${ROOT_ROFSTYPE}" ]; then
            ROOT_ROMOUNTPARAMS="-t $ROOT_ROFSTYPE $ROOT_ROMOUNTPARAMS"
        fi
    else
        ROOT_ROMOUNTPARAMS="$ROOT_ROMOUNTPARAMS_BIND"
    fi

    # Mount root file system to new mount-point, if unsuccessful, try bind
    # mounting current root file system.
    # POSIX FIX: `=` instead of the bashism `==` inside [ ] (this script
    # runs under /bin/sh).
    if ! $MOUNT $ROOT_ROMOUNTPARAMS "$ROOT_ROMOUNT" 2>/dev/null && \
        [ "x$ROOT_ROMOUNTPARAMS_BIND" = "x$ROOT_ROMOUNTPARAMS" ] || \
        log "Could not mount $ROOT_RODEVICE, bind mounting..." && \
        ! $MOUNT $ROOT_ROMOUNTPARAMS_BIND "$ROOT_ROMOUNT"; then
        fatal "Could not mount read-only rootfs"
    fi

    # Remounting root file system as read only.
    if ! $MOUNT -o remount,ro "$ROOT_ROMOUNT"; then
        fatal "Could not remount read-only rootfs as read only"
    fi

    # If future init is the same as current file, use $ROOT_ROINIT
    # Tries to avoid loop to infinity if init is set to current file via
    # kernel command line
    if cmp -s "$0" "$INIT"; then
        INIT="$ROOT_ROINIT"
    fi

    # Build mount options for read write root file system.
    # If a read-write device was specified via kernel command line, use
    # it, otherwise default to tmpfs.
    if [ -n "${ROOT_RWDEVICE}" ]; then
        ROOT_RWMOUNTPARAMS="-o $ROOT_RWMOUNTOPTIONS_DEVICE $ROOT_RWDEVICE"
        if [ -n "${ROOT_RWFSTYPE}" ]; then
            ROOT_RWMOUNTPARAMS="-t $ROOT_RWFSTYPE $ROOT_RWMOUNTPARAMS"
        fi
    else
        ROOT_RWMOUNTPARAMS="-t tmpfs -o $ROOT_RWMOUNTOPTIONS"
    fi

    # Mount read-write file system into initram root file system
    if ! $MOUNT $ROOT_RWMOUNTPARAMS $ROOT_RWMOUNT ; then
        fatal "Could not mount read-write rootfs"
    fi

    # Reset read-write file system if specified.
    # POSIX FIX: replaced `==` and the obsolescent `-a` with two separate
    # [ ] tests joined by &&.
    if [ "yes" = "$ROOT_RWRESET" ] && [ -n "${ROOT_RWMOUNT}" ]; then
        rm -rf $ROOT_RWMOUNT/*
    fi

    # Determine which unification file system to use
    union_fs_type=""
    if grep -w "overlay" /proc/filesystems >/dev/null; then
        union_fs_type="overlay"
    elif grep -w "aufs" /proc/filesystems >/dev/null; then
        union_fs_type="aufs"
    else
        union_fs_type=""
    fi

    # Create/Mount overlay root file system
    case $union_fs_type in
        "overlay")
            mkdir -p $ROOT_RWMOUNT/upperdir $ROOT_RWMOUNT/work
            $MOUNT -t overlay overlay \
                -o "$(printf "%s%s%s" \
                    "lowerdir=$ROOT_ROMOUNT," \
                    "upperdir=$ROOT_RWMOUNT/upperdir," \
                    "workdir=$ROOT_RWMOUNT/work")" \
                $ROOT_MOUNT
            ;;
        "aufs")
            # BUG FIX: the original line read `-t aufs i\` — the stray `i`
            # was passed to mount as a bogus extra argument.
            $MOUNT -t aufs \
                -o "dirs=$ROOT_RWMOUNT=rw:$ROOT_ROMOUNT=ro" \
                aufs $ROOT_MOUNT
            ;;
        "")
            fatal "No overlay filesystem type available"
            ;;
    esac

    # Move read-only and read-write root file system into the overlay
    # file system
    mkdir -p $ROOT_MOUNT/$ROOT_ROMOUNT $ROOT_MOUNT/$ROOT_RWMOUNT
    $MOUNT -n --move $ROOT_ROMOUNT ${ROOT_MOUNT}/$ROOT_ROMOUNT
    $MOUNT -n --move $ROOT_RWMOUNT ${ROOT_MOUNT}/$ROOT_RWMOUNT
    $MOUNT -n --move /proc ${ROOT_MOUNT}/proc
    $MOUNT -n --move /sys ${ROOT_MOUNT}/sys
    $MOUNT -n --move /dev ${ROOT_MOUNT}/dev

    cd $ROOT_MOUNT

    # switch to actual init in the overlay root file system
    exec chroot $ROOT_MOUNT $INIT ||
        fatal "Couldn't chroot, dropping to shell"
}
mount_and_boot
|
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.commons.util;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.Type;
import org.apache.uima.resource.ResourceInitializationException;
import java.util.Arrays;
/**
* @author <NAME>
*/
public class AnnotatorUtils {
private AnnotatorUtils() {
}
public static void mandatoryParam(String paramName, Object value)
throws ResourceInitializationException {
if (value == null) {
throw new ResourceInitializationException(
new IllegalStateException(String.format(
"Missing mandatory parameter '%s' value", paramName)));
}
}
public static void requireParam(boolean expr, String paramName, Object value)
throws ResourceInitializationException {
if (!expr) {
throw new ResourceInitializationException(
new IllegalStateException(String.format(
"Illegal value of parameter '%s': %s",
paramName, value)));
}
}
public static void requireParams(boolean expr, String[] paramNames, Object[] paramValues)
throws ResourceInitializationException {
if (!expr) {
throw new ResourceInitializationException(
new IllegalStateException(String.format(
"Illegal value of parameters '%s': %s",
Arrays.toString(paramNames), Arrays.toString(paramValues))));
}
}
public static void mandatoryResourceObject(String resKey, Object resource)
throws ResourceInitializationException {
if (resource == null) {
throw new ResourceInitializationException(
new IllegalStateException(String.format(
"Missing mandatory resource under '%s' key", resKey)));
}
}
public static void annotationTypeExist(String typeName, Type type)
throws AnalysisEngineProcessException {
if (type == null) {
throw new AnalysisEngineProcessException(
new IllegalStateException(String.format(
"Unknown type - '%s'", typeName)));
}
}
public static Feature featureExist(Type type, String featureName)
throws AnalysisEngineProcessException {
Feature result = type.getFeatureByBaseName(featureName);
if (result == null) {
throw new AnalysisEngineProcessException(
new IllegalStateException(String.format(
"Type %s doesn't have feature '%s'", type, featureName)));
}
return result;
}
} |
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { AppComponent } from './app.component';
import { RepositoryModule } from './repository/repository.module';
import { SidebarModule, DialogModule, ButtonModule, MessageService } from 'primeng/primeng';
import { CardModule } from 'primeng/card';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
import { TooltipModule } from 'primeng/tooltip';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { ToastModule } from 'primeng/toast';
import { ReactiveFormsModule } from '@angular/forms';
import { ThoughtModule } from './thought/thought.module';
import { DocumentModule } from './document/document.module';
import { ContactModule } from './contact/contact.module';
// Application route table: the bare URL redirects to '/repo'.
// NOTE(review): no component is mapped to '/repo' here - presumably a
// feature module registers it; confirm before enabling the '**' fallback.
const appRoutes: Routes = [
// { path: 'repo', component: OverviewComponent}, // <-- delete this line
{path: '', redirectTo: '/repo', pathMatch: 'full'},
// { path: '**', component: PageNotFoundComponent }
];
// Root module: wires the feature modules (repository, thought, document,
// contact), the PrimeNG UI modules and the router together.
// MessageService is provided for PrimeNG's toast notifications.
@NgModule({
declarations: [
AppComponent,
],
imports: [
RepositoryModule,
ThoughtModule,
DocumentModule,
ContactModule,
BrowserModule,
BrowserAnimationsModule,
RouterModule.forRoot(
appRoutes,
),
SidebarModule,
FontAwesomeModule,
TooltipModule,
DialogModule,
ButtonModule,
CardModule,
ToastModule,
ReactiveFormsModule,
],
providers: [MessageService],
bootstrap: [AppComponent],
})
export class AppModule {
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8

# Enter the distdir produced by the build stage.
# Fixed: 'cd ... || (echo ...; exit 1)' only exited the subshell created by
# the parentheses, so on a failed cd the script kept running in the wrong
# directory. A brace group runs in the current shell and really aborts.
cd "build/nightcoin-$HOST" || { echo "could not enter distdir build/nightcoin-$HOST" >&2; exit 1; }

# Unit tests run inside the CI container; LD_LIBRARY_PATH points at the
# depends-built libraries for this host triplet.
if [ "$RUN_UNIT_TESTS" = "true" ]; then
  BEGIN_FOLD unit-tests
  DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1
  END_FOLD
fi

if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
  BEGIN_FOLD functional-tests
  DOCKER_EXEC test/functional/test_runner.py --ci --combinedlogslen=4000 --coverage --quiet --failfast
  END_FOLD
fi

if [ "$RUN_FUZZ_TESTS" = "true" ]; then
  BEGIN_FOLD fuzz-tests
  DOCKER_EXEC test/fuzz/test_runner.py -l DEBUG ${DIR_FUZZ_IN}
  END_FOLD
fi
|
<reponame>Yabetti/springboot-training
package system.common;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import system.model.Book;
import system.repository.BookRepository;
@Component
@ConditionalOnProperty(prefix = "extension.test.generator", name = "enabled", matchIfMissing = false)
public class TestDataGenerator {

    /** Number of sample books inserted at startup. */
    private static final int SIZE = 7;

    @Autowired
    private BookRepository repo;

    /**
     * Seeds the book table with SIZE sample rows right after the bean is
     * constructed. Only active when extension.test.generator.enabled=true.
     */
    @PostConstruct
    @Transactional
    public void initialize() {
        int index = 0;
        while (index < SIZE) {
            Book sample = new Book("テスト_" + index, "テスト", String.valueOf(index * 1000), "978-4-7981-4247-0");
            repo.insert(sample);
            index++;
        }
    }
}
|
<reponame>hoenic07/plugchecker-client
import { html, render } from "lit-html";
import ViewBase from "../component/viewBase";
import {unsafeHTML} from 'lit-html/directives/unsafe-html.js';
import AuthService from "../repository/authorizationService";
/**
 * Login / registration / password-reset modal dialog.
 *
 * Renders lit-html templates into the element named by `this.root`
 * ("messageDialog") and talks to the backend via AuthService. On successful
 * login the returned token pair is persisted through the settings repository
 * and the page is reloaded.
 */
export default class Authorization extends ViewBase {
  constructor(depts) {
    super(depts);
    this.root = "messageDialog";
    this.settingsRepo = depts.settingsPrimitive();
    this.authService = new AuthService();
    // Client-side input validation rules.
    this.validation = {
      email: new RegExp(/^[\w-+_\.]+@([\w-]+\.)+[\w-]{2,}$/),
      password: new RegExp(/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9]).{8,64}$/),
      username: new RegExp(/^\w+(\s\w+)*$/),
    };
    // Pre-rendered error snippets keyed by failure kind.
    this.errorMessages = {
      email: `<p class="w3-text-red">${this.t("authEmailValidationError")}</p>`,
      username: `<p class="w3-text-red">${this.t("authUsernameValidationError")}</p>`,
      serviceUnavailable: `<p class="w3-text-red">${this.t("authServiceNotAvailable")}</p>`,
      passwordNotValid: `<p class="w3-text-red">${this.t("authPasswordValidationError")}</p>`,
    };
  }

  /**
   * Combined sign-in / sign-up view with tab switching.
   * Fixed: the sign_up form carried `@submit="return false;"` - lit-html
   * event bindings must be expressions, so this was an inert junk attribute.
   * Submission is actually suppressed by the preventDefault listeners added
   * in render().
   */
  loginTemplate() {
    return html`
      <div class="w3-modal-content w3-card-4 w3-animate-top">
        <div class="w3-row w3-bar pc-secondary">
          <button
            @click="${() => this.onCloseModal()}"
            class="w3-col w3-button w3-right w3-hover-dark-gray popup-header-close"
          >
            <img class="inverted" class="w3-button " src="img/close.svg" />
          </button>
          <div class="w3-rest w3-large popup-header">${this.t("authModalHeader")}</div>
        </div>
        <div class="w3-container auth-modal-container">
          <div class="w3-section">
            <div class="w3-bar">
              <button
                id="sign_in_button"
                class="w3-bar-item w3-button pc-main"
                @click="${() => this.onTabChange("sign_in")}"
              >
                ${this.t("authLogInTabLabel")}
              </button>
              <button
                id="sign_up_button"
                class="w3-bar-item w3-button"
                @click="${() => this.onTabChange("sign_up")}"
              >
                ${this.t("authSignUpTabLabel")}
              </button>
            </div>
            <form id="sign_in">
              <div>
                <label>${this.t("authLabelEmail")}:</label>
                <input type="text" name="sign_in_email" class="w3-input w3-border w3-margin-bottom" />
              </div>
              <div>
                <label>${this.t("authLabelPassword")}:</label>
                <input type="password" name="sign_in_password" class="w3-input w3-border" />
              </div>
              <label class="link-text" @click="${() => this.onResetPasswordRequest()}">
                ${this.t("authForgotPasswordLink")}
              </label>
              <button
                class="w3-button w3-block pc-main w3-section w3-padding"
                type="submit"
                @click="${(event) => this.onLogin(event)}"
              >
                <i class="fa fa-refresh fa-spin"></i>${this.t("authLogInBtnText")}
              </button>
            </form>
            <form id="sign_up" style="display:none">
              <div>
                <label>${this.t("authLabelEmail")}:</label>
                <input type="text" name="sign_up_email" class="w3-input w3-border w3-margin-bottom" />
              </div>
              <div>
                <label>${this.t("authLabelPassword")}:</label>
                <input type="password" name="sign_up_password" class="w3-input w3-border w3-margin-bottom" />
              </div>
              <div>
                <label>${this.t("authLabelUsername")}:</label>
                <input type="text" name="sign_up_username" class="w3-input w3-border w3-margin-bottom" />
              </div>
              <div>
                <input @change="${(event) => this.validateRegistrationForm(event)}" type="checkbox" id="sign_up_policy_agreement" name="sign_up_policy_agreement" class="w3-margin-bottom" />
                <label for="sign_up_policy_agreement">${unsafeHTML(this.t("authLabelPrivatePolicy"))}</label>
              </div>
              <button
                class="w3-button w3-block pc-main w3-section w3-padding"
                type="submit"
                disabled
                @click="${(event) => this.onRegister(event)}"
              >
                <i class="fa fa-refresh fa-spin"></i>${this.t("authSignUpBtnText")}
              </button>
            </form>
            <div id="error-list"></div>
            <div>${unsafeHTML(this.t("accountBenefits"))}</div>
          </div>
        </div>
      </div>
    `;
  }

  /** "Forgot password" view: asks for the account e-mail address. */
  resetPasswordTemplate() {
    return html`
      <div class="w3-modal-content w3-card-4 w3-animate-top">
        <div class="w3-row w3-bar pc-secondary">
          <button
            @click="${() => this.onCloseModal()}"
            class="w3-col w3-button w3-right w3-hover-dark-gray popup-header-close"
          >
            <img class="inverted" class="w3-button " src="img/close.svg" />
          </button>
        </div>
        <div class="w3-container auth-modal-container">
          <div class="w3-section">
            <div class="w3-bar w3-margin-bottom">
              <i class="fa fa-lock"></i>
              <b>${this.t("authForgotPasswordModalHeader")}</b>
            </div>
            <form id="reset_password">
              <label>${this.t("authLabelForgotPassword")}</label>
              <input type="text" name="reset_password_email" class="w3-input w3-border w3-margin-bottom" />
              <button
                class="w3-button w3-block pc-main w3-section w3-padding"
                type="submit"
                disabled
                @click="${(event) => this.onPasswordReset(event)}"
              >
                <i class="fa fa-refresh fa-spin"></i>${this.t("authForgotPasswordBtnText")}
              </button>
              <div id="error-list"></div>
            </form>
          </div>
        </div>
      </div>
    `;
  }

  /** Confirmation shown after sign-up; closing returns to the login view. */
  successfulRegistrationTemplate() {
    return html`
      <div class="w3-modal-content w3-card-4 w3-animate-top">
        <div class="w3-row w3-bar pc-secondary">
          <button
            @click="${() => render(this.loginTemplate(), this.getEl(this.root))}"
            class="w3-col w3-button w3-right w3-hover-dark-gray popup-header-close"
          >
            <img class="inverted" class="w3-button" src="img/close.svg" />
          </button>
        </div>
        <div class="w3-container">
          <div class="w3-section">${this.t("authSignUpSuccessfulText")}</div>
        </div>
      </div>
    `;
  }

  /** Confirmation shown after a password-reset e-mail was requested. */
  successfulResetPasswordTemplate() {
    return html`
      <div class="w3-modal-content w3-card-4 w3-animate-top">
        <div class="w3-row w3-bar pc-secondary">
          <button
            @click="${() => this.onCloseModal()}"
            class="w3-col w3-button w3-right w3-hover-dark-gray popup-header-close"
          >
            <img class="inverted" class="w3-button " src="img/close.svg" />
          </button>
        </div>
        <div class="w3-container">
          <div class="w3-section">
            <label>${this.t("authForgotPasswordSuccessfulText")}</label>
          </div>
        </div>
      </div>
    `;
  }

  /** Shows the modal and wires live validation + submit suppression. */
  render() {
    render(this.loginTemplate(), this.getEl(this.root));
    this.getEl(this.root).style.display = "block";
    this.getEl("sign_in").addEventListener("keyup", (event) => this.validateLoginForm(event));
    this.getEl("sign_up").addEventListener("keyup", (event) => this.validateRegistrationForm(event));
    this.getEl("sign_in").addEventListener("submit", (event) => event.preventDefault());
    this.getEl("sign_up").addEventListener("submit", (event) => event.preventDefault());
  }

  /** Swaps the modal content for the password-reset view. */
  onResetPasswordRequest() {
    render(this.resetPasswordTemplate(), this.getEl(this.root));
    this.getEl("reset_password")
      .addEventListener("keyup", (event) => this.validateResetPasswordForm(event));
    this.getEl("reset_password").addEventListener("submit", (event) => event.preventDefault());
  }

  /** Toggles between the sign-in and sign-up tabs (no-op if already active). */
  onTabChange(type) {
    const signIn = this.getEl("sign_in");
    const signUp = this.getEl("sign_up");
    const errorsContainer = this.getEl("error-list");
    errorsContainer.innerHTML = "";
    if (type === "sign_in" && signIn.style.display === "none") {
      this.getEl("sign_in").style.display = "block";
      this.getEl("sign_up").style.display = "none";
      this.getEl("sign_in_button").classList.toggle("pc-main");
      this.getEl("sign_up_button").classList.toggle("pc-main");
    }
    if (type === "sign_up" && signUp.style.display === "none") {
      this.getEl("sign_in").style.display = "none";
      this.getEl("sign_up").style.display = "block";
      this.getEl("sign_in_button").classList.toggle("pc-main");
      this.getEl("sign_up_button").classList.toggle("pc-main");
    }
  }

  /**
   * Submits the sign-in form. On success stores tokens and hides the modal;
   * on failure renders a status-specific error message.
   */
  async onLogin(event) {
    const loginBtn = event.target;
    const errorsContainer = this.getEl("error-list");
    const formData = {
      email: document.getElementsByName("sign_in_email")[0].value,
      password: document.getElementsByName("sign_in_password")[0].value,
    };
    // Toggle button loading state
    loginBtn.classList.toggle("loading");
    let result = null;
    try {
      result = await this.authService.signIn(formData);
      loginBtn.classList.toggle("loading");
    } catch (error) {
      loginBtn.classList.toggle("loading");
      errorsContainer.innerHTML = this.errorMessages.serviceUnavailable;
      return;
    }
    errorsContainer.innerHTML = "";
    // After successful login save access_token and refresh_token to local storage.
    if (result.data) {
      this.getEl(this.root).style.display = "none";
      this.saveAuthResult(result.data);
    }
    // TODO: Next code should be placed into global error handling
    if (result.errors) {
      for (let error of result.errors) {
        if (error.status && error.status === 400) {
          errorsContainer.innerHTML = `<p class="w3-text-red">${error.message}</p>`;
        }
        if (error.status && error.status === 401) {
          // Generic wrong-password text first; more specific causes override it.
          errorsContainer.innerHTML = `<p class="w3-text-red">${this.t('authPasswordWrong')}</p>`;
          if (error.message === "no_user_found") {
            errorsContainer.innerHTML = `<p class="w3-text-red">${this.t('authLoginUserNotFound')}</p>`;
          }
          if (error.message === "email_unconfirmed") {
            errorsContainer.innerHTML = `<p class="w3-text-red">${this.t('authEmailNotVerified')}</p>`;
          }
        }
      }
      return;
    }
  }

  /** Submits the sign-up form and shows the success view when it worked. */
  async onRegister(event) {
    const registerBtn = event.target;
    const errorsContainer = this.getEl("error-list");
    const formData = {
      email: document.getElementsByName("sign_up_email")[0].value,
      password: document.getElementsByName("sign_up_password")[0].value,
      username: document.getElementsByName("sign_up_username")[0].value,
    };
    // Toggle button loading state
    registerBtn.classList.toggle("loading");
    let result = null;
    try {
      result = await this.authService.signUp(formData);
      registerBtn.classList.toggle("loading");
    } catch (error) {
      registerBtn.classList.toggle("loading");
      errorsContainer.innerHTML = this.errorMessages.serviceUnavailable;
      return;
    }
    errorsContainer.innerHTML = "";
    // TODO: Next code should be placed into global error handling
    if (result && result.errors) {
      for (let error of result.errors) {
        errorsContainer.innerHTML = `<p class="w3-text-red">${error.message}</p>`;
      }
      return;
    }
    render(this.successfulRegistrationTemplate(), this.getEl(this.root));
  }

  /** Requests a password-reset e-mail for the entered address. */
  async onPasswordReset(event) {
    const pwdResetBtn = event.target;
    const errorsContainer = this.getEl("error-list");
    const email = document.getElementsByName("reset_password_email")[0].value;
    pwdResetBtn.classList.toggle("loading");
    if (email) {
      let result = null;
      try {
        result = await this.authService.requestPasswordChange({ email });
        pwdResetBtn.classList.toggle("loading");
      } catch (error) {
        pwdResetBtn.classList.toggle("loading");
        errorsContainer.innerHTML = this.errorMessages.serviceUnavailable;
        return;
      }
      if (result && result.errors) {
        for (let error of result.errors) {
          errorsContainer.innerHTML = `<p class="w3-text-red">${error.message}</p>`;
        }
        return;
      }
      // The backend answers the happy path with an empty body.
      if (result == null) {
        render(this.successfulResetPasswordTemplate(), this.getEl(this.root));
      }
    }
  }

  /**
   * Persists the token pair from an authentication result, then reloads.
   * Fixed: location.reload(true) used the non-standard "forceGet" argument
   * that modern browsers ignore; the standard no-argument form is equivalent.
   */
  async saveAuthResult(data) {
    if (data.type !== "authentication_result") return;
    const { access_token, refresh_token } = data.attributes;
    this.settingsRepo.authTokens().set({
      accessToken: access_token,
      refreshToken: refresh_token
    });
    location.reload();
  }

  /** Hides the modal without touching its current content. */
  onCloseModal() {
    this.getEl(this.root).style.display = "none";
  }

  /**
   * Login-form keyup handler. Strict validation is intentionally disabled
   * (kept commented out below); currently it only clears stale errors.
   */
  validateLoginForm(event) {
    const errorsContainer = this.getEl("error-list");
    // const form = event.target.closest("form");
    // const submitBtn = form.querySelector('button[type="submit"]');
    // const inputEmail = form.querySelector('input[name="sign_in_email"]');
    // const inputPassword = form.querySelector('input[name="sign_in_password"]');
    // const isEmailValid = this.validation.email.exec(inputEmail.value);
    // const isPasswordValid = this.validation.password.exec(inputPassword.value);
    errorsContainer.innerHTML = "";
    // submitBtn.disabled = true;
    // if (!isEmailValid) {
    //   !inputEmail.classList.contains("error") && inputEmail.classList.toggle("error");
    //   errorsContainer.innerHTML = this.errorMessages.email;
    //   return;
    // } else {
    //   inputEmail.classList.contains("error") && inputEmail.classList.toggle("error");
    // }
    // if (!isPasswordValid) {
    //   !inputPassword.classList.contains("error") && inputPassword.classList.toggle("error");
    //   errorsContainer.innerHTML = this.errorMessages.passwordNotValid;
    //   return;
    // } else {
    //   inputPassword.classList.contains("error") && inputPassword.classList.toggle("error");
    // }
    // if (isEmailValid && isPasswordValid) {
    //   submitBtn.disabled = false;
    // }
  }

  /**
   * Registration-form validation: enables the submit button only when
   * e-mail, password and username match their patterns AND the privacy
   * policy checkbox is ticked. Shows the first failing rule's message.
   */
  validateRegistrationForm(event) {
    const errorsContainer = this.getEl("error-list");
    const form = event.target.closest("form");
    const submitBtn = form.querySelector('button[type="submit"]');
    const inputEmail = form.querySelector('input[name="sign_up_email"]');
    const inputPassword = form.querySelector('input[name="sign_up_password"]');
    const inputUsername = form.querySelector('input[name="sign_up_username"]');
    const inputPrivacyPolicy = form.querySelector('input[name="sign_up_policy_agreement"]');
    const isEmailValid = this.validation.email.exec(inputEmail.value);
    const isPasswordValid = this.validation.password.exec(inputPassword.value);
    const isUsernameValid = this.validation.username.exec(inputUsername.value);
    const isPrivacyPolicyChecked = inputPrivacyPolicy.checked;
    errorsContainer.innerHTML = "";
    submitBtn.disabled = true;
    if (!isEmailValid) {
      !inputEmail.classList.contains("error") && inputEmail.classList.toggle("error");
      errorsContainer.innerHTML = this.errorMessages.email;
      return;
    } else {
      inputEmail.classList.contains("error") && inputEmail.classList.toggle("error");
    }
    if (!isPasswordValid) {
      !inputPassword.classList.contains("error") && inputPassword.classList.toggle("error");
      errorsContainer.innerHTML = this.errorMessages.passwordNotValid;
      return;
    } else {
      inputPassword.classList.contains("error") && inputPassword.classList.toggle("error");
    }
    if (!isUsernameValid) {
      !inputUsername.classList.contains("error") && inputUsername.classList.toggle("error");
      errorsContainer.innerHTML = this.errorMessages.username;
      return;
    } else {
      inputUsername.classList.contains("error") && inputUsername.classList.toggle("error");
    }
    if (isEmailValid && isPasswordValid && isUsernameValid && isPrivacyPolicyChecked) {
      submitBtn.disabled = false;
    }
  }

  /**
   * Reset-form validation: any non-empty e-mail enables the button
   * (pattern checking is intentionally disabled, kept commented out).
   */
  validateResetPasswordForm(event) {
    const errorsContainer = this.getEl("error-list");
    const form = event.target.closest("form");
    const submitBtn = form.querySelector('button[type="submit"]');
    const inputEmail = form.querySelector('input[name="reset_password_email"]');
    // const isEmailValid = this.validation.email.exec(inputEmail.value);
    errorsContainer.innerHTML = "";
    submitBtn.disabled = true;
    // if (!isEmailValid) {
    //   !inputEmail.classList.contains("error") && inputEmail.classList.toggle("error");
    //   errorsContainer.innerHTML = this.errorMessages.email;
    //   return;
    // } else {
    //   inputEmail.classList.contains("error") && inputEmail.classList.toggle("error");
    // }
    if (inputEmail.value) {
      submitBtn.disabled = false;
    }
  }
}
|
#!/usr/bin/env bash
# VisDA2017
# MCD benchmark runs (-s T source domain, -t V target domain); stdout is
# captured to one log per backbone under benchmarks/mcd/.
CUDA_VISIBLE_DEVICES=0 python examples/mcd.py data/visda-2017 -d VisDA2017 -s T -t V -a resnet50 --epochs 20 --center-crop --seed 0 -i 500 > benchmarks/mcd/VisDA2017.txt
CUDA_VISIBLE_DEVICES=0 python examples/mcd.py data/visda-2017 -d VisDA2017 -s T -t V -a resnet101 --epochs 20 --center-crop --seed 0 -i 500 > benchmarks/mcd/VisDA2017_resnet101.txt |
# Demo: measuring string length with the ${#var} parameter expansion.
cat <<- doc
****************************************************
This script shows how to get the length of a string
****************************************************
doc

# First sample: ten digits.
string='0123456789'
printf 'string=%s\n' "$string"
printf 'length=%s\n' "${#string}"

# Second sample: three letters.
string='ABC'
printf 'string=%s\n' "$string"
printf 'length=%s\n' "${#string}"
|
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 26 22:51:21 2016
@author: jerry
"""
from data_parsing.Planet import *
from tests.PlanetaryObjectTest import *
import unittest
class TestPlanet(TestPlanetaryObject):
    """Unit tests for the Planet parsing model."""

    def testInit(self):
        """A freshly constructed Planet carries only default/empty state."""
        planet1 = Planet("testPlanet")
        # assertEquals is a deprecated alias that was removed in Python 3.12;
        # use assertEqual / assertIsNone instead.
        self.assertEqual("testPlanet", planet1.getName())
        self.assertIsNone(planet1.starObject)
        self.assertEqual(dict(), planet1.starObjectNamesToStar)
        self.assertEqual(dict(), planet1.errors)
        self.assertEqual([], planet1.otherNamesPlanet)
        self.assertEqual(dict(), planet1.data)
        self.assertEqual("", planet1.nameStar)
        self.assertEqual("00/00/00", planet1.lastupdate)


if __name__ == '__main__':
    unittest.main(exit=False)
|
import grpc
import dog_pb2
import dog_pb2_grpc
def call_dog(dog_name):
    """Send a DogRequest for ``dog_name`` to the gRPC dog service on
    localhost:50055 and return the raw response message."""
    grpc_channel = grpc.insecure_channel('localhost:50055')
    dog_stub = dog_pb2_grpc.dogStub(grpc_channel)
    request = dog_pb2.DogRequest(dogName=dog_name)
    return dog_stub.CallDog(request)
|
<gh_stars>0
package com.example
import play.api.libs.json.Json
import play.api.libs.ws.WS
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
// Demonstrates chaining lazily-started ("unapplied") Futures so that each
// one only begins after the previous finished (successfully or not).
object ChainedFutureSamples {

  // A Future that has not been started yet; calling the function kicks it off.
  type UnappliedFuture[T] = () => Future[T]

  abstract class Sample

  // NOTE(review): both case classes print themselves on construction -
  // presumably debug tracing; confirm before removing.
  case class SampleData(id: String, email: Option[String]) extends Sample {
    println(toString)
  }

  case class Empty(url: String) extends Sample {
    println(toString)
  }

  object SampleData {
    implicit val sampleDataFormat = Json.format[SampleData]
  }

  // Builds 100 URLs, wraps each in an unapplied Future, reduces them into a
  // single sequential chain and starts it with the trailing `apply`.
  def main(args: Array[String]): Unit = 1 to 100 map toUrl map toUnappliedFuture reduce toChainedUnappliedFuture apply

  private def toUrl: Int => String = (i: Int) => s"http://www.baidu.com/${i}"

  private def toUnappliedFuture(url: String): UnappliedFuture[SampleData] = () => Future(SampleData(url, None))

  // Starts a1 first; once it completes - whether it succeeded or failed -
  // starts a2 (flatMap handles success, recoverWith handles failure).
  private def toChainedUnappliedFuture: (UnappliedFuture[SampleData], UnappliedFuture[SampleData]) => UnappliedFuture[SampleData] = (a1, a2) => () => {
    a1 apply() flatMap { _ => a2 apply } recoverWith { case _ => a2 apply }
  }

  // Fetches `url` via Play WS and parses the JSON body into SampleData,
  // falling back to Empty(url) when validation fails.
  private def packageFuture(url: String): () => Future[Sample] = () => WS.clientUrl(url)
    .withQueryString(("key", ""))
    .withRequestTimeout(1000)
    .get().map(response => response.json.validate[SampleData].getOrElse(Empty(url)))

  // TODO: unimplemented sketches left by the author.
  def toChainUppliedFutureWithFold : (UnappliedFuture[SampleData], UnappliedFuture[SampleData]) => UnappliedFuture[SampleData] = ???

  def retry[T](maximizeRetries: Int, future: UnappliedFuture[T]): Future[T] = ???
}
|
import unittest
from your_flask_app import app # Import the Flask app
class TestScheduleMaker(unittest.TestCase):
    """Smoke tests for the schedule-maker views."""

    def test_views_scheduleMaker_load(self):
        """The class-select page renders and contains the subject prompt."""
        client = app.test_client(self)
        page = client.get('/class-select', content_type='html/text')
        self.assertIn(b'Subject:', page.data)

    def test_correct_change(self):
        """Posting the first-page button navigates to the second page."""
        client = app.test_client(self)
        # Simulate the button click to navigate to the second page
        page = client.post('/first-page-button', data=dict(button='clicked'))
        # Assuming the second page URL is '/second-page', assert that the response leads to the second page
        self.assertEqual(page.status_code, 200)
        self.assertIn(b'Second Page Content', page.data)  # Replace 'Second Page Content' with the expected content


if __name__ == '__main__':
    unittest.main()
#!/bin/bash
# Merlin, utop, ocp-indent, ocamlformat, and patdiff are all for developer assistance
# -y answers every opam prompt; ocamlformat is pinned to 0.8 so formatting
# output stays reproducible across developer machines.
opam install -y ocamlformat.0.8 merlin utop ocp-indent patdiff
|
<filename>code/test/PutString_2.c
#include "syscall.h"
/*
Affichage d'une chaine de taille suppérieure à la constante
MAX_STRING_SIZE(=100) définie dans system.h.
*/
/* Prints a string longer than MAX_STRING_SIZE(=100) from system.h to
   exercise PutString's length-limit handling. */
int main() {
    int t = 100;          /* MAX_STRING_SIZE */
    int depassement = 10; /* how far past the limit we go */
    /* +1 so the buffer holds a terminating NUL: previously every slot was
       filled with data and the string was never terminated, which is
       undefined behaviour once it is read as a C string. The visible string
       is still t+depassement characters long, preserving the overflow test. */
    char string[t + depassement + 1];
    int i;
    /* Fill the first t slots... */
    for (i = 0; i < t; i++) {
        string[i] = '1';
    }
    /* ...then the overflow area beyond MAX_STRING_SIZE. */
    for (i = 0; i < depassement; i++) {
        string[t + i] = 'x';
    }
    string[t + depassement] = '\0';
    PutString(string);
    /* Degenerate case: empty string. */
    PutString("");
    return (0);
}
|
from typing import List, Tuple
import re
from collections import Counter
def top_n_words(file_path: str, n: int) -> List[Tuple[str, int]]:
    """Return the ``n`` most frequent words in the file at ``file_path``.

    Text is lower-cased, tokenised on word boundaries, and a fixed set of
    common stop words is excluded. Result is a list of (word, count) pairs
    in descending count order.
    """
    stop_words = {'the', 'and', 'is', 'it', 'or', 'as', 'by', 'of', 'a', 'that', 'will', 'be', 'for', 'more', 'you', 'have', 'received'}
    with open(file_path, 'r') as fh:
        content = fh.read().lower()
    tokens = re.findall(r'\b\w+\b', content)
    counts = Counter(tok for tok in tokens if tok not in stop_words)
    return counts.most_common(n)
def predict_spam(email):
    """Classify an email as spam or not.

    Extracts the email's feature vector and runs it through the module-level
    `model`; a prediction of 0 maps to "Not Spam", anything else to "Spam".
    """
    feature_vector = extract_features(email)
    prediction = model.predict(feature_vector)
    return "Not Spam" if prediction == 0 else "Spam"
package gov.cms.bfd.model.codebook.data;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
/** Unit tests for {@link gov.cms.bfd.model.codebook.data.CcwCodebookVariable}. */
/** Unit tests for {@link gov.cms.bfd.model.codebook.data.CcwCodebookVariable}. */
public final class CcwCodebookVariableTest {

  /** The generated enum must contain at least one constant. */
  @Test
  public void constants() {
    CcwCodebookVariable[] allConstants = CcwCodebookVariable.values();
    assertTrue(allConstants.length > 0);
  }

  /** Every enum constant must expose its backing codebook Variable. */
  @Test
  public void getVariable() {
    for (CcwCodebookVariable codebookVariable : CcwCodebookVariable.values()) {
      assertNotNull(codebookVariable.getVariable());
    }
  }
}
|
<gh_stars>0
/**
 * Reorders a singly linked list so nodes in odd positions (1st, 3rd, ...)
 * come first, followed by the nodes in even positions, each group keeping
 * its relative order. O(n) time, O(1) extra space; returns the head node.
 * (Original header said "Leetcode optimal solution".)
 *
 * Fixed: a stray `0` expression statement after `p.next = h1;` (harmless
 * no-op, but clearly an editing accident) and an unused local `t`.
 */
var oddEvenList = function(head) {
  if (head === null)
    return head;
  let h1 = null, // head of the detached sublist (2nd, 4th, ... nodes)
      p1 = null, // tail of that detached sublist
      p = head;  // tail of the kept sublist (1st, 3rd, ... nodes)
  // Unlink every second node, appending it to the detached sublist while
  // stitching the remaining nodes together.
  while (p.next && p.next.next) {
    if (h1 === null) {
      h1 = p.next;
      p1 = h1;
    } else {
      p1.next = p.next;
      p1 = p1.next;
    }
    p.next = p.next.next;
    p = p.next;
  }
  // A single trailing node still belongs to the detached sublist.
  if (p.next) {
    if (h1 === null) {
      h1 = p.next;
      p1 = h1;
    } else {
      p1.next = p.next;
      p1 = p1.next;
    }
  }
  if (p1) {
    p1.next = null; // terminate the detached sublist
  }
  // Append the detached sublist after the kept one.
  p.next = h1;
  return head;
};
|
#=======
# Author: <NAME> (<EMAIL>)
#=======
module UICov
  # Coverage bookkeeping for a single UI element; all behaviour is currently
  # inherited from MemberData (defined elsewhere in this library).
  class ElementData < MemberData
  end
end
|
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <stdlib.h>
// Must be included before anything else.
#include "src/tools/singlejar/port.h"
#include "src/main/cpp/util/file.h"
#include "src/main/cpp/util/port.h"
#include "src/main/cpp/util/strings.h"
#include "src/tools/singlejar/input_jar.h"
#include "src/tools/singlejar/options.h"
#include "src/tools/singlejar/output_jar.h"
#include "src/tools/singlejar/test_util.h"
#include "googletest/include/gtest/gtest.h"
#if !defined(JAR_TOOL_PATH)
#error "The path to jar tool has to be defined via -DJAR_TOOL_PATH="
#endif
#ifdef _WIN32
#define unlink _unlink
#define CMD_SEPARATOR "&"
#else
#define CMD_SEPARATOR ";"
#endif
namespace {
using bazel::tools::cpp::runfiles::Runfiles;
using singlejar_test_util::CreateTextFile;
using singlejar_test_util::GetEntryContents;
using singlejar_test_util::OutputFilePath;
using singlejar_test_util::RunCommand;
using singlejar_test_util::VerifyZip;
using std::string;
#if !defined(DATA_DIR_TOP)
#define DATA_DIR_TOP
#endif
const char kPathLibData1[] =
"io_bazel/src/tools/singlejar/libdata1.jar";
const char kPathLibData2[] =
"io_bazel/src/tools/singlejar/libdata2.jar";
// True when `what` occurs anywhere inside `s`.
static bool HasSubstr(const std::string &s, const std::string &what) {
  return s.find(what) != std::string::npos;
}
// A subclass of the OutputJar which concatenates the contents of each
// entry in the data/ directory from the input archives.
class CustomOutputJar : public OutputJar {
 public:
  ~CustomOutputJar() override {}
  // Hook invoked by the base class for each central-directory entry.
  // Regular files under tools/singlejar/data/ are registered with a
  // Concatenator so the corresponding output entry accumulates the bytes
  // from every input jar's copy of that file.
  void ExtraHandler(const CDH *cdh,
                    const std::string *input_jar_aux_label) override {
    auto file_name = cdh->file_name();
    auto file_name_length = cdh->file_name_length();
    // Skip directory entries (trailing '/') and anything outside data/.
    if (file_name_length > 0 && file_name[file_name_length - 1] != '/' &&
        begins_with(file_name, file_name_length, "tools/singlejar/data/")) {
      // The contents of the data/<FILE> on the output is the
      // concatenation of the data/<FILE> files from all inputs.
      std::string metadata_file_path(file_name, file_name_length);
      if (NewEntry(metadata_file_path)) {
        // NOTE(review): Concatenator ownership appears to pass to
        // ExtraCombiner - confirm against output_jar.h before changing.
        ExtraCombiner(metadata_file_path, new Concatenator(metadata_file_path));
      }
    }
  }
};
// Fixture for the simple singlejar output scenarios: builds an output jar
// from command-line style arguments and verifies the result is a valid zip.
class OutputJarSimpleTest : public ::testing::Test {
 protected:
  void SetUp() override { runfiles.reset(Runfiles::CreateForTest()); }

  // Runs singlejar with "--output <out_path>" plus `args` (empty strings are
  // skipped), asserting that it succeeds and yields a well-formed zip.
  // NOTE(review): option_list is capped at 100 entries; assumes no caller
  // passes more - confirm before adding longer argument lists.
  void CreateOutput(const string &out_path, const std::vector<string> &args) {
    const char *option_list[100] = {"--output", out_path.c_str()};
    int nargs = 2;
    for (auto &arg : args) {
      if (arg.empty()) {
        continue;
      }
      option_list[nargs++] = arg.c_str();
      // Echo the command line under test to stderr for debugging.
      if (arg.find(' ') == string::npos) {
        fprintf(stderr, " '%s'", arg.c_str());
      } else {
        fprintf(stderr, " %s", arg.c_str());
      }
    }
    fprintf(stderr, "\n");
    options_.ParseCommandLine(nargs, option_list);
    ASSERT_EQ(0, output_jar_.Doit(&options_));
    EXPECT_EQ(0, VerifyZip(out_path));
  }

  // Builds a jar exercising `compression_option` over two input jars, a
  // classpath resource and extra manifest lines; returns the output path.
  string CompressionOptionsTestingJar(const string &compression_option) {
    string cp_res_path =
        CreateTextFile("cp_res", "line1\nline2\nline3\nline4\n");
    string out_path = OutputFilePath("out.jar");
    CreateOutput(
        out_path,
        {compression_option, "--sources",
         runfiles
             ->Rlocation(
                 "io_bazel/src/tools/singlejar/libtest1.jar")
             .c_str(),
         runfiles
             ->Rlocation(
                 "io_bazel/src/tools/singlejar/stored.jar")
             .c_str(),
         "--resources", cp_res_path, "--deploy_manifest_lines",
         "property1: value1", "property2: value2"});
    return out_path;
  }

  OutputJar output_jar_;               // object under test
  Options options_;                    // parsed singlejar options
  std::unique_ptr<Runfiles> runfiles;  // resolves test data file locations
};
// No inputs at all.
// Even with no inputs the output must be a valid jar: META-INF/ first with
// the 0xCAFE extra field, consistent local/central headers, fresh
// timestamps, a minimal manifest and a build-data.properties entry.
TEST_F(OutputJarSimpleTest, Empty) {
  string out_path = OutputFilePath("out.jar");
  CreateOutput(out_path, {});
  InputJar input_jar;
  ASSERT_TRUE(input_jar.Open(out_path));
  int entry_count = 0;
  const LH *lh;
  const CDH *cdh;
  // 0xCAFE tag, little-endian, with zero-length payload.
  const uint8_t cafe_extra_field[] = {0xFE, 0xCA, 0, 0};
  while ((cdh = input_jar.NextEntry(&lh))) {
    ++entry_count;
    ASSERT_TRUE(cdh->is()) << "No expected tag in the Central Directory Entry.";
    ASSERT_NE(nullptr, lh) << "No local header.";
    ASSERT_TRUE(lh->is()) << "No expected tag in the Local Header.";
    // Local header and central directory must agree on name and sizes.
    EXPECT_EQ(lh->file_name_string(), cdh->file_name_string());
    if (!cdh->no_size_in_local_header()) {
      EXPECT_EQ(lh->compressed_file_size(), cdh->compressed_file_size())
          << "Entry: " << lh->file_name_string();
      EXPECT_EQ(lh->uncompressed_file_size(), cdh->uncompressed_file_size())
          << "Entry: " << cdh->file_name_string();
    }
    // Verify that each entry has a reasonable timestamp.
    EXPECT_EQ(lh->last_mod_file_date(), cdh->last_mod_file_date())
        << "Entry: " << lh->file_name_string();
    EXPECT_EQ(lh->last_mod_file_time(), cdh->last_mod_file_time())
        << "Entry: " << lh->file_name_string();
    uint16_t dos_time = lh->last_mod_file_time();
    uint16_t dos_date = lh->last_mod_file_date();
    // Current time, rounded to even number of seconds because MSDOS timestamp
    // does this, too.
    time_t now = (time(nullptr) + 1) & ~1;
    struct tm tm_now;
    localtime_r(&now, &tm_now);
    char now_time_str[50];
    strftime(now_time_str, sizeof(now_time_str), "%c", &tm_now);
    // Unpack MSDOS file timestamp. See the comment about its format in
    // output_jar.cc.
    struct tm tm;
    tm.tm_sec = (dos_time & 31) << 1;
    tm.tm_min = (dos_time >> 5) & 63;
    tm.tm_hour = (dos_time >> 11) & 31;
    tm.tm_mday = (dos_date & 31);
    tm.tm_mon = ((dos_date >> 5) & 15) - 1;
    tm.tm_year = ((dos_date >> 9) & 127) + 80;
    tm.tm_isdst = tm_now.tm_isdst;
    time_t entry_time = mktime(&tm);
    char entry_time_str[50];
    strftime(entry_time_str, sizeof(entry_time_str), "%c", &tm);
    // Without --normalize option all the entries should have reasonably
    // current timestamp (which we arbitrarily choose to be <5 minutes).
    EXPECT_GE(now, entry_time) << now_time_str << " vs. " << entry_time_str;
    EXPECT_LE(now, entry_time + 300) << now_time_str << " vs. "
                                     << entry_time_str;
    // The first entry should be for the META-INF/ directory, and it should
    // contain a single extra field 0xCAFE. Although
    // https://bugs.openjdk.java.net/browse/JDK-6808540 claims that this extra
    // field is optional, 'file' utility in Linux relies on to distinguish
    // jar from zip.
    if (entry_count == 1) {
      ASSERT_EQ("META-INF/", lh->file_name_string());
      ASSERT_EQ(4, lh->extra_fields_length());
      ASSERT_EQ(0, memcmp(cafe_extra_field, lh->extra_fields(), 4));
      ASSERT_EQ(4, cdh->extra_fields_length());
      ASSERT_EQ(0, memcmp(cafe_extra_field, cdh->extra_fields(), 4));
    }
  }
  input_jar.Close();
  // The generated manifest and build properties must always be present.
  string manifest = GetEntryContents(out_path, "META-INF/MANIFEST.MF");
  EXPECT_EQ(
      "Manifest-Version: 1.0\r\n"
      "Created-By: singlejar\r\n"
      "\r\n",
      manifest);
  string build_properties = GetEntryContents(out_path, "build-data.properties");
  EXPECT_PRED2(HasSubstr, build_properties, "build.target=");
}
// Source jars.
// Verifies that --sources merges the contents of multiple input jars and
// that the local and central headers stay consistent for every copied entry.
TEST_F(OutputJarSimpleTest, Source) {
string out_path = OutputFilePath("out.jar");
CreateOutput(
out_path,
{"--sources",
runfiles
->Rlocation(
"io_bazel/src/tools/singlejar/libtest1.jar")
.c_str(),
runfiles
->Rlocation(
"io_bazel/src/tools/singlejar/libtest2.jar")
.c_str()});
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
int file_count = 0;
while ((cdh = input_jar.NextEntry(&lh))) {
ASSERT_TRUE(cdh->is()) << "No expected tag in the Central Directory Entry.";
ASSERT_NE(nullptr, lh) << "No local header.";
ASSERT_TRUE(lh->is()) << "No expected tag in the Local Header.";
EXPECT_EQ(lh->file_name_string(), cdh->file_name_string());
if (!cdh->no_size_in_local_header()) {
EXPECT_EQ(lh->compressed_file_size(), cdh->compressed_file_size())
<< "Entry: " << lh->file_name_string();
EXPECT_EQ(lh->uncompressed_file_size(), cdh->uncompressed_file_size())
<< "Entry: " << cdh->file_name_string();
}
// Count only file entries (names not ending in '/').
if (lh->file_name()[lh->file_name_length() - 1] != '/') {
++file_count;
}
}
// Both source jars plus generated entries must yield at least 4 files.
ASSERT_LE(4, file_count);
input_jar.Close();
}
// Verify --java_launcher argument
// The launcher binary is prepended to the archive, so the first entry's
// local header offset must equal the launcher's file size.
TEST_F(OutputJarSimpleTest, JavaLauncher) {
string out_path = OutputFilePath("out.jar");
std::string launcher_path = runfiles->Rlocation(
"io_bazel/src/tools/singlejar/libtest1.jar");
CreateOutput(out_path, {"--java_launcher", launcher_path});
// check that the offset of the first entry equals launcher size.
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path.c_str()));
const LH *lh;
const CDH *cdh;
cdh = input_jar.NextEntry(&lh);
ASSERT_NE(nullptr, cdh);
struct stat statbuf;
ASSERT_EQ(0, stat(launcher_path.c_str(), &statbuf));
EXPECT_TRUE(cdh->is());
EXPECT_TRUE(lh->is());
EXPECT_EQ(statbuf.st_size, cdh->local_header_offset());
input_jar.Close();
}
// --main_class option.
// The given class must appear as the Main-Class attribute of the manifest.
TEST_F(OutputJarSimpleTest, MainClass) {
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--main_class", "com.google.my.Main"});
string manifest = GetEntryContents(out_path, "META-INF/MANIFEST.MF");
EXPECT_EQ(
"Manifest-Version: 1.0\r\n"
"Created-By: singlejar\r\n"
"Main-Class: com.google.my.Main\r\n"
"\r\n",
manifest);
}
// --deploy_manifest_lines option.
// Extra lines are appended verbatim to the generated manifest.
TEST_F(OutputJarSimpleTest, DeployManifestLines) {
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path,
{"--deploy_manifest_lines", "property1: foo", "property2: bar"});
string manifest = GetEntryContents(out_path, "META-INF/MANIFEST.MF");
EXPECT_EQ(
"Manifest-Version: 1.0\r\n"
"Created-By: singlejar\r\n"
"property1: foo\r\n"
"property2: bar\r\n"
"\r\n",
manifest);
}
// --extra_build_info option
// Each name=value pair must end up as its own line of build-data.properties.
TEST_F(OutputJarSimpleTest, ExtraBuildInfo) {
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--extra_build_info", "property1=value1",
"--extra_build_info", "property2=value2"});
string build_properties = GetEntryContents(out_path, "build-data.properties");
EXPECT_PRED2(HasSubstr, build_properties, "\nproperty1=value1\n");
EXPECT_PRED2(HasSubstr, build_properties, "\nproperty2=value2\n");
}
// --build_info_file and --extra_build_info options.
// Properties from every --build_info_file and from --extra_build_info are
// merged into the single build-data.properties entry.
TEST_F(OutputJarSimpleTest, BuildInfoFile) {
string build_info_path1 =
CreateTextFile("buildinfo1", "property11=value11\nproperty12=value12\n");
string build_info_path2 =
CreateTextFile("buildinfo2", "property21=value21\nproperty22=value22\n");
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--build_info_file", build_info_path1,
"--extra_build_info", "property=value",
"--build_info_file", build_info_path2.c_str()});
string build_properties = GetEntryContents(out_path, "build-data.properties");
EXPECT_PRED2(HasSubstr, build_properties, "property11=value11\n");
EXPECT_PRED2(HasSubstr, build_properties, "property12=value12\n");
EXPECT_PRED2(HasSubstr, build_properties, "property21=value21\n");
EXPECT_PRED2(HasSubstr, build_properties, "property22=value22\n");
EXPECT_PRED2(HasSubstr, build_properties, "property=value\n");
}
// --resources option.
// Two resources mapped to the same target name are concatenated in command
// line order; a resource without a mapping keeps its source path as the
// entry name.
TEST_F(OutputJarSimpleTest, Resources) {
string res11_path = CreateTextFile("res11", "res11.line1\nres11.line2\n");
string res11_spec = res11_path + ":res1";
string res12_path = CreateTextFile("res12", "res12.line1\nres12.line2\n");
string res12_spec = res12_path + ":res1";
string res2_path = CreateTextFile("res2", "res2.line1\nres2.line2\n");
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--resources", res11_spec, res12_spec, res2_path});
// The output should have 'res1' entry containing the concatenation of the
// 'res11' and 'res12' files.
string res1 = GetEntryContents(out_path, "res1");
EXPECT_EQ("res11.line1\nres11.line2\nres12.line1\nres12.line2\n", res1);
// The output should have res2 path entry and contents.
string res2 = GetEntryContents(out_path, res2_path);
EXPECT_EQ("res2.line1\nres2.line2\n", res2);
}
// Directory entries for every parent of a mapped resource path must be
// synthesized in the output, in depth-first order.
TEST_F(OutputJarSimpleTest, ResourcesParentDirectories) {
string res1_path = CreateTextFile("res1", "res1.line1\nres1.line2\n");
string res2_path = CreateTextFile("res2", "res2.line1\nres2.line2\n");
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--exclude_build_data", "--resources",
res1_path + ":the/resources/res1",
res2_path + ":the/resources2/res2"});
string res1 = GetEntryContents(out_path, "the/resources/res1");
EXPECT_EQ("res1.line1\nres1.line2\n", res1);
string res2 = GetEntryContents(out_path, "the/resources2/res2");
EXPECT_EQ("res2.line1\nres2.line2\n", res2);
// The output should contain entries for parent directories
std::vector<string> expected_entries(
{"META-INF/", "META-INF/MANIFEST.MF", "the/", "the/resources/",
"the/resources/res1", "the/resources2/", "the/resources2/res2"});
std::vector<string> jar_entries;
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
while ((cdh = input_jar.NextEntry(&lh))) {
jar_entries.push_back(cdh->file_name_string());
}
input_jar.Close();
EXPECT_EQ(expected_entries, jar_entries);
}
// A directory given as a resource becomes a (renamed) directory entry.
TEST_F(OutputJarSimpleTest, ResourcesDirectories) {
string dir_path = OutputFilePath("resource_dir");
blaze_util::MakeDirectories(dir_path, 0777);
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path,
{"--exclude_build_data", "--resources", dir_path + ":the/dir"});
// The output should contain entries for the directory
std::vector<string> expected_entries({
"META-INF/", "META-INF/MANIFEST.MF", "the/", "the/dir/",
});
std::vector<string> jar_entries;
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
while ((cdh = input_jar.NextEntry(&lh))) {
jar_entries.push_back(cdh->file_name_string());
}
input_jar.Close();
EXPECT_EQ(expected_entries, jar_entries);
}
// --classpath_resources
// A classpath resource is stored under its base name.
TEST_F(OutputJarSimpleTest, ClasspathResources) {
string res1_path = OutputFilePath("cp_res");
ASSERT_TRUE(blaze_util::WriteFile("line1\nline2\n", res1_path));
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--classpath_resources", res1_path.c_str()});
string res = GetEntryContents(out_path, "cp_res");
EXPECT_EQ("line1\nline2\n", res);
}
// Duplicate entries for --resources or --classpath_resources
// With --warn_duplicate_resources a duplicate target only produces a
// warning and the first occurrence's contents win (no concatenation).
TEST_F(OutputJarSimpleTest, DuplicateResources) {
string cp_res_path = CreateTextFile("cp_res", "line1\nline2\n");
string res1_path = CreateTextFile("res1", "resline1\nresline2\n");
string res1_spec = res1_path + ":foo";
string res2_path = CreateTextFile("res2", "line3\nline4\n");
string res2_spec = res2_path + ":foo";
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path,
{"--warn_duplicate_resources", "--resources", res1_spec,
res2_spec, "--classpath_resources", cp_res_path, cp_res_path});
string cp_res = GetEntryContents(out_path, "cp_res");
EXPECT_EQ("line1\nline2\n", cp_res);
string foo = GetEntryContents(out_path, "foo");
EXPECT_EQ("resline1\nresline2\n", foo);
}
// Extra combiners
// An ExtraCombiner registered for an entry name must concatenate that
// entry's contents from all source jars into the output entry.
TEST_F(OutputJarSimpleTest, ExtraCombiners) {
string resolvedLibDataPath1 = runfiles->Rlocation(kPathLibData1);
string resolvedLibDataPath2 = runfiles->Rlocation(kPathLibData2);
string out_path = OutputFilePath("out.jar");
const char kEntry[] = "tools/singlejar/data/extra_file1";
output_jar_.ExtraCombiner(kEntry, new Concatenator(kEntry));
CreateOutput(out_path, {"--sources", resolvedLibDataPath1.c_str(),
resolvedLibDataPath2.c_str()});
string contents1 = GetEntryContents(resolvedLibDataPath1.c_str(), kEntry);
string contents2 = GetEntryContents(resolvedLibDataPath2.c_str(), kEntry);
EXPECT_EQ(contents1 + contents2, GetEntryContents(out_path, kEntry));
}
// Test ExtraHandler override.
// Drives a CustomOutputJar through Doit() directly (instead of
// CreateOutput()) and expects the same concatenation behavior.
TEST_F(OutputJarSimpleTest, ExtraHandler) {
string resolvedLibDataPath1 = runfiles->Rlocation(kPathLibData1);
string resolvedLibDataPath2 = runfiles->Rlocation(kPathLibData2);
string out_path = OutputFilePath("out.jar");
const char kEntry[] = "tools/singlejar/data/extra_file1";
const char *option_list[] = {"--output", out_path.c_str(), "--sources",
resolvedLibDataPath1.c_str(),
resolvedLibDataPath2.c_str()};
CustomOutputJar custom_output_jar;
options_.ParseCommandLine(arraysize(option_list), option_list);
ASSERT_EQ(0, custom_output_jar.Doit(&options_));
EXPECT_EQ(0, VerifyZip(out_path));
string contents1 = GetEntryContents(resolvedLibDataPath1.c_str(), kEntry);
string contents2 = GetEntryContents(resolvedLibDataPath2.c_str(), kEntry);
EXPECT_EQ(contents1 + contents2, GetEntryContents(out_path, kEntry));
}
// --include_headers
// Only entries matching --include_prefixes (plus the generated manifest,
// directory and build-data entries) may appear in the output.
TEST_F(OutputJarSimpleTest, IncludeHeaders) {
string resolvedLibDataPath1 = runfiles->Rlocation(kPathLibData1);
string out_path = OutputFilePath("out.jar");
CreateOutput(
out_path,
{"--sources",
runfiles
->Rlocation(
"io_bazel/src/tools/singlejar/libtest1.jar")
.c_str(),
resolvedLibDataPath1.c_str(), "--include_prefixes",
"tools/singlejar/data"});
std::vector<string> expected_entries(
{"META-INF/", "META-INF/MANIFEST.MF", "build-data.properties",
"tools/singlejar/data/", "tools/singlejar/data/extra_file1",
"tools/singlejar/data/extra_file2"});
std::vector<string> jar_entries;
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
while ((cdh = input_jar.NextEntry(&lh))) {
jar_entries.push_back(cdh->file_name_string());
}
input_jar.Close();
EXPECT_EQ(expected_entries, jar_entries);
}
// --normalize
// Verifies that --normalize rewrites every entry's timestamp to the fixed
// date 01/01/2010 (with .class entries at 00:00:02 and everything else at
// 00:00:00) and strips Unix-time extra fields added by the zip tool.
TEST_F(OutputJarSimpleTest, Normalize) {
  // Creates output jar containing entries from all possible sources:
  //  * archives created by java_library rule, by jar tool, by zip
  //  * resource files
  //  * classpath resource files
  string out_path = OutputFilePath("out.jar");

  // A test jar built with the jar tool: one text file, one .class file.
  string testjar_path = OutputFilePath("testinput.jar");
  {
    std::string jar_tool_path = runfiles->Rlocation(JAR_TOOL_PATH);
    string textfile_path = CreateTextFile("jar_testinput.txt", "jar_inputtext");
    string classfile_path = CreateTextFile("JarTestInput.class", "Dummy");
    unlink(testjar_path.c_str());
    ASSERT_EQ(
        0, RunCommand(jar_tool_path.c_str(), "-cf", testjar_path.c_str(),
                      textfile_path.c_str(), classfile_path.c_str(), nullptr));
  }

  // A test zip built with the zip tool, which also records Unix timestamps —
  // normalization must remove those.
  string testzip_path = OutputFilePath("testinput.zip");
  {
    string textfile_path = CreateTextFile("zip_testinput.txt", "zip_inputtext");
    string classfile_path = CreateTextFile("ZipTestInput.class", "Dummy");
    unlink(testzip_path.c_str());
    ASSERT_EQ(
        0, RunCommand("zip", "-m", testzip_path.c_str(), textfile_path.c_str(),
                      classfile_path.c_str(), nullptr));
  }

  string resource_path = CreateTextFile("resource", "resource_text");
  string cp_resource_path = CreateTextFile("cp_resource", "cp_resource_text");

  // TODO(asmundak): check the following generated entries, too:
  //  * services
  //  * spring.schemas
  //  * spring.handlers
  //  * protobuf.meta
  //  * extra combiner
  CreateOutput(
      out_path,
      {"--normalize", "--sources",
       runfiles
           ->Rlocation(
               "io_bazel/src/tools/singlejar/libtest1.jar")
           .c_str(),
       testjar_path, testzip_path, "--resources", resource_path,
       "--classpath_resources", cp_resource_path});

  // Scan all entries, verify that *.class entries have timestamp
  // 01/01/2010 00:00:02 and the rest have the timestamp of 01/01/2010 00:00:00.
  InputJar input_jar;
  ASSERT_TRUE(input_jar.Open(out_path));
  const LH *lh;
  const CDH *cdh;
  const size_t class_suffix_len = strlen(".class");
  while ((cdh = input_jar.NextEntry(&lh))) {
    string entry_name = cdh->file_name_string();
    EXPECT_EQ(lh->last_mod_file_date(), cdh->last_mod_file_date())
        << entry_name << " modification date";
    EXPECT_EQ(lh->last_mod_file_time(), cdh->last_mod_file_time())
        << entry_name << " modification time";
    // 15393 is 01/01/2010 in the MSDOS date encoding.
    EXPECT_EQ(15393, cdh->last_mod_file_date())
        << entry_name << " modification date should be 01/01/2010";
    // Bug fix: the previous check computed
    //   entry_name.size() - strlen(".class")
    // without verifying the name is at least 6 characters long; for shorter
    // names (e.g. directory entries) the unsigned subtraction underflowed and
    // c_str() + n read out of bounds.
    const bool is_class_entry =
        entry_name.size() >= class_suffix_len &&
        0 == entry_name.compare(entry_name.size() - class_suffix_len,
                                class_suffix_len, ".class");
    if (is_class_entry) {
      // MSDOS time stores seconds divided by two, so 1 means 00:00:02.
      EXPECT_EQ(1, cdh->last_mod_file_time())
          << entry_name
          << " modification time for .class entry should be 00:00:02";
    } else {
      EXPECT_EQ(0, cdh->last_mod_file_time())
          << entry_name
          << " modification time for non .class entry should be 00:00:00";
    }
    // Zip creates Unix timestamps, too. Check that normalization removes them.
    ASSERT_EQ(nullptr, cdh->unix_time_extra_field())
        << entry_name << ": CDH should not have Unix Time extra field";
    ASSERT_EQ(nullptr, lh->unix_time_extra_field())
        << entry_name << ": LH should not have Unix Time extra field";
  }
  input_jar.Close();
}
// The files names META-INF/services/<something> are concatenated.
// The files named META-INF/spring.handlers are concatenated.
// The files named META-INF/spring.schemas are concatenated.
TEST_F(OutputJarSimpleTest, Services) {
CreateTextFile("META-INF/services/spi.DateProvider",
"my.DateProviderImpl1\n");
CreateTextFile("META-INF/services/spi.TimeProvider",
"my.TimeProviderImpl1\n");
CreateTextFile("META-INF/spring.handlers", "handler1\n");
CreateTextFile("META-INF/spring.schemas", "schema1\n");
// We have to be in the output directory if we want to have entries in the
// archive to start with META-INF. The resulting zip will contain 4 entries:
// META-INF/services/spi.DateProvider
// META-INF/services/spi.TimeProvider
// META-INF/spring.handlers
// META-INF/spring.schemas
string out_dir = OutputFilePath("");
ASSERT_EQ(0, RunCommand("cd", out_dir.c_str(), CMD_SEPARATOR, "zip", "-mr",
"testinput1.zip", "META-INF", nullptr));
string zip1_path = OutputFilePath("testinput1.zip");
// Create the second zip, with 3 files:
// META-INF/services/spi.DateProvider.
// META-INF/spring.handlers
// META-INF/spring.schemas
CreateTextFile("META-INF/services/spi.DateProvider",
"my.DateProviderImpl2\n");
CreateTextFile("META-INF/spring.handlers", "handler2\n");
CreateTextFile("META-INF/spring.schemas", "schema2\n");
// NOTE(review): "cd " below carries a trailing space while the call above
// uses "cd" — presumably RunCommand joins arguments with spaces so both
// work; confirm and unify.
ASSERT_EQ(0, RunCommand("cd ", out_dir.c_str(), CMD_SEPARATOR, "zip", "-mr",
"testinput2.zip", "META-INF", nullptr));
string zip2_path = OutputFilePath("testinput2.zip");
// The output jar should contain two service entries. The contents of the
// META-INF/services/spi.DateProvider should be the concatenation of the
// contents of this entry from both archives. And it should also contain
// spring.handlers and spring.schemas entries.
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--sources", zip1_path, zip2_path});
EXPECT_EQ("my.DateProviderImpl1\n" "my.DateProviderImpl2\n",
GetEntryContents(out_path, "META-INF/services/spi.DateProvider"));
EXPECT_EQ("my.TimeProviderImpl1\n",
GetEntryContents(out_path, "META-INF/services/spi.TimeProvider"));
EXPECT_EQ("schema1\n" "schema2\n",
GetEntryContents(out_path, "META-INF/spring.schemas"));
EXPECT_EQ("handler1\n" "handler2\n",
GetEntryContents(out_path, "META-INF/spring.handlers"));
}
// Test that in the absence of the compression option all the plain files in
// the output archive are not compressed but just stored.
TEST_F(OutputJarSimpleTest, NoCompressionOption) {
string out_path = CompressionOptionsTestingJar("");
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
while ((cdh = input_jar.NextEntry(&lh))) {
string entry_name = lh->file_name_string();
// Without --compression every entry, file or directory, must be stored.
EXPECT_EQ(lh->compression_method(), cdh->compression_method());
EXPECT_EQ(Z_NO_COMPRESSION, lh->compression_method())
<< "Entry " << entry_name << " should be stored.";
}
input_jar.Close();
}
// Test --compression option. If enabled, all file entries are compressed
// while all directory entries remain uncompressed.
TEST_F(OutputJarSimpleTest, CompressionOption) {
string out_path = CompressionOptionsTestingJar("--compression");
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
while ((cdh = input_jar.NextEntry(&lh))) {
string entry_name = lh->file_name_string();
// Each file entry is compressed, each directory entry is uncompressed.
EXPECT_EQ(lh->compression_method(), cdh->compression_method());
if (lh->file_name()[lh->file_name_length() - 1] != '/') {
EXPECT_EQ(Z_DEFLATED, lh->compression_method())
<< "File entry " << entry_name << " should be compressed.";
} else {
EXPECT_EQ(Z_NO_COMPRESSION, lh->compression_method())
<< "Directory entry " << entry_name << " should be stored.";
}
}
input_jar.Close();
}
// Test --dontchangecompression option. If enabled, existing file entries are
// copied as is, and created entries are compressed.
TEST_F(OutputJarSimpleTest, DontChangeCompressionOption) {
string out_path = CompressionOptionsTestingJar("--dont_change_compression");
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
// The only entry known to be stored (uncompressed) in its source jar.
std::string kStoredEntry = DATA_DIR_TOP "src/tools/singlejar/output_jar.cc";
while ((cdh = input_jar.NextEntry(&lh))) {
string entry_name = lh->file_name_string();
EXPECT_EQ(lh->compression_method(), cdh->compression_method());
if (lh->file_name()[lh->file_name_length() - 1] != '/') {
// All created file entries are compressed, and so are all the file
// entries from the input jar created by the java_library rule. Only
// the file entries from the 'stored_jar' should be uncompressed, and
// it contains a single one:
if (entry_name == kStoredEntry) {
EXPECT_EQ(Z_NO_COMPRESSION, lh->compression_method())
<< "File entry " << entry_name << " should be stored.";
} else {
EXPECT_EQ(Z_DEFLATED, lh->compression_method())
<< "File entry " << entry_name << " should be compressed.";
}
} else {
EXPECT_EQ(Z_NO_COMPRESSION, lh->compression_method())
<< "Directory entry " << entry_name << " should be stored.";
}
}
input_jar.Close();
}
// Name of the build-info entry singlejar generates.
const char kBuildDataFile[] = "build-data.properties";
// Test --exclude_build_data option when none of the source archives contain
// build-data.properties file: no such file in the output archive.
TEST_F(OutputJarSimpleTest, ExcludeBuildData1) {
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--exclude_build_data"});
InputJar input_jar;
ASSERT_TRUE(input_jar.Open(out_path));
const LH *lh;
const CDH *cdh;
while ((cdh = input_jar.NextEntry(&lh))) {
string entry_name = lh->file_name_string();
EXPECT_NE(kBuildDataFile, lh->file_name_string());
}
input_jar.Close();
}
// Test --exclude_build_data option when a source archive contains
// build-data.properties file, it should be then copied to the output.
TEST_F(OutputJarSimpleTest, ExcludeBuildData2) {
string out_dir = OutputFilePath("");
string testzip_path = OutputFilePath("testinput.zip");
string buildprop_path = CreateTextFile(kBuildDataFile, "build: foo");
unlink(testzip_path.c_str());
// NOTE(review): "cd " carries a trailing space; presumably RunCommand joins
// arguments with spaces so this is harmless — confirm.
ASSERT_EQ(0, RunCommand("cd ", out_dir.c_str(), CMD_SEPARATOR, "zip", "-m",
"testinput.zip", kBuildDataFile, nullptr));
string out_path = OutputFilePath("out.jar");
CreateOutput(out_path, {"--exclude_build_data", "--sources", testzip_path});
EXPECT_EQ("build: foo", GetEntryContents(out_path, kBuildDataFile));
}
// Test that the entries with suffixes in --nocompressed_suffixes are
// not compressed. This applies both to the source archives' entries and
// standalone files.
TEST_F(OutputJarSimpleTest, Nocompress) {
  string res1_path =
      CreateTextFile("resource.foo", "line1\nline2\nline3\nline4\n");
  string res2_path =
      CreateTextFile("resource.bar", "line1\nline2\nline3\nline4\n");
  string out_path = OutputFilePath("out.jar");
  CreateOutput(
      out_path,
      {"--compression", "--sources",
       runfiles
           ->Rlocation(
               "io_bazel/src/tools/singlejar/libtest1.jar")
           .c_str(),
       "--resources", res1_path, res2_path, "--nocompress_suffixes", ".foo",
       ".h"});
  InputJar input_jar;
  ASSERT_TRUE(input_jar.Open(out_path));
  const LH *lh;
  const CDH *cdh;
  while ((cdh = input_jar.NextEntry(&lh))) {
    // Bug fix: the previous code compared the bytes at
    // file_name() + file_name_length() - 4 without checking the name length,
    // so entry names shorter than the suffix made strncmp read before the
    // start of the name buffer (undefined behavior).
    const string entry_name = lh->file_name_string();
    auto has_suffix = [&entry_name](const char *suffix) {
      const size_t suffix_len = strlen(suffix);
      return entry_name.size() >= suffix_len &&
             0 == entry_name.compare(entry_name.size() - suffix_len,
                                     suffix_len, suffix);
    };
    if (has_suffix(".foo") || has_suffix(".h")) {
      // Suffixes listed in --nocompress_suffixes must be stored.
      EXPECT_EQ(Z_NO_COMPRESSION, lh->compression_method())
          << "Expected " << lh->file_name_string() << " uncompressed";
    } else if (has_suffix(".cc") || has_suffix(".bar")) {
      // Everything else is compressed because of --compression.
      EXPECT_EQ(Z_DEFLATED, lh->compression_method())
          << "Expected " << lh->file_name_string() << " compressed";
    }
  }
  input_jar.Close();
}
} // namespace
|
#!/bin/bash
# Run a YCSB-C load phase against the kvs backend on node 1 of the cluster.
# Usage: $0 <workload-letter>   (e.g. "$0 a" loads workloada.spec)
set -eu

# Bug fix: the workload argument was previously used without validation, so a
# missing argument silently built the bogus spec name "workload.spec".
WORKLOAD="${1:?usage: $0 <workload-letter>}"

NODE01="l4tm@192.168.122.169"
NODE02="l4tm@192.168.122.171"   # currently unused; kept for future fan-out
NODE03="l4tm@192.168.122.170"

# Enable core dumps on the remote node, then run the loader; 2>&1 keeps
# remote stderr interleaved with stdout in the local log.
COMMAND="ulimit -c unlimited; cd YCSB-C/build/bin; ./ycsbc -db kvs -threads 4 -P workloads/workload${WORKLOAD}.spec -mode load 2>&1; exit"

ssh "${NODE01}" "${COMMAND}"
|
/**
 * Parse a buffer with the parser registered for the given file extension,
 * or, when no extension is supplied, by trying every parser in turn.
 *
 * @param {string|null|undefined} fileExt File extension (case-insensitive)
 *   used to select a parser, or falsy to auto-detect.
 * @param {*} buffer Raw content handed to the parser functions.
 * @param {Object<string, function(*): *>} parsers Map of lower-case
 *   extension -> parser; a parser returns a truthy result or a falsy value
 *   when it cannot parse the buffer.
 * @returns {*} The parse result, or null when nothing could parse the buffer.
 */
function parseFile(fileExt, buffer, parsers) {
  if (fileExt) {
    const fileExtLower = fileExt.toLowerCase();
    if (parsers[fileExtLower]) {
      return parsers[fileExtLower](buffer);
    }
    // No parser registered for this extension.
    return null;
  }
  // No extension given: first parser producing a truthy result wins.
  for (const key of Object.keys(parsers)) {
    const parsed = parsers[key](buffer);
    if (parsed) {
      return parsed;
    }
  }
  // Bug fix: previously fell through and returned undefined here, which was
  // inconsistent with the explicit null on the unknown-extension path.
  return null;
}
#!/bin/sh -e
# Copyright 2018 ConsenSys AG.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

# write pub key for checking that network works
node_id=$(freight-devnet)
PUBLIC_KEYS_DIR=${BESU_PUBLIC_KEY_DIRECTORY:=/opt/besu/public-keys/}
# Bug fix: "$@" is now quoted so besu arguments containing spaces survive
# intact (unquoted $@ re-split every argument).
/opt/besu/bin/besu "$@" public-key export --to="${PUBLIC_KEYS_DIR}${node_id}"

BOOTNODE_KEY_FILE="${PUBLIC_KEYS_DIR}bootnode"

# sleep loop to wait for the public key file to be written
while [ ! -f "$BOOTNODE_KEY_FILE" ]
do
  sleep 1
done

# get bootnode enode address (strip the 0x prefix from the exported key,
# resolve the bootnode container's IP via NSS)
bootnode_pubkey=$(sed 's/^0x//' "$BOOTNODE_KEY_FILE")
bootnode_ip=$(getent hosts bootnode | awk '{ print $1 }')
BOOTNODE_P2P_PORT="30303"
bootnode_enode_address="enode://${bootnode_pubkey}@${bootnode_ip}:${BOOTNODE_P2P_PORT}"

# run with bootnode param
/opt/besu/bin/besu "$@" --bootnodes="$bootnode_enode_address"
|
#!/bin/sh
#####################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#####################################################################
set -e
# this script requires a posix shell; namely, $(( math evaluation.
# help: print the usage summary for every sub-command to stdout.
help() {
cat << _EOF_
$0 [options]
mergefromtrunk.sh merge \$rev Apply revision \$rev from trunk.
mergefromtrunk.sh test Run test suite(clean-all, load-demo, run-tests).
mergefromtrunk.sh commit Commit current fix to svn.
mergefromtrunk.sh abort Abort current merge session.
-h | --help Show this help.
_EOF_
}
# Parse the command line: the first bare word is the sub-command; a second
# bare word is only accepted as the revision argument of 'merge'.
cmd=""
rev=""
while [ $# -gt 0 ]; do
case "$1" in
(-h|--help)
help
exit 0
;;
(-*)
echo "Unknown arg ($1)." 1>&2
help 1>&2
exit 1
;;
(*)
# First non-option word becomes the command ...
if [ z = "z$cmd" ]; then
cmd="$1"
else
# ... any further word is only valid for 'merge' (the revision).
case "$cmd" in
(merge)
rev="$1"
;;
(*)
echo "Too many arguments." 1>&2
help 1>&2
exit 1
;;
esac
fi
;;
esac
shift
done
# Dispatch on the parsed sub-command.
case "$cmd" in
(merge)
if [ z = "z$rev" ]; then
echo "Need a revision." 1>&2
help 1>&2
exit 1
fi
# Refuse to start a second merge session on top of an unfinished one.
if [ -d runtime/merge-state ]; then
echo "Merge session already started." 1>&2
help 1>&2
exit 1
fi
mkdir -p runtime/merge-state
echo "$rev" > runtime/merge-state/revision
# do not run any of the following commands in a complex
# chained pipe; if one of the commands in the pipe fails,
# it isn't possible to detect the failure.
printf "Applied fix from trunk for revision: %s \n===\n\n" "$rev" > runtime/merge-state/log-message
svn log https://svn.apache.org/repos/asf/ofbiz/ofbiz-framework/trunk -r "$rev" > runtime/merge-state/log.txt
# Trim the dashed frame lines of 'svn log' output ($1 is the line count
# from wc) and append the remaining message body to the commit message.
set -- $(wc -l runtime/merge-state/log.txt)
head -n $(($1 - 1)) < runtime/merge-state/log.txt > runtime/merge-state/log.txt.head
tail -n $(($1 - 4)) < runtime/merge-state/log.txt.head >> runtime/merge-state/log-message
prevRev=$(($rev - 1))
svn up
svn merge -r "$prevRev:$rev" https://svn.apache.org/repos/asf/ofbiz/ofbiz-framework/trunk
;;
(test)
# Full verification cycle: wipe, reload seed/demo data, run the suite.
./ant clean-all
./ant load-demo
./ant run-tests
;;
(commit)
svn commit -F runtime/merge-state/log-message
rm -rf runtime/merge-state
;;
(abort)
# Drop conflict markers and local changes, then forget the session.
svn resolved . -R
svn revert . -R
rm -rf runtime/merge-state
;;
("")
echo "Need a command and a revision." 1>&2
help 1>&2
exit 1
;;
(*)
echo "Unknown command($cmd)." 1>&2
help 1>&2
exit 1
;;
esac
|
#!/usr/bin/env bash
# Build or publish every artifact of the repository in sequence.
# Bug fix: the script uses bash arrays, BASH_SOURCE and arithmetic, so the
# previous "#!/bin/sh" shebang was wrong on systems where sh is not bash.
# set -eu additionally stops the run on the first failing gradle task
# instead of silently continuing (and "succeeding") after a failed build.
set -eu

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

# Flat list of (project directory, gradle task) pairs.
ARTIFACTS=(
  "tools/api_classes_generator" "run"
  "wrapper" "publish"
  "tools/entry_generator" "build"
  "tools/gradle_plugin" "publish"
  "samples/games/kotlin" "build"
  "samples/coroutines/kotlin" "build"
)

cd "$CURRENT_PATH"

# executeGradle <project-dir> <task>: run the gradle task inside the project
# directory, echoing commands (set -x) for CI logs, then return to the root.
executeGradle() {
  set -x
  cd "$1"
  ./gradlew "$2" --stacktrace
  set +x
  cd "$CURRENT_PATH"
}

artifactLength=${#ARTIFACTS[@]}
i=0
while [ "$i" -lt "$artifactLength" ]; do
  executeGradle "${ARTIFACTS[$i]}" "${ARTIFACTS[$((i + 1))]}"
  # Plain arithmetic assignment: (( i += 2 )) would trip set -e when the
  # expression evaluates to 0.
  i=$((i + 2))
done
|
use libfuzzer_sys::arbitrary;
use libfuzzer_sys::fuzz_target;
/// A binary tree node generated recursively from fuzzer-provided bytes.
#[derive(Debug)]
struct TreeNode {
value: i32,
left: Option<Box<TreeNode>>,
right: Option<Box<TreeNode>>,
}
// Decode a TreeNode from unstructured bytes: an i32 payload, then one bool
// per child deciding whether to recurse into that side.
// NOTE(review): recursion depth is bounded only by the input running out of
// bytes; a long input of "true" child flags can build a very deep tree and
// overflow the stack — consider a depth limit or the arbitrary crate's
// recursion guard. TODO: confirm against the crate version in use.
impl arbitrary::Arbitrary for TreeNode {
fn arbitrary(u: &mut arbitrary::Unstructured) -> arbitrary::Result<Self> {
// Node payload.
let value = u.arbitrary::<i32>()?;
// Presence flag, then recursive decode of the left child.
let left = if u.arbitrary::<bool>()? {
Some(Box::new(u.arbitrary()?))
} else {
None
};
// Same for the right child.
let right = if u.arbitrary::<bool>()? {
Some(Box::new(u.arbitrary()?))
} else {
None
};
Ok(TreeNode { value, left, right })
}
}
// Fuzzer entry point: try to decode a TreeNode from the raw input and
// exercise its Debug formatting; inputs that fail to decode are ignored.
fuzz_target!(|data: &[u8]| {
if let Ok(tree) = arbitrary::Arbitrary::arbitrary(&mut arbitrary::Unstructured::new(data)) {
println!("{:?}", tree);
// Perform operations or validations on the generated tree
}
});
import * as React from 'react';
import { Scope } from 'effector/fork';
import { Provider } from 'effector-react/ssr';
import { Pages } from './pages';
import { Globals } from './globals';
interface Props {
  // Effector scope created for this render (e.g. on the server per request).
  scope: Scope;
}

/**
 * Root application component: binds the effector scope to the React tree via
 * the SSR Provider, then renders global styles and the page router.
 */
export const Application: React.FC<Props> = ({ scope }) => {
  return (
    <Provider value={scope}>
      <>
        <Globals />
        <Pages />
      </>
    </Provider>
  );
};
|
#! /bin/sh
# First argument: build tree root containing the 'find' binary under test.
versiondir=$1
# Copyright (C) 2013 Free Software Foundation, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Emit a header similar to that from diff -u; Print the simulated "diff"
# command so that the order of arguments is clear. Don't bother with @@ lines.
emit_diff_u_header_ ()
{
  # One line per printf keeps the simulated "diff -u" header easy to scan;
  # the epoch dates stand in for real file timestamps.
  printf '%s\n' "diff -u $*"
  printf '%s\n' "--- $1 1970-01-01"
  printf '%s\n' "+++ $2 1970-01-01"
}
# Arrange not to let diff or cmp operate on /dev/null,
# since on some systems (at least OSF/1 5.1), that doesn't work.
# When there are not two arguments, or no argument is /dev/null, return 2.
# When one argument is /dev/null and the other is not empty,
# cat the nonempty file to stderr and return 1.
# Otherwise, return 0.
compare_dev_null_ ()
{
  # Only a two-operand comparison can involve /dev/null.
  test $# = 2 || return 2

  if test "x$1" = x/dev/null; then
    # Expected output is empty: succeed only when the actual file is empty,
    # otherwise show the whole actual file as additions.
    if test -s "$2"; then
      emit_diff_u_header_ "$@"
      sed 's/^/+/' "$2"
      return 1
    fi
    return 0
  fi

  if test "x$2" = x/dev/null; then
    # Mirror case: the actual side is empty; show the expected file as
    # removals when it is non-empty.
    if test -s "$1"; then
      emit_diff_u_header_ "$@"
      sed 's/^/-/' "$1"
      return 1
    fi
    return 0
  fi

  # Neither operand is /dev/null: let the caller run a real comparison.
  return 2
}
# Pick an implementation for compare_ by probing the local diff/cmp once:
#   1. 'diff -u' when it is supported and silent on identical files,
#   2. 'diff -c' as a fallback,
#   3. GNU 'cmp -s', or plain 'cmp' as a last resort.
if diff_out_=`exec 2>/dev/null; diff -u "$0" "$0" < /dev/null` \
&& diff -u Makefile "$0" 2>/dev/null | grep '^[+]#!' >/dev/null; then
# diff accepts the -u option and does not (like AIX 7 'diff') produce an
# extra space on column 1 of every content line.
if test -z "$diff_out_"; then
compare_ () { diff -u "$@"; }
else
compare_ ()
{
if diff -u "$@" > diff.out; then
# No differences were found, but Solaris 'diff' produces output
# "No differences encountered". Hide this output.
rm -f diff.out
true
else
cat diff.out
rm -f diff.out
false
fi
}
fi
elif diff_out_=`exec 2>/dev/null; diff -c "$0" "$0" < /dev/null`; then
if test -z "$diff_out_"; then
compare_ () { diff -c "$@"; }
else
compare_ ()
{
if diff -c "$@" > diff.out; then
# No differences were found, but AIX and HP-UX 'diff' produce output
# "No differences encountered" or "There are no differences between the
# files.". Hide this output.
rm -f diff.out
true
else
cat diff.out
rm -f diff.out
false
fi
}
fi
elif ( cmp --version < /dev/null 2>&1 | grep GNU ) > /dev/null 2>&1; then
compare_ () { cmp -s "$@"; }
else
compare_ () { cmp "$@"; }
fi
# Usage: compare EXPECTED ACTUAL
#
# Given compare_dev_null_'s preprocessing, defer to compare_ if 2 or more.
# Otherwise, propagate $? to caller: any diffs have already been printed.
compare ()
{
# This looks like it can be factored to use a simple "case $?"
# after unchecked compare_dev_null_ invocation, but that would
# fail in a "set -e" environment.
if compare_dev_null_ "$@"; then
return 0
else
case $? in
# 1: one side was /dev/null and the diff has already been printed.
1) return 1;;
# 2 (or anything else): not a /dev/null case — run the real tool.
*) compare_ "$@";;
esac
fi
}
die() {
  # Report a fatal error on stderr and terminate the script with status 1.
  printf '%s\n' "$*" >&2
  exit 1
}
fail=0
# Regression check: invoking find with -newerat and no reference file must
# not crash.  Exit status 139 (128 + SIGSEGV) marks a segfault and fails the
# test; an ordinary usage error (e.g. status 1) is the expected outcome.
# The path is quoted so a build directory containing spaces still works.
"$versiondir/find/find" -newerat
if test $? = 139; then
  fail=1
fi
exit $fail
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.resource.accesscontrol.ui;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.segmentedview.SegmentViewComponent;
import org.olat.core.gui.components.segmentedview.SegmentViewEvent;
import org.olat.core.gui.components.segmentedview.SegmentViewFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.id.Identity;
import org.olat.resource.accesscontrol.provider.auto.ui.AdvanceOrderController;
/**
*
* Initial date: 08.09.2017<br>
* @author uhensler, <EMAIL>, http://www.<EMAIL>
*
*/
/**
 * Segmented view letting a user switch between their regular orders and
 * their advance (auto) orders. The controller for each segment is created
 * lazily the first time the segment is opened.
 */
public class UserOrderController extends BasicController {

    private final VelocityContainer mainVC;
    private final Link ordersLink, advanceOrdersLink;
    private final SegmentViewComponent segmentView;

    private OrdersController ordersCtrl;
    private AdvanceOrderController advanceOrdersCtrl;
    private Identity identity;

    public UserOrderController(UserRequest ureq, WindowControl wControl, Identity identity) {
        super(ureq, wControl);
        this.identity = identity;

        mainVC = createVelocityContainer("segments");
        segmentView = SegmentViewFactory.createSegmentView("segments", mainVC, this);

        // "Orders" is the initially selected segment.
        ordersLink = LinkFactory.createLink("segment.orders", mainVC, this);
        segmentView.addSegment(ordersLink, true);
        advanceOrdersLink = LinkFactory.createLink("segment.advance.orders", mainVC, this);
        segmentView.addSegment(advanceOrdersLink, false);

        doOpenOrders(ureq);
        putInitialPanel(mainVC);
    }

    @Override
    protected void event(UserRequest ureq, Component source, Event event) {
        // Only clicks coming from the segment view are of interest here.
        if (source != segmentView || !(event instanceof SegmentViewEvent)) {
            return;
        }
        SegmentViewEvent segmentEvent = (SegmentViewEvent) event;
        Component clicked = mainVC.getComponent(segmentEvent.getComponentName());
        if (clicked == ordersLink) {
            doOpenOrders(ureq);
        } else if (clicked == advanceOrdersLink) {
            doOpenAdvanceOrders(ureq);
        }
    }

    /** Show the regular orders segment, creating its controller on demand. */
    private void doOpenOrders(UserRequest ureq) {
        if (ordersCtrl == null) {
            ordersCtrl = new OrdersController(ureq, getWindowControl(), identity);
            listenTo(ordersCtrl);
        }
        mainVC.put("segmentCmp", ordersCtrl.getInitialComponent());
    }

    /** Show the advance orders segment, creating its controller on demand. */
    private void doOpenAdvanceOrders(UserRequest ureq) {
        if (advanceOrdersCtrl == null) {
            advanceOrdersCtrl = new AdvanceOrderController(ureq, getWindowControl(), identity);
            listenTo(advanceOrdersCtrl);
        }
        mainVC.put("segmentCmp", advanceOrdersCtrl.getInitialComponent());
    }
}
|
<filename>algorithms/code/hackerrank/encryption/SolutionCore.java
package hackerrank.encryption;
/**
* https://www.hackerrank.com/challenges/encryption
*/
/**
 * https://www.hackerrank.com/challenges/encryption
 *
 * The plain text is laid out row-wise in a grid with
 * cols = ceil(sqrt(length)) columns and rows = ceil(length / cols) rows,
 * then read out column by column, columns separated by a single space.
 */
public final class SolutionCore {

    private SolutionCore() {
    }

    /**
     * Encrypts {@code plain} by the column-read grid scheme above.
     *
     * @param plain the text to encrypt (no spaces expected by the puzzle)
     * @return the space-separated column reading; "" for an empty input
     */
    protected static String encrypt(final String plain) {
        int length = plain.length();
        // Guard: for an empty string cols would be 0 and the old
        // "l % cols" computation threw ArithmeticException (divide by zero).
        if (length == 0) {
            return "";
        }
        int cols = (int) Math.ceil(Math.sqrt(length));
        // Integer ceiling of length / cols.
        int rows = (length + cols - 1) / cols;
        StringBuilder result = new StringBuilder();
        for (int c = 0; c < cols; c++) {
            if (c != 0) {
                result.append(' ');
            }
            // Emit column c: every cols-th character starting at offset c.
            for (int r = 0; r < rows; r++) {
                int index = r * cols + c;
                if (index < length) {
                    result.append(plain.charAt(index));
                }
            }
        }
        return result.toString();
    }
}
|
/**
 * Registers the 'settingsService' Angular service, which loads and persists
 * per-profile settings stored under app/profiles/<profile>/settings.json.
 */
export default function(ngapp, fileHelpers) {
    ngapp.service('settingsService', function() {
        const service = this;

        // Defaults used when a profile has no settings file yet.
        const defaultSettings = {
            'cacheErrors': true,
            'theme': 'Vanilla'
        };

        // Point the service at a profile and read its settings from disk,
        // falling back to defaultSettings when the file is absent.
        service.loadSettings = function(profileName) {
            service.currentProfile = profileName;
            service.profilePath = `app/profiles/${profileName}`;
            service.settingsPath = `${service.profilePath}/settings.json`;
            service.settings = fileHelpers.loadJsonFile(service.settingsPath, defaultSettings);
        };

        // Persist the in-memory settings back to the profile's settings file.
        service.saveSettings = function() {
            fileHelpers.saveJsonFile(service.settingsPath, service.settings);
        };
    });
}
|
package com.ulfy.master.ui.view;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.Button;
import com.ulfy.android.mvvm.IViewModel;
import com.ulfy.android.ui_injection.Layout;
import com.ulfy.android.ui_injection.ViewById;
import com.ulfy.android.ui_injection.ViewClick;
import com.ulfy.android.views.ChartView;
import com.ulfy.master.R;
import com.ulfy.master.application.vm.ChartViewVM;
import com.ulfy.master.ui.base.BaseView;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@Layout(id = R.layout.view_chart_view)
public class ChartViewView extends BaseView {
@ViewById(id = R.id.refreshBT) private Button refreshBT;
@ViewById(id = R.id.charCV) private ChartView charCV;
private ChartViewVM vm;
public ChartViewView(Context context) {
super(context);
init(context, null);
}
public ChartViewView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
private void init(Context context, AttributeSet attrs) {
}
@Override public void bind(IViewModel model) {
vm = (ChartViewVM) model;
}
/**
* click: refreshBT
* 刷新表格数据
*/
@ViewClick(ids = R.id.refreshBT) private void refreshBT(View v) {
List<ChartView.Data> dataList = new ArrayList<>();
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
dataList.add(new ChartView.Data("03.11", new Random().nextInt(100)));
charCV.setDataList(dataList);
}
} |
#!/usr/bin/env bash
set -euo pipefail

# Deploys the OCP Metal UI into the assisted-installer cluster and exposes it
# via port forwarding. Requires scripts/utils.sh for get_main_ip, print_log,
# wait_for_url_and_run and spawn_port_forwarding_command.
source scripts/utils.sh

# Split declaration from assignment so a get_main_ip failure is not masked
# by 'export' returning 0 (ShellCheck SC2155) and aborts under 'set -e'.
NODE_IP=$(get_main_ip)
export NODE_IP
export UI_PORT=${UI_PORT:-6008}
export KUBECONFIG=${KUBECONFIG:-$HOME/.kube/config}
export CONTAINER_COMMAND=${CONTAINER_COMMAND:-podman}
export UI_DEPLOY_FILE=build/ui_deploy.yaml
export UI_SERVICE_NAME=ocp-metal-ui
export NO_UI=${NO_UI:-n}
export NAMESPACE=${NAMESPACE:-assisted-installer}

if [ "${CONTAINER_COMMAND}" = "podman" ]; then
    export PODMAN_FLAGS="--pull=always"
else
    export PODMAN_FLAGS=""
fi

# Nothing to do when UI deployment is disabled.
if [ "${NO_UI}" != "n" ]; then
    exit 0
fi

mkdir -p build

# In case deploy tag is empty (or unset) use latest. The ':-' default is
# required: the previous bare ${DEPLOY_TAG} aborted under 'set -u' whenever
# the variable was not exported by the caller.
export DEPLOY_TAG=${DEPLOY_TAG:-latest}

print_log "Starting ui"
${CONTAINER_COMMAND} pull quay.io/ocpmetal/ocp-metal-ui:latest
# Render the deployment manifest pointing at the in-cluster service URL.
# PODMAN_FLAGS is intentionally unquoted: it may be empty or hold a flag.
${CONTAINER_COMMAND} run ${PODMAN_FLAGS} --rm quay.io/ocpmetal/ocp-metal-ui:latest /deploy/deploy_config.sh -u "http://assisted-service.${NAMESPACE}.svc.cluster.local:8090" -i "quay.io/ocpmetal/ocp-metal-ui:${DEPLOY_TAG}" -n "${NAMESPACE}" >"${UI_DEPLOY_FILE}"
kubectl --kubeconfig="${KUBECONFIG}" apply -f "${UI_DEPLOY_FILE}"

print_log "Wait till ui api is ready"
wait_for_url_and_run "$(minikube service "${UI_SERVICE_NAME}" -n "${NAMESPACE}" --url)" "echo \"waiting for ${UI_SERVICE_NAME}\""

print_log "Starting port forwarding for deployment/${UI_SERVICE_NAME}"
wait_for_url_and_run "http://${NODE_IP}:${UI_PORT}" "spawn_port_forwarding_command ${UI_SERVICE_NAME} ${UI_PORT}"

print_log "OCP METAL UI can be reached at http://${NODE_IP}:${UI_PORT}"
print_log "Done"
<filename>src/main/java/com/alipay/api/domain/AlipayDataDataserviceAntdataassetsPublishtaskCreateModel.java
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 发布D2任务节点
*
* @author auto create
* @since 1.0, 2019-04-30 14:33:25
*/
/**
 * Request model for creating (publishing) a D2 task node.
 */
public class AlipayDataDataserviceAntdataassetsPublishtaskCreateModel extends AlipayObject {

    private static final long serialVersionUID = 1335878913992482171L;

    /** GUID of the ODPS table (ODPS表的guid). */
    @ApiField("guid")
    private String guid;

    public String getGuid() {
        return guid;
    }

    public void setGuid(String guid) {
        this.guid = guid;
    }
}
|
from PyQt5 import QtWidgets, QtCore
class GUIManager:
    """Registry of Qt buttons and checkboxes with bulk property setters."""

    def __init__(self):
        self.buttons = []     # registered button widgets
        self.checkboxes = []  # registered checkbox widgets

    def add_button(self, button):
        """Register a button so later bulk styling applies to it."""
        self.buttons.append(button)

    def add_checkbox(self, checkbox):
        """Register a checkbox so later bulk styling applies to it."""
        self.checkboxes.append(checkbox)

    def set_button_properties(self, icon_size, tool_button_style, object_name):
        """Apply icon size, tool-button style and object name to every button.

        icon_size: (width, height) pair used to build a QSize.
        tool_button_style: a Qt.ToolButtonStyle value.
        object_name: Qt object name assigned to every registered button.

        Bug fix: tool_button_style is now actually honoured; previously the
        argument was ignored and Qt.ToolButtonTextBesideIcon was hard-coded.
        """
        for button in self.buttons:
            button.setIconSize(QtCore.QSize(icon_size[0], icon_size[1]))
            button.setToolButtonStyle(tool_button_style)
            button.setObjectName(object_name)

    def set_checkbox_style(self, style_sheet):
        """Apply the given Qt style sheet string to every registered checkbox."""
        for checkbox in self.checkboxes:
            checkbox.setStyleSheet(style_sheet)
#include <resource/scene.h>
#include <resource/mesh.h>
#include <resource/material.h>
#include <resource/texture.h>
#include <utility/profiler.h>
#include <vk_mem_alloc.h>
#include <unordered_set>
#include <gtx/matrix_decompose.hpp>
namespace helios
{
// -----------------------------------------------------------------------------------------------------------------------------------
// Categories of light source supported by the renderer. The numeric value is
// packed into LightData::light_data0.x, so this ordering presumably mirrors a
// shader-side definition -- confirm before reordering.
enum LightType
{
    LIGHT_DIRECTIONAL,
    LIGHT_SPOT,
    LIGHT_POINT,
    LIGHT_ENVIRONMENT_MAP,
    LIGHT_AREA
};
// -----------------------------------------------------------------------------------------------------------------------------------
// Per-material record uploaded to the GPU; packing is documented per field.
struct MaterialData
{
    glm::ivec4 texture_indices0 = glm::ivec4(-1); // x: albedo, y: normals, z: roughness, w: metallic
    glm::ivec4 texture_indices1 = glm::ivec4(-1); // x: emissive, z: roughness_channel, w: metallic_channel
    glm::vec4 albedo;
    glm::vec4 emissive;
    glm::vec4 roughness_metallic;
};

// -----------------------------------------------------------------------------------------------------------------------------------

// Per-light record. The two comment columns describe the packing for analytic
// lights vs. mesh/area lights respectively.
struct LightData
{
    glm::vec4 light_data0; // x: light type, yzw: color | x: light_type, y: mesh_id, z: material_id, w: primitive_offset
    glm::vec4 light_data1; // xyz: direction, w: intensity | x: primitive_count
    glm::vec4 light_data2; // xyz: position, w: radius
    glm::vec4 light_data3; // x: cos_inner, y: cos_outer
};

// -----------------------------------------------------------------------------------------------------------------------------------

// Per-instance record; 'padding' keeps the struct 16-byte aligned for GPU
// buffer layouts.
struct InstanceData
{
    glm::mat4 model_matrix;
    glm::mat4 normal_matrix;
    uint32_t mesh_index;
    float padding[3];
};
// File-scope counter used to hand each Node a unique, monotonically
// increasing id at construction time.
static uint32_t g_node_counter = 0;

// -----------------------------------------------------------------------------------------------------------------------------------

Node::Node(const NodeType& type, const std::string& name) :
    m_type(type), m_name(name), m_id(g_node_counter++)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

Node::~Node()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Default no-op; subclasses that own GPU resources (e.g. MeshNode, IBLNode)
// override this to queue deferred deletion of those resources.
void Node::mid_frame_cleanup()
{
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Attach 'child' under this node and flag the hierarchy as dirty so the next
// update() pass reports SCENE_STATE_HIERARCHY_UPDATED. The child stores a raw
// back-pointer to this node, so the parent must outlive its children.
void Node::add_child(Node::Ptr child)
{
    m_is_heirarchy_dirty = true;

    child->m_parent = this;
    m_children.push_back(child);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Depth-first, pre-order search for the first descendant with the given name.
// Returns nullptr when no such node exists.
Node::Ptr Node::find_child(const std::string& name)
{
    for (auto& node : m_children)
    {
        if (node->m_name == name)
            return node;

        if (auto descendant = node->find_child(name))
            return descendant;
    }

    return nullptr;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Depth-first, pre-order search for the first descendant of the given type.
// Returns nullptr when no such node exists.
Node::Ptr Node::find_child(const NodeType& type)
{
    for (auto& node : m_children)
    {
        if (node->type() == type)
            return node;

        if (auto descendant = node->find_child(type))
            return descendant;
    }

    return nullptr;
}
// -----------------------------------------------------------------------------------------------------------------------------------
void Node::remove_child(const std::string& name)
{
m_is_heirarchy_dirty = true;
int child_to_remove = -1;
for (int i = 0; i < m_children.size(); i++)
{
if (m_children[i]->m_name == name)
{
child_to_remove = i;
break;
}
}
if (child_to_remove != -1)
{
m_children[child_to_remove]->mid_frame_cleanup();
m_children.erase(m_children.begin() + child_to_remove);
}
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Propagate update() to all children; also translates a pending hierarchy
// change into SCENE_STATE_HIERARCHY_UPDATED exactly once.
void Node::update_children(RenderState& render_state)
{
    if (m_is_heirarchy_dirty)
    {
        render_state.m_scene_state = SCENE_STATE_HIERARCHY_UPDATED;
        m_is_heirarchy_dirty = false;
    }

    for (auto& child : m_children)
        child->update(render_state);
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Recursively flag this whole subtree so every node rebuilds its cached
// model matrices on the next update().
void Node::mark_transforms_as_dirty()
{
    m_is_transform_dirty = true;

    for (auto& child : m_children)
        child->mark_transforms_as_dirty();
}
// -----------------------------------------------------------------------------------------------------------------------------------
TransformNode::TransformNode(const NodeType& type, const std::string& name) :
    Node(type, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

TransformNode::~TransformNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Rebuild the cached model matrices (T * R * S) when the transform changed.
// The previous matrix is stashed first -- presumably for temporal effects
// such as motion vectors (confirm). Any rebuild marks the scene hierarchy as
// updated for this frame.
void TransformNode::update(RenderState& render_state)
{
    if (m_is_transform_dirty)
    {
        glm::mat4 R = glm::mat4_cast(m_orientation);
        glm::mat4 S = glm::scale(glm::mat4(1.0f), m_scale);
        glm::mat4 T = glm::translate(glm::mat4(1.0f), m_position);

        m_prev_model_matrix = m_model_matrix;
        // Scale is kept out of this matrix so cameras/normals can use it.
        m_model_matrix_without_scale = T * R;
        m_model_matrix = m_model_matrix_without_scale * S;

        render_state.m_scene_state = SCENE_STATE_HIERARCHY_UPDATED;

        m_is_transform_dirty = false;
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Local +Z axis rotated by this node's orientation.
glm::vec3 TransformNode::forward()
{
    return m_orientation * glm::vec3(0.0f, 0.0f, 1.0f);
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Local +Y axis rotated by this node's orientation.
glm::vec3 TransformNode::up()
{
    return m_orientation * glm::vec3(0.0f, 1.0f, 0.0f);
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Local +X axis rotated by this node's orientation. NOTE(review): in a
// right-handed Y-up convention +X is usually called "right" -- confirm the
// name is intentional.
glm::vec3 TransformNode::left()
{
    return m_orientation * glm::vec3(1.0f, 0.0f, 0.0f);
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Position relative to the parent node.
glm::vec3 TransformNode::local_position()
{
    return m_position;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// World-space position. The parent's *scale-free* matrix is used, so an
// ancestor's scale does not displace descendants -- this convention is used
// consistently in this file.
glm::vec3 TransformNode::global_position()
{
    TransformNode* parent_transform = dynamic_cast<TransformNode*>(m_parent);

    if (parent_transform)
        return parent_transform->m_model_matrix_without_scale * glm::vec4(m_position, 1.0f);
    else
        return m_position;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// World transform including this node's own scale. The parent contribution
// deliberately omits the parent's scale (scale does not propagate down).
glm::mat4 TransformNode::global_transform()
{
    TransformNode* parent_transform = dynamic_cast<TransformNode*>(m_parent);

    if (parent_transform)
        return parent_transform->m_model_matrix_without_scale * m_model_matrix;
    else
        return m_model_matrix;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// World transform with scale excluded at every level; used for cameras and
// normal matrices.
glm::mat4 TransformNode::global_transform_without_scale()
{
    TransformNode* parent_transform = dynamic_cast<TransformNode*>(m_parent);

    if (parent_transform)
        return parent_transform->m_model_matrix_without_scale * m_model_matrix_without_scale;
    else
        return m_model_matrix_without_scale;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Cached local T*R*S matrix (valid only after update() has run).
glm::mat4 TransformNode::local_transform()
{
    return m_model_matrix;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Rotation + translation only; with scale excluded this is equivalent to the
// usual inverse-transpose for transforming normals (translation is
// irrelevant for direction vectors).
glm::mat4 TransformNode::normal_matrix()
{
    return global_transform_without_scale();
}

// -----------------------------------------------------------------------------------------------------------------------------------

glm::quat TransformNode::orientation()
{
    return m_orientation;
}

// -----------------------------------------------------------------------------------------------------------------------------------

glm::vec3 TransformNode::scale()
{
    return m_scale;
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Overwrite position/orientation/scale by decomposing an explicit local
// transform. Skew and perspective components of the matrix are discarded.
void TransformNode::set_from_local_transform(const glm::mat4& transform)
{
    mark_transforms_as_dirty();

    glm::mat4 local_transform = transform;

    glm::vec3 out_skew;
    glm::vec4 out_persp;

    glm::decompose(local_transform, m_scale, m_orientation, m_position, out_skew, out_persp);
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Same as set_from_local_transform, but 'transform' is given in world space:
// it is first brought into parent-local space via the inverse of the
// parent's scale-free matrix (matching the no-scale-propagation convention).
void TransformNode::set_from_global_transform(const glm::mat4& transform)
{
    mark_transforms_as_dirty();

    TransformNode* parent_transform = dynamic_cast<TransformNode*>(m_parent);

    glm::mat4 local_transform = transform;

    if (parent_transform)
    {
        glm::mat4 inverse_parent_transform = glm::inverse(parent_transform->m_model_matrix_without_scale);
        local_transform = inverse_parent_transform * transform;
    }

    glm::vec3 out_skew;
    glm::vec4 out_persp;

    glm::decompose(local_transform, m_scale, m_orientation, m_position, out_skew, out_persp);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Replace the orientation with an explicit quaternion.
void TransformNode::set_orientation(const glm::quat& q)
{
    mark_transforms_as_dirty();

    m_orientation = q;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Build the orientation from Euler angles in degrees, composed as
// yaw * pitch * roll (Y, then X, then Z).
void TransformNode::set_orientation_from_euler_yxz(const glm::vec3& e)
{
    mark_transforms_as_dirty();

    glm::quat pitch = glm::quat(glm::vec3(glm::radians(e.x), glm::radians(0.0f), glm::radians(0.0f)));
    glm::quat yaw = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(e.y), glm::radians(0.0f)));
    glm::quat roll = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(0.0f), glm::radians(e.z)));

    m_orientation = yaw * pitch * roll;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Same as above, but composed as pitch * yaw * roll (X, then Y, then Z).
void TransformNode::set_orientation_from_euler_xyz(const glm::vec3& e)
{
    mark_transforms_as_dirty();

    glm::quat pitch = glm::quat(glm::vec3(glm::radians(e.x), glm::radians(0.0f), glm::radians(0.0f)));
    glm::quat yaw = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(e.y), glm::radians(0.0f)));
    glm::quat roll = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(0.0f), glm::radians(e.z)));

    m_orientation = pitch * yaw * roll;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Set the position relative to the parent.
void TransformNode::set_position(const glm::vec3& position)
{
    mark_transforms_as_dirty();

    m_position = position;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Set the local scale. Scale does not propagate to children (see
// global_transform()).
void TransformNode::set_scale(const glm::vec3& scale)
{
    mark_transforms_as_dirty();

    m_scale = scale;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Translate by 'displacement' in parent space.
void TransformNode::move(const glm::vec3& displacement)
{
    mark_transforms_as_dirty();

    m_position += displacement;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Apply an incremental yaw-pitch-roll rotation (degrees). The delta is
// right-multiplied, i.e. the rotation is applied in the node's local frame.
void TransformNode::rotate_euler_yxz(const glm::vec3& e)
{
    mark_transforms_as_dirty();

    glm::quat pitch = glm::quat(glm::vec3(glm::radians(e.x), glm::radians(0.0f), glm::radians(0.0f)));
    glm::quat yaw = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(e.y), glm::radians(0.0f)));
    glm::quat roll = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(0.0f), glm::radians(e.z)));

    glm::quat delta = yaw * pitch * roll;
    m_orientation = m_orientation * delta;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Same as rotate_euler_yxz, but the delta is composed as pitch * yaw * roll.
void TransformNode::rotate_euler_xyz(const glm::vec3& e)
{
    mark_transforms_as_dirty();

    glm::quat pitch = glm::quat(glm::vec3(glm::radians(e.x), glm::radians(0.0f), glm::radians(0.0f)));
    glm::quat yaw = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(e.y), glm::radians(0.0f)));
    glm::quat roll = glm::quat(glm::vec3(glm::radians(0.0f), glm::radians(0.0f), glm::radians(e.z)));

    glm::quat delta = pitch * yaw * roll;
    m_orientation = m_orientation * delta;
}
// -----------------------------------------------------------------------------------------------------------------------------------
RootNode::RootNode(const std::string& name) :
    TransformNode(NODE_ROOT, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

RootNode::~RootNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Entry point of the per-frame traversal: refresh own transform, then recurse
// into children. A disabled root skips the entire scene.
void RootNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        TransformNode::update(render_state);
        update_children(render_state);
    }
}

// -----------------------------------------------------------------------------------------------------------------------------------

MeshNode::MeshNode(const std::string& name) :
    TransformNode(NODE_MESH, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

MeshNode::~MeshNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Registers this node in the frame's mesh list (only when a mesh is actually
// attached), then recurses into children.
void MeshNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        TransformNode::update(render_state);

        if (m_mesh)
            render_state.m_meshes.push_back(this);

        update_children(render_state);
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Queue the mesh (and any material override) for deferred deletion on the
// owning backend so GPU resources are not destroyed while a frame may still
// reference them.
void MeshNode::mid_frame_cleanup()
{
    if (m_mesh)
    {
        auto backend = m_mesh->backend().lock();

        if (backend)
            backend->queue_object_deletion(m_mesh);
    }

    mid_frame_material_cleanup();
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Queue the material override for deferred deletion.
void MeshNode::mid_frame_material_cleanup()
{
    if (m_material_override)
    {
        auto backend = m_material_override->backend().lock();

        // Fix: guard against an expired backend, matching mid_frame_cleanup.
        // Previously this dereferenced a null shared_ptr when the backend
        // had already been destroyed.
        if (backend)
            backend->queue_object_deletion(m_material_override);
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Replace the attached mesh. The previous mesh (and material override) is
// queued for deferred deletion first, then the per-submesh material-index
// buffer is rebuilt to match the new mesh.
void MeshNode::set_mesh(std::shared_ptr<Mesh> mesh)
{
    mid_frame_cleanup();
    m_mesh = mesh;
    create_instance_data_buffer();
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Replace the material override; the old override is queued for deletion.
void MeshNode::set_material_override(std::shared_ptr<Material> material_override)
{
    mid_frame_material_cleanup();
    m_material_override = material_override;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// (Re)allocate the CPU-visible, persistently-mapped buffer that stores one
// glm::uvec2 of material indices per sub-mesh. The old buffer is queued for
// deferred deletion before the replacement is created.
void MeshNode::create_instance_data_buffer()
{
    if (m_mesh)
    {
        auto backend = m_mesh->backend().lock();

        if (backend)
        {
            backend->queue_object_deletion(m_material_indices_buffer);
            m_material_indices_buffer = vk::Buffer::create(backend, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, sizeof(glm::uvec2) * (m_mesh->sub_meshes().size()), VMA_MEMORY_USAGE_CPU_TO_GPU, VMA_ALLOCATION_CREATE_MAPPED_BIT);
        }
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
DirectionalLightNode::DirectionalLightNode(const std::string& name) :
    TransformNode(NODE_DIRECTIONAL_LIGHT, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

DirectionalLightNode::~DirectionalLightNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Registers this directional light for the frame, then recurses.
void DirectionalLightNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        TransformNode::update(render_state);
        render_state.m_directional_lights.push_back(this);
        update_children(render_state);
    }
}

// -----------------------------------------------------------------------------------------------------------------------------------

SpotLightNode::SpotLightNode(const std::string& name) :
    TransformNode(NODE_SPOT_LIGHT, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

SpotLightNode::~SpotLightNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Registers this spot light for the frame, then recurses.
void SpotLightNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        TransformNode::update(render_state);
        render_state.m_spot_lights.push_back(this);
        update_children(render_state);
    }
}

// -----------------------------------------------------------------------------------------------------------------------------------

PointLightNode::PointLightNode(const std::string& name) :
    TransformNode(NODE_POINT_LIGHT, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

PointLightNode::~PointLightNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Registers this point light for the frame, then recurses.
void PointLightNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        TransformNode::update(render_state);
        render_state.m_point_lights.push_back(this);
        update_children(render_state);
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
CameraNode::CameraNode(const std::string& name) :
    TransformNode(NODE_CAMERA, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

CameraNode::~CameraNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Rebuilds the projection matrix from the current viewport aspect ratio each
// frame and derives the view matrix as the inverse of the scale-free world
// transform. The first enabled camera reached during traversal becomes the
// frame's active camera.
void CameraNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        TransformNode::update(render_state);

        m_projection_matrix = glm::perspective(glm::radians(m_fov), float(render_state.viewport_width()) / float(render_state.viewport_height()), m_near_plane, m_far_plane);
        m_view_matrix = glm::inverse(global_transform_without_scale());

        if (!render_state.m_camera)
            render_state.m_camera = this;

        update_children(render_state);
    }
}

// -----------------------------------------------------------------------------------------------------------------------------------

// The camera looks down its local -Z axis, hence the negation of the node's
// forward vector.
glm::vec3 CameraNode::camera_forward()
{
    return -forward();
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Negated to match the camera's mirrored axis convention; see
// camera_forward().
glm::vec3 CameraNode::camera_left()
{
    return -left();
}
// -----------------------------------------------------------------------------------------------------------------------------------
IBLNode::IBLNode(const std::string& name) :
    Node(NODE_IBL, name)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

IBLNode::~IBLNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Registers this node as the frame's image-based-lighting environment map;
// only the first enabled IBL node encountered is used.
void IBLNode::update(RenderState& render_state)
{
    if (m_is_enabled)
    {
        if (!render_state.m_ibl_environment_map)
            render_state.m_ibl_environment_map = this;

        update_children(render_state);
    }
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Swap in a new cube map. The previous one is queued for deferred deletion
// first so it is not destroyed while a frame may still reference it.
void IBLNode::set_image(std::shared_ptr<TextureCube> image)
{
    mid_frame_cleanup();
    m_image = image;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Queue the current cube map for deferred deletion on the owning backend (if
// the backend is still alive).
void IBLNode::mid_frame_cleanup()
{
    if (m_image)
    {
        auto backend = m_image->backend().lock();

        if (backend)
            backend->queue_object_deletion(m_image);
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Pre-reserve the per-frame lists so scene traversal never reallocates.
// NOTE(review): 100000 entries for each *light* list looks copy-pasted from
// the mesh list and is likely far larger than needed -- confirm.
RenderState::RenderState()
{
    m_meshes.reserve(100000);
    m_directional_lights.reserve(100000);
    m_spot_lights.reserve(100000);
    m_point_lights.reserve(100000);
}

// -----------------------------------------------------------------------------------------------------------------------------------

RenderState::~RenderState()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Reset all per-frame state: the gathered scene objects, camera/environment
// pointers, descriptor-set and command-buffer handles, and the scene status.
void RenderState::clear()
{
    m_meshes.clear();
    m_directional_lights.clear();
    m_spot_lights.clear();
    m_point_lights.clear();
    m_camera = nullptr;
    m_ibl_environment_map = nullptr;
    m_read_image_ds = nullptr;
    m_write_image_ds = nullptr;
    m_scene_ds = nullptr;
    m_cmd_buffer = nullptr;
    m_scene = nullptr;
    m_vbo_ds = nullptr;
    m_ibo_ds = nullptr;
    m_material_indices_ds = nullptr;
    m_texture_ds = nullptr;
    m_ray_debug_ds = nullptr;
    m_num_lights = 0;
    m_scene_state = SCENE_STATE_READY;
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Begin a new frame: wipe last frame's state, then record the viewport
// dimensions and the command buffer to record into.
void RenderState::setup(uint32_t width, uint32_t height, vk::CommandBuffer::Ptr cmd_buffer)
{
    clear();

    m_viewport_width = width;
    m_viewport_height = height;
    m_cmd_buffer = cmd_buffer;
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Factory helper returning a shared_ptr-managed Scene. Uses bare 'new' --
// presumably because the constructor is not publicly accessible, which rules
// out std::make_shared (confirm against the header).
Scene::Ptr Scene::create(vk::Backend::Ptr backend, const std::string& name, Node::Ptr root, const std::string& path)
{
    return std::shared_ptr<Scene>(new Scene(backend, name, root, path));
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Builds all persistent GPU resources for a scene: the TLAS and its instance
// buffers, the descriptor pool/sets, and the persistently-mapped light,
// material and instance data buffers.
// NOTE(review): the base class vk::Object(backend) is listed last in the
// initializer list but is constructed first by C++ rules; consider reordering
// the list to silence -Wreorder style warnings.
Scene::Scene(vk::Backend::Ptr backend, const std::string& name, Node::Ptr root, const std::string& path) :
    m_name(name), m_path(path), m_backend(backend), m_root(root), vk::Object(backend)
{
    // Allocate device instance buffer.
    // Fix: VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT appeared twice in the
    // usage mask; the duplicate was harmless (bitwise OR) but redundant.
    m_tlas.instance_buffer_device = vk::Buffer::create(backend, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT | VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR | VK_BUFFER_USAGE_TRANSFER_DST_BIT, sizeof(VkAccelerationStructureInstanceKHR) * MAX_SCENE_MESH_INSTANCE_COUNT, VMA_MEMORY_USAGE_GPU_ONLY, 0);

    VkDeviceOrHostAddressConstKHR instance_device_address {};
    instance_device_address.deviceAddress = m_tlas.instance_buffer_device->device_address();

    // Allocate host (staging) instance buffer, persistently mapped so
    // create_gpu_resources() can write instances each frame.
    m_tlas.instance_buffer_host = vk::Buffer::create(backend, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, sizeof(VkAccelerationStructureInstanceKHR) * MAX_SCENE_MESH_INSTANCE_COUNT, VMA_MEMORY_USAGE_CPU_ONLY, VMA_ALLOCATION_CREATE_MAPPED_BIT);

    // Create TLAS over a single instances-type geometry backed by the device buffer.
    VkAccelerationStructureGeometryKHR tlas_geometry;
    HELIOS_ZERO_MEMORY(tlas_geometry);

    tlas_geometry.sType                              = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR;
    tlas_geometry.geometryType                       = VK_GEOMETRY_TYPE_INSTANCES_KHR;
    tlas_geometry.geometry.instances.sType           = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR;
    tlas_geometry.geometry.instances.arrayOfPointers = VK_FALSE;
    tlas_geometry.geometry.instances.data            = instance_device_address;

    vk::AccelerationStructure::Desc desc;

    desc.set_geometry_count(1);
    desc.set_geometries({ tlas_geometry });
    desc.set_max_primitive_counts({ MAX_SCENE_MESH_INSTANCE_COUNT });
    desc.set_type(VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR);
    // ALLOW_UPDATE so the TLAS can be refitted when transforms change.
    desc.set_flags(VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR | VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR);

    m_tlas.tlas = vk::AccelerationStructure::create(backend, desc);

    // Allocate scratch buffer sized for TLAS builds.
    m_tlas.scratch_buffer = vk::Buffer::create(backend, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, m_tlas.tlas->build_sizes().buildScratchSize, VMA_MEMORY_USAGE_GPU_ONLY, 0);

    // Descriptor pool sized for the scene set plus the variable-count arrays below.
    vk::DescriptorPool::Desc dp_desc;

    dp_desc.set_max_sets(25)
        .add_pool_size(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 10)
        .add_pool_size(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_SCENE_MATERIAL_TEXTURE_COUNT)
        .add_pool_size(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 5 * MAX_SCENE_MESH_INSTANCE_COUNT)
        .add_pool_size(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 10);

    m_descriptor_pool = vk::DescriptorPool::create(backend, dp_desc);

    // Allocate descriptor sets. The VBO/IBO/material-index sets use variable
    // descriptor counts (one per mesh); the texture set is resized afterwards.
    VkDescriptorSetVariableDescriptorCountAllocateInfo variable_ds_alloc_info;
    HELIOS_ZERO_MEMORY(variable_ds_alloc_info);

    uint32_t variable_desc_count = MAX_SCENE_MESH_INSTANCE_COUNT;

    variable_ds_alloc_info.sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO;
    variable_ds_alloc_info.descriptorSetCount = 1;
    variable_ds_alloc_info.pDescriptorCounts  = &variable_desc_count;

    m_scene_descriptor_set = vk::DescriptorSet::create(backend, backend->scene_descriptor_set_layout(), m_descriptor_pool);
    m_scene_descriptor_set->set_name("Scene Descriptor Set");

    m_vbo_descriptor_set = vk::DescriptorSet::create(backend, backend->buffer_array_descriptor_set_layout(), m_descriptor_pool, &variable_ds_alloc_info);
    m_vbo_descriptor_set->set_name("VBO Descriptor Set");

    m_ibo_descriptor_set = vk::DescriptorSet::create(backend, backend->buffer_array_descriptor_set_layout(), m_descriptor_pool, &variable_ds_alloc_info);
    m_ibo_descriptor_set->set_name("IBO Descriptor Set");

    m_material_indices_descriptor_set = vk::DescriptorSet::create(backend, backend->buffer_array_descriptor_set_layout(), m_descriptor_pool, &variable_ds_alloc_info);
    m_material_indices_descriptor_set->set_name("Material Indices Descriptor Set");

    // Texture array uses its own (larger) variable count.
    variable_desc_count = MAX_SCENE_MATERIAL_TEXTURE_COUNT;

    m_textures_descriptor_set = vk::DescriptorSet::create(backend, backend->combined_sampler_array_descriptor_set_layout(), m_descriptor_pool, &variable_ds_alloc_info);
    m_textures_descriptor_set->set_name("Textures Descriptor Set");

    // Create light data buffer (persistently mapped, CPU writes each frame).
    m_light_data_buffer = vk::Buffer::create(backend, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, sizeof(LightData) * MAX_SCENE_LIGHT_COUNT, VMA_MEMORY_USAGE_CPU_TO_GPU, VMA_ALLOCATION_CREATE_MAPPED_BIT);

    // Create material data buffer.
    m_material_data_buffer = vk::Buffer::create(backend, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, sizeof(MaterialData) * MAX_SCENE_MATERIAL_COUNT, VMA_MEMORY_USAGE_CPU_TO_GPU, VMA_ALLOCATION_CREATE_MAPPED_BIT);

    // Create instance data buffer.
    m_instance_data_buffer = vk::Buffer::create(backend, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, sizeof(InstanceData) * MAX_SCENE_MESH_INSTANCE_COUNT, VMA_MEMORY_USAGE_CPU_TO_GPU, VMA_ALLOCATION_CREATE_MAPPED_BIT);

    // Bind the buffers/TLAS that never change into the scene descriptor set.
    update_static_descriptors();

    m_sky_model = std::unique_ptr<HosekWilkieSkyModel>(new HosekWilkieSkyModel(backend));
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Explicitly release GPU resources in roughly reverse order of creation so
// descriptor sets are destroyed before their pool and TLAS-related buffers
// before the TLAS itself.
Scene::~Scene()
{
    m_sky_model.reset();
    m_textures_descriptor_set.reset();
    m_material_indices_descriptor_set.reset();
    m_ibo_descriptor_set.reset();
    m_vbo_descriptor_set.reset();
    m_scene_descriptor_set.reset();
    m_descriptor_pool.reset();
    m_tlas.scratch_buffer.reset();
    m_tlas.instance_buffer_host.reset();
    // Fix: the device-side instance buffer is created in the constructor but
    // was previously never explicitly reset here; release it with its host
    // counterpart so teardown order stays deterministic.
    m_tlas.instance_buffer_device.reset();
    m_tlas.tlas.reset();
    m_light_data_buffer.reset();
    m_material_data_buffer.reset();
    // Fix: likewise, the instance data buffer was missing from the explicit
    // teardown sequence.
    m_instance_data_buffer.reset();
    m_root.reset();
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Per-frame scene update: publishes this scene's descriptor sets to the
// render state, walks the node hierarchy to gather renderables/lights,
// computes the total light count, and finally uploads GPU resources.
void Scene::update(RenderState& render_state)
{
    auto backend = m_backend.lock();

    // Expose the scene's descriptor sets to the renderer for this frame.
    render_state.m_scene_ds            = m_scene_descriptor_set;
    render_state.m_vbo_ds              = m_vbo_descriptor_set;
    render_state.m_ibo_ds              = m_ibo_descriptor_set;
    render_state.m_material_indices_ds = m_material_indices_descriptor_set;
    render_state.m_texture_ds          = m_textures_descriptor_set;
    render_state.m_scene               = this;

    {
        HELIOS_SCOPED_SAMPLE("Gather Render State");
        // Recursively collects meshes, lights, camera etc. into render_state.
        m_root->update(render_state);
    }

    // Total lights = emissive (area) lights discovered last rebuild + all analytic lights.
    render_state.m_num_lights = m_num_area_lights + render_state.m_directional_lights.size() + render_state.m_spot_lights.size() + render_state.m_point_lights.size();

    // One extra light slot for the environment: either an IBL probe, or a
    // procedural sky driven by the first directional light.
    if (render_state.ibl_environment_map() && render_state.ibl_environment_map()->image())
        render_state.m_num_lights++;
    else if (render_state.m_directional_lights.size() > 0)
    {
        render_state.m_num_lights++;
        // Sun direction for the sky model is the negated light forward vector.
        m_sky_model->update(render_state.cmd_buffer(), -render_state.m_directional_lights[0]->forward());
    }

    if (m_force_update)
    {
        // An externally requested refresh is treated like a hierarchy change
        // so that create_gpu_resources() performs a full rebuild.
        render_state.m_scene_state = SCENE_STATE_HIERARCHY_UPDATED;
        m_force_update             = false;
    }

    {
        HELIOS_SCOPED_SAMPLE("Upload GPU Resources");
        create_gpu_resources(render_state);
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Uploads scene data to the GPU. Two paths:
//   * SCENE_STATE_HIERARCHY_UPDATED: full rebuild — rewrites the VBO/IBO,
//     texture and material-index descriptor arrays, repacks material data and
//     rediscovers emissive (area) lights.
//   * Any non-READY state: refreshes per-instance data (TLAS instances +
//     instance/light storage buffers) from the gathered render state.
// Does nothing when the scene state is READY.
void Scene::create_gpu_resources(RenderState& render_state)
{
    if (render_state.m_scene_state != SCENE_STATE_READY)
    {
        // Copy lights
        uint32_t   gpu_light_counter = 0;
        LightData* light_buffer      = (LightData*)m_light_data_buffer->mapped_ptr();
        if (render_state.m_scene_state == SCENE_STATE_HIERARCHY_UPDATED)
        {
            // Full rebuild: area light count is re-derived from emissive materials.
            m_num_area_lights = 0;
            auto backend = m_backend.lock();
            // Full GPU stall before rewriting descriptors that may be in flight.
            backend->wait_idle();
            // Dedup sets keyed by object id so shared meshes/materials/textures
            // are only uploaded once.
            std::unordered_set<uint32_t> processed_meshes;
            std::unordered_set<uint32_t> processed_materials;
            std::unordered_set<uint32_t> processed_textures;
            // material id -> index into the packed GPU material array.
            std::unordered_map<uint32_t, uint32_t> global_material_indices;
            uint32_t mesh_index_counter = 0;
            std::vector<VkDescriptorBufferInfo> vbo_descriptors;
            std::vector<VkDescriptorBufferInfo> ibo_descriptors;
            std::vector<VkDescriptorImageInfo> image_descriptors;
            std::vector<VkDescriptorBufferInfo> material_indices_descriptors;
            uint32_t gpu_material_counter = 0;
            MaterialData* material_buffer = (MaterialData*)m_material_data_buffer->mapped_ptr();
            for (int mesh_node_idx = 0; mesh_node_idx < render_state.m_meshes.size(); mesh_node_idx++)
            {
                auto& mesh_node = render_state.m_meshes[mesh_node_idx];
                auto& mesh = mesh_node->mesh();
                const auto& materials = mesh->materials();
                const auto& submeshes = mesh->sub_meshes();
                // First time this mesh is seen: register its VBO/IBO and pack
                // the materials of all its submeshes.
                if (processed_meshes.find(mesh->id()) == processed_meshes.end())
                {
                    processed_meshes.insert(mesh->id());
                    m_global_mesh_indices[mesh->id()] = mesh_index_counter++;
                    VkDescriptorBufferInfo ibo_info;
                    ibo_info.buffer = mesh->index_buffer()->handle();
                    ibo_info.offset = 0;
                    ibo_info.range = VK_WHOLE_SIZE;
                    ibo_descriptors.push_back(ibo_info);
                    VkDescriptorBufferInfo vbo_info;
                    vbo_info.buffer = mesh->vertex_buffer()->handle();
                    vbo_info.offset = 0;
                    vbo_info.range = VK_WHOLE_SIZE;
                    vbo_descriptors.push_back(vbo_info);
                    for (uint32_t i = 0; i < submeshes.size(); i++)
                    {
                        const SubMesh& submesh = submeshes[i];
                        auto material = materials[submesh.mat_idx];
                        // A node-level override replaces the mesh's own material.
                        if (mesh_node->material_override())
                            material = mesh_node->material_override();
                        if (processed_materials.find(material->id()) == processed_materials.end())
                        {
                            processed_materials.insert(material->id());
                            MaterialData& material_data = material_buffer[gpu_material_counter++];
                            // texture_indices of -1 mean "use the constant value instead".
                            material_data.texture_indices0 = glm::ivec4(-1);
                            material_data.texture_indices1 = glm::ivec4(-1);
                            material_data.albedo = glm::vec4(0.0f);
                            material_data.emissive = glm::vec4(0.0f);
                            material_data.roughness_metallic = glm::vec4(0.0f);
                            // Fill GPUMaterial
                            if (material->albedo_texture())
                            {
                                auto texture = material->albedo_texture();
                                if (processed_textures.find(texture->id()) == processed_textures.end())
                                {
                                    processed_textures.insert(texture->id());
                                    VkDescriptorImageInfo image_info;
                                    image_info.sampler = backend->trilinear_sampler()->handle();
                                    image_info.imageView = texture->image_view()->handle();
                                    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
                                    // .x = albedo texture slot in the bindless array.
                                    material_data.texture_indices0.x = image_descriptors.size();
                                    image_descriptors.push_back(image_info);
                                }
                            }
                            else
                            {
                                material_data.albedo = material->albedo_value();
                                // Convert from sRGB to Linear
                                material_data.albedo = glm::vec4(glm::pow(glm::vec3(material_data.albedo[0], material_data.albedo[1], material_data.albedo[2]), glm::vec3(2.2f)), material_data.albedo.a);
                            }
                            if (material->normal_texture())
                            {
                                auto texture = material->normal_texture();
                                if (processed_textures.find(texture->id()) == processed_textures.end())
                                {
                                    processed_textures.insert(texture->id());
                                    VkDescriptorImageInfo image_info;
                                    image_info.sampler = backend->trilinear_sampler()->handle();
                                    image_info.imageView = texture->image_view()->handle();
                                    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
                                    // .y = normal map slot.
                                    material_data.texture_indices0.y = image_descriptors.size();
                                    image_descriptors.push_back(image_info);
                                }
                            }
                            if (material->roughness_texture())
                            {
                                auto texture = material->roughness_texture();
                                if (processed_textures.find(texture->id()) == processed_textures.end())
                                {
                                    processed_textures.insert(texture->id());
                                    VkDescriptorImageInfo image_info;
                                    image_info.sampler = backend->trilinear_sampler()->handle();
                                    image_info.imageView = texture->image_view()->handle();
                                    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
                                    // .z = roughness texture slot; texture_indices1.z carries the array layer.
                                    material_data.texture_indices0.z = image_descriptors.size();
                                    material_data.texture_indices1.z = material->roughness_texture_info().array_index;
                                    image_descriptors.push_back(image_info);
                                }
                            }
                            else
                                material_data.roughness_metallic.x = material->roughness_value();
                            if (material->metallic_texture())
                            {
                                auto texture = material->metallic_texture();
                                if (processed_textures.find(texture->id()) == processed_textures.end())
                                {
                                    processed_textures.insert(texture->id());
                                    VkDescriptorImageInfo image_info;
                                    image_info.sampler = backend->trilinear_sampler()->handle();
                                    image_info.imageView = texture->image_view()->handle();
                                    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
                                    // .w = metallic texture slot; texture_indices1.w carries the array layer.
                                    material_data.texture_indices0.w = image_descriptors.size();
                                    material_data.texture_indices1.w = material->metallic_texture_info().array_index;
                                    image_descriptors.push_back(image_info);
                                }
                            }
                            else
                                material_data.roughness_metallic.y = material->metallic_value();
                            if (material->emissive_texture())
                            {
                                auto texture = material->emissive_texture();
                                if (processed_textures.find(texture->id()) == processed_textures.end())
                                {
                                    processed_textures.insert(texture->id());
                                    VkDescriptorImageInfo image_info;
                                    image_info.sampler = backend->trilinear_sampler()->handle();
                                    image_info.imageView = texture->image_view()->handle();
                                    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
                                    // texture_indices1.x = emissive texture slot.
                                    material_data.texture_indices1.x = image_descriptors.size();
                                    image_descriptors.push_back(image_info);
                                }
                            }
                            else
                                material_data.emissive = material->emissive_value();
                            global_material_indices[material->id()] = gpu_material_counter - 1;
                        }
                        // Emissive materials become area lights: data0 packs
                        // (type, mesh node index, material index, first triangle),
                        // data1.x = triangle count.
                        if (material->is_emissive())
                        {
                            m_num_area_lights++;
                            LightData& light_data = light_buffer[gpu_light_counter++];
                            light_data.light_data0 = glm::vec4(float(LIGHT_AREA), float(mesh_node_idx), float(global_material_indices[material->id()]), float(submesh.base_index / 3));
                            light_data.light_data1 = glm::vec4(float(submesh.index_count / 3), 0.0f, 0.0f, 0.0f);
                        }
                    }
                }
                VkDescriptorBufferInfo material_indice_info;
                material_indice_info.buffer = mesh_node->material_indices_buffer()->handle();
                material_indice_info.offset = 0;
                material_indice_info.range = VK_WHOLE_SIZE;
                material_indices_descriptors.push_back(material_indice_info);
                glm::uvec2* primitive_offsets_material_indices = (glm::uvec2*)mesh_node->material_indices_buffer()->mapped_ptr();
                // Set submesh materials
                // Each entry maps (first triangle of submesh -> global material index).
                for (uint32_t i = 0; i < submeshes.size(); i++)
                {
                    const auto& submesh = submeshes[i];
                    auto material = materials[submeshes[i].mat_idx];
                    if (mesh_node->material_override())
                        material = mesh_node->material_override();
                    glm::uvec2 pair = glm::uvec2(submesh.base_index / 3, global_material_indices[material->id()]);
                    primitive_offsets_material_indices[i] = pair;
                }
            }
            // Environment map binding: IBL probe if present, else procedural
            // sky (when a directional light exists), else a default cubemap.
            VkDescriptorImageInfo environment_map_info;
            environment_map_info.sampler = backend->bilinear_sampler()->handle();
            if (render_state.ibl_environment_map() && render_state.ibl_environment_map()->image())
                environment_map_info.imageView = render_state.ibl_environment_map()->image()->image_view()->handle();
            else
            {
                if (render_state.m_directional_lights.size() > 0)
                    environment_map_info.imageView = m_sky_model->cubemap()->handle();
                else
                    environment_map_info.imageView = backend->default_cubemap()->handle();
            }
            environment_map_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            // Batch all descriptor writes into one vkUpdateDescriptorSets call.
            std::vector<VkWriteDescriptorSet> write_datas;
            VkWriteDescriptorSet write_data;
            HELIOS_ZERO_MEMORY(write_data);
            // Binding 4 of the scene set: environment cubemap.
            write_data.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            write_data.descriptorCount = 1;
            write_data.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            write_data.pImageInfo = &environment_map_info;
            write_data.dstBinding = 4;
            write_data.dstSet = m_scene_descriptor_set->handle();
            write_datas.push_back(write_data);
            HELIOS_ZERO_MEMORY(write_data);
            if (vbo_descriptors.size() > 0)
            {
                write_data.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
                write_data.descriptorCount = vbo_descriptors.size();
                write_data.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
                write_data.pBufferInfo = vbo_descriptors.data();
                write_data.dstBinding = 0;
                write_data.dstSet = m_vbo_descriptor_set->handle();
                write_datas.push_back(write_data);
            }
            if (ibo_descriptors.size() > 0)
            {
                HELIOS_ZERO_MEMORY(write_data);
                write_data.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
                write_data.descriptorCount = ibo_descriptors.size();
                write_data.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
                write_data.pBufferInfo = ibo_descriptors.data();
                write_data.dstBinding = 0;
                write_data.dstSet = m_ibo_descriptor_set->handle();
                write_datas.push_back(write_data);
            }
            if (material_indices_descriptors.size() > 0)
            {
                HELIOS_ZERO_MEMORY(write_data);
                write_data.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
                write_data.descriptorCount = material_indices_descriptors.size();
                write_data.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
                write_data.pBufferInfo = material_indices_descriptors.data();
                write_data.dstBinding = 0;
                write_data.dstSet = m_material_indices_descriptor_set->handle();
                write_datas.push_back(write_data);
            }
            if (image_descriptors.size() > 0)
            {
                HELIOS_ZERO_MEMORY(write_data);
                write_data.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
                write_data.descriptorCount = image_descriptors.size();
                write_data.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
                write_data.pImageInfo = image_descriptors.data();
                write_data.dstBinding = 0;
                write_data.dstSet = m_textures_descriptor_set->handle();
                write_datas.push_back(write_data);
            }
            if (write_datas.size() > 0)
                vkUpdateDescriptorSets(backend->device(), write_datas.size(), write_datas.data(), 0, nullptr);
        }
        // Per-frame path (runs for any non-READY state): write per-instance
        // TLAS entries and instance data from the gathered meshes.
        InstanceData* instance_buffer = (InstanceData*)m_instance_data_buffer->mapped_ptr();
        VkAccelerationStructureInstanceKHR* geometry_instance_buffer = (VkAccelerationStructureInstanceKHR*)m_tlas.instance_buffer_host->mapped_ptr();
        for (int mesh_node_idx = 0; mesh_node_idx < render_state.m_meshes.size(); mesh_node_idx++)
        {
            auto& mesh_node = render_state.m_meshes[mesh_node_idx];
            auto& mesh = mesh_node->mesh();
            // Copy geometry instance data
            VkAccelerationStructureInstanceKHR& rt_instance = geometry_instance_buffer[mesh_node_idx];
            // Vulkan expects a row-major 3x4 transform; glm stores column-major,
            // hence the transpose.
            glm::mat3x4 transform = glm::mat3x4(glm::transpose(mesh_node->global_transform()));
            memcpy(&rt_instance.transform, &transform, sizeof(rt_instance.transform));
            // gl_InstanceCustomIndex in shaders indexes the instance data buffer.
            rt_instance.instanceCustomIndex = mesh_node_idx;
            rt_instance.mask = 0xFF;
            rt_instance.instanceShaderBindingTableRecordOffset = 0;
            rt_instance.flags = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;
            rt_instance.accelerationStructureReference = mesh->acceleration_structure()->device_address();
            // Update instance data
            InstanceData& instance_data = instance_buffer[mesh_node_idx];
            // Set mesh data index
            instance_data.mesh_index = m_global_mesh_indices[mesh->id()];
            instance_data.model_matrix = mesh_node->global_transform();
            instance_data.normal_matrix = mesh_node->normal_matrix();
        }
        // Append analytic lights after the area lights packed above.
        // The environment/sky contributes one LIGHT_ENVIRONMENT_MAP entry.
        if ((render_state.ibl_environment_map() && render_state.ibl_environment_map()->image()) || render_state.m_directional_lights.size() > 0)
        {
            LightData& light_data = light_buffer[gpu_light_counter++];
            light_data.light_data0 = glm::vec4(float(LIGHT_ENVIRONMENT_MAP), 0.0f, 0.0f, 0.0f);
        }
        for (int i = 0; i < render_state.m_directional_lights.size(); i++)
        {
            auto light = render_state.m_directional_lights[i];
            LightData& light_data = light_buffer[gpu_light_counter++];
            // data0 = (type, color rgb), data1 = (direction, intensity), data2.w = radius.
            light_data.light_data0 = glm::vec4(float(LIGHT_DIRECTIONAL), light->color());
            light_data.light_data1 = glm::vec4(light->forward(), light->intensity());
            light_data.light_data2 = glm::vec4(0.0f, 0.0f, 0.0f, light->radius());
        }
        for (int i = 0; i < render_state.m_point_lights.size(); i++)
        {
            auto light = render_state.m_point_lights[i];
            LightData& light_data = light_buffer[gpu_light_counter++];
            // data2 = (position, radius).
            light_data.light_data0 = glm::vec4(float(LIGHT_POINT), light->color());
            light_data.light_data1 = glm::vec4(0.0f, 0.0f, 0.0f, light->intensity());
            light_data.light_data2 = glm::vec4(light->global_position(), light->radius());
            light_data.light_data3 = glm::vec4(0.0f);
        }
        for (int i = 0; i < render_state.m_spot_lights.size(); i++)
        {
            auto light = render_state.m_spot_lights[i];
            LightData& light_data = light_buffer[gpu_light_counter++];
            // data3 = (cos inner cone, cos outer cone) precomputed for the shader.
            light_data.light_data0 = glm::vec4(float(LIGHT_SPOT), light->color());
            light_data.light_data1 = glm::vec4(light->forward(), light->intensity());
            light_data.light_data2 = glm::vec4(light->global_position(), light->radius());
            light_data.light_data3 = glm::vec4(cosf(glm::radians(light->inner_cone_angle())), cosf(glm::radians(light->outer_cone_angle())), 0.0f, 0.0f);
        }
    }
}
// -----------------------------------------------------------------------------------------------------------------------------------
// One-time wiring of the scene descriptor set: material buffer (binding 0),
// instance buffer (binding 1), light buffer (binding 2) and the TLAS
// (binding 3). Called once from the constructor; these bindings never change.
void Scene::update_static_descriptors()
{
    auto backend = m_backend.lock();
    VkDescriptorBufferInfo material_buffer_info;
    material_buffer_info.buffer = m_material_data_buffer->handle();
    material_buffer_info.offset = 0;
    material_buffer_info.range = VK_WHOLE_SIZE;
    VkDescriptorBufferInfo instance_buffer_info;
    instance_buffer_info.buffer = m_instance_data_buffer->handle();
    instance_buffer_info.offset = 0;
    instance_buffer_info.range = VK_WHOLE_SIZE;
    VkDescriptorBufferInfo light_buffer_info;
    light_buffer_info.buffer = m_light_data_buffer->handle();
    light_buffer_info.offset = 0;
    light_buffer_info.range = VK_WHOLE_SIZE;
    VkWriteDescriptorSet write_data[4];
    HELIOS_ZERO_MEMORY(write_data[0]);
    HELIOS_ZERO_MEMORY(write_data[1]);
    HELIOS_ZERO_MEMORY(write_data[2]);
    HELIOS_ZERO_MEMORY(write_data[3]);
    // Binding 0: material data storage buffer.
    write_data[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write_data[0].descriptorCount = 1;
    write_data[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    write_data[0].pBufferInfo = &material_buffer_info;
    write_data[0].dstBinding = 0;
    write_data[0].dstSet = m_scene_descriptor_set->handle();
    // Binding 1: per-instance data storage buffer.
    write_data[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write_data[1].descriptorCount = 1;
    write_data[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    write_data[1].pBufferInfo = &instance_buffer_info;
    write_data[1].dstBinding = 1;
    write_data[1].dstSet = m_scene_descriptor_set->handle();
    // Binding 2: light data storage buffer.
    write_data[2].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write_data[2].descriptorCount = 1;
    write_data[2].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    write_data[2].pBufferInfo = &light_buffer_info;
    write_data[2].dstBinding = 2;
    write_data[2].dstSet = m_scene_descriptor_set->handle();
    // Binding 3: top-level acceleration structure, chained via pNext.
    VkWriteDescriptorSetAccelerationStructureKHR descriptor_as;
    descriptor_as.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
    descriptor_as.pNext = nullptr;
    descriptor_as.accelerationStructureCount = 1;
    // NOTE(review): assumes AccelerationStructure::handle() returns an lvalue
    // reference; taking the address of a by-value return would dangle — confirm.
    descriptor_as.pAccelerationStructures = &m_tlas.tlas->handle();
    write_data[3].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write_data[3].pNext = &descriptor_as;
    write_data[3].descriptorCount = 1;
    write_data[3].descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
    write_data[3].dstBinding = 3;
    write_data[3].dstSet = m_scene_descriptor_set->handle();
    vkUpdateDescriptorSets(backend->device(), 4, write_data, 0, nullptr);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Swap in a new scene-graph root. The previous root (if any) gets a chance to
// release mid-frame resources before being replaced.
void Scene::set_root_node(Node::Ptr node)
{
    if (m_root)
        m_root->mid_frame_cleanup();

    m_root = node;
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Accessor for the scene-graph root node (shared ownership).
Node::Ptr Scene::root_node()
{
    return m_root;
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Look up a node by name: the root itself may match, otherwise delegate to a
// recursive search through its children. Returns null when nothing matches.
Node::Ptr Scene::find_node(const std::string& name)
{
    return (m_root->name() == name) ? m_root : m_root->find_child(name);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Locate the first camera node: the root itself if it is a camera, otherwise
// the first camera-typed descendant. The dynamic cast yields null on failure.
CameraNode::Ptr Scene::find_camera()
{
    return std::dynamic_pointer_cast<CameraNode>(
        m_root->type() == NODE_CAMERA ? m_root : m_root->find_child(NODE_CAMERA));
}
// -----------------------------------------------------------------------------------------------------------------------------------
} // namespace helios |
#!/bin/sh
# Session/power helper for i3: lock, logout, suspend, hibernate, reboot, shutdown.
# Usage: i3exit {lock|logout|suspend|hibernate|reboot|shutdown}

# Lock the screen via the custom i3lock wrapper script.
lock() {
    i3lock-custom.sh
}

case "$1" in
    lock)
        lock
        ;;
    logout)
        i3-msg exit
        ;;
    suspend)
        # Lock first so the session is protected on resume.
        lock && systemctl suspend
        ;;
    hibernate)
        lock && systemctl hibernate
        ;;
    reboot)
        systemctl reboot
        ;;
    shutdown)
        systemctl poweroff
        ;;
    *)
        # Fix: usage/diagnostic output belongs on stderr, not stdout.
        echo "Usage: $0 {lock|logout|suspend|hibernate|reboot|shutdown}" >&2
        exit 2
        ;;
esac

exit 0
|
def is_anagram(str1, str2):
    """Return True if ``str1`` and ``str2`` are anagrams, ignoring letter case.

    All characters participate in the comparison (including spaces and
    punctuation), matching the original behavior.

    Args:
        str1: First string to compare.
        str2: Second string to compare.

    Returns:
        bool: True when both strings contain the same multiset of characters.
    """
    # sorted() on a string yields a list of its characters; two strings are
    # anagrams exactly when their sorted character lists are equal.
    return sorted(str1.lower()) == sorted(str2.lower())
#!/bin/bash
# Get the distances between the lipid COM and the AH domain:
# split an umbrella-sampling trajectory into frames, measure the COM-COM
# distance per frame with gmx distance, and collate them into one table.

# Split the trajectory into individual frames (group 0 = System).
echo 0 | gmx trjconv -s step7_umbrella_v1.tpr -f step7_umbrella_v1.xtc -o conf.gro -sep

# Compute the COM distance for each of the 501 frames.
for (( i = 0; i < 501; i++ )); do
    gmx distance -s step7_umbrella_v1.tpr -f "conf${i}.gro" -n index.ndx -select 'com of group "MEMB" plus com of group "SOLU"' -oall "dist${i}.xvg"
done

# Collate the last-line distance (column 2) of each .xvg into a summary table.
# Fix: truncate instead of touch+append so reruns do not duplicate rows.
: > summary_distances.dat
for (( i = 0; i < 501; i++ )); do
    # $(...) instead of backticks; quote all expansions.
    d=$(tail -n 1 "dist${i}.xvg" | awk '{print $2}')
    printf '%s %s\n' "$i" "$d" >> summary_distances.dat
    rm -- "dist${i}.xvg"
done

exit 0
|
#!/bin/bash -e
# Build and install HFST (Helsinki Finite-State Transducer) from git,
# then register it as an environment module.
# Fix: options in the shebang are lost when invoked as `bash script.sh`;
# set them explicitly. -u is deliberately omitted because common.sh and the
# `module` function may rely on unset variables — TODO confirm before enabling.
set -e -o pipefail

source ../common/common.sh

NAME=hfst
GIT_REPO=git@github.com:hfst/hfst.git

init_vars
checkout_git

# Clean toolchain environment for a reproducible build.
module purge
module load GCC
module load Bison

BIN_PATH="${INSTALL_DIR}/bin"
LIB_PATH="${INSTALL_DIR}/lib"

pushd "${GIT_PATH}"
./autogen.sh
./configure --prefix="${INSTALL_DIR}"
make
mkdir -p "${BIN_PATH}"
mkdir -p "${LIB_PATH}"
make install
popd

DESC="Helsinki Finite-State Transducer Technology (HFST)"
HELP="The Helsinki Finite-State Transducer software is intended for the implementation of morphological analysers and other tools which are based on weighted and unweighted finite-state transducer technology."

write_module
|
package org.fbc.prototypes.websocket_push_to_browser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
@Service
public class MessageService {
private SimpMessagingTemplate template;
@Autowired
public MessageService(SimpMessagingTemplate template) {
System.out.println("created Message service");
this.template = template;
}
@Scheduled(fixedDelay=1000)
public void send() {
System.out.println("autofire");
this.template.convertAndSend("/topic/updates", new Message("Update from " + System.currentTimeMillis()));
}
} |
#!/bin/bash
# conda build script for UCSC twoBitMask. On macOS a prebuilt binary ships
# with the recipe; on Linux the relevant Kent source tree libraries and the
# tool itself are compiled from source.
# Fix: without set -e, a failed sub-make was silently ignored and packaging
# continued with a stale or missing binary.
set -euo pipefail

mkdir -p "$PREFIX/bin"
if [ "$(uname)" == "Darwin" ]; then
    cp twoBitMask "$PREFIX/bin"
else
    export MACHTYPE=x86_64
    # Quote the command substitution so a workdir with spaces cannot break it.
    BINDIR="$(pwd)/bin"
    export BINDIR
    mkdir -p "$BINDIR"
    # Build the Kent libraries the tool links against, then the tool itself.
    (cd kent/src/lib && make)
    (cd kent/src/htslib && make)
    (cd kent/src/jkOwnLib && make)
    (cd kent/src/hg/lib && make)
    (cd kent/src/hg/utils/twoBitMask && make)
    cp bin/twoBitMask "$PREFIX/bin"
fi
chmod +x "$PREFIX/bin/twoBitMask"
|
#!/usr/bin/env bats
# Smoke tests for a provisioned minio installation: binary, user, unit files
# and a running systemd service.

@test "minio binary is in our \$PATH" {
  command -v minio
}

@test "assert minio user exists" {
  # Fix: the >/dev/null redirect was pointless under `run` (which captures
  # output) and $status was unquoted with `=`; use a quoted numeric compare.
  run getent passwd minio
  [ "$status" -eq 0 ]
}

@test "assert minio service file exists" {
  run test -e /etc/systemd/system/minio.service
  [ "$status" -eq 0 ]
}

@test "assert /etc/default/minio file exists" {
  run test -e /etc/default/minio
  [ "$status" -eq 0 ]
}

@test "assert minio service is running " {
  # Prepare the data directory, start the unit, then inspect its state.
  run bash -c "[ ! -d '/tmp/test' ] && mkdir /tmp/test; chown minio:minio /tmp/test; systemctl start minio; systemctl show minio|grep -e ActiveState -e SubState"
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "ActiveState=active" ]
  [ "${lines[1]}" = "SubState=running" ]
}
|
import { styled } from '../../stitches.js'
import Menu from '../../components/Menu.js'
import transition from '../../styles/transition.js'
// Styled wrapper around Menu.Button: a full-width, borderless flex row used
// as a menu entry, with a hover highlight and the shared transition styles.
const menuButtonStyles = {
  width: '100%',
  borderWidth: 0,
  backgroundColor: 'transparent',
  cursor: 'pointer',
  display: 'flex',
  alignItems: 'center',
  paddingLeft: '1em',
  paddingRight: '1em',
  paddingTop: '.75em',
  paddingBottom: '.75em',
  fontSize: '.825em',
  fontWeight: '500',
  color: '$coolGray600',
  ...transition,
  '&:hover': {
    backgroundColor: '$coolGray200',
    color: '$coolGray900'
  }
}

export default styled(Menu.Button, menuButtonStyles)
|
<gh_stars>0
export { default } from 'components/PreviewTheme/PreviewTheme';
|
package com.example;
import de.greenrobot.daogenerator.DaoGenerator;
import de.greenrobot.daogenerator.Entity;
import de.greenrobot.daogenerator.Property;
import de.greenrobot.daogenerator.Schema;
public class GreenDaoGenerator {
    // Package for generated entity classes.
    private static final String SQL_DB = "com.cjy.flb.db";
    // Package for generated DAO classes.
    private static final String SQL_DAO = "com.cjy.flb.dao";
    public static void main(String[] args) throws Exception {
        // Create a Schema object used to register entities. The first argument is
        // the database version; the second is the package (under java-gen) where
        // generated entity and DAO classes are placed.
        Schema schema = new Schema(1, SQL_DB);
        // If you do not want entities and DAOs in the same package, you can set a
        // separate package for the DAO classes.
        schema.setDefaultJavaPackageDao(SQL_DAO);
        createFirstTable(schema);
        // Finally, DaoGenerator.generateAll() emits the entity and DAO classes;
        // the arguments are the Schema object and the java-gen output directory.
        new DaoGenerator().generateAll(schema, "../flb_cjyun/app/src/main/java-gen");
    }
    /**
     * Registers all tables (entities) on the given schema.
     *
     * @param schema the greenDAO schema to add entities to
     */
    private static void createFirstTable(Schema schema) {
        // Each entity corresponds to one table; the entity name is also the table
        // name by default.
        // If you want a table name that differs from the entity name, you can set
        // it explicitly.
        // Fields added here become columns; column names are upper-cased (e.g.
        // a field "name" becomes column NAME).
        // entity.setTableName("Students");
        // Pillbox slot table.
        Entity box = schema.addEntity("Box");
        box.addLongProperty("boxId").primaryKey();
        box.addStringProperty("day_in_week").notNull();
        box.addStringProperty("point_in_time").notNull();
        box.addIntProperty("number");
        // Medication intake record table.
        Entity user = schema.addEntity("Eat");
        user.addIdProperty().autoincrement().primaryKey();
        user.addStringProperty("device_uid").notNull();
        user.addStringProperty("eat_medicine_time");
        user.addIntProperty("number");
        user.addBooleanProperty("taken");
        // Table recording whether a notification was issued.
        Entity notif = schema.addEntity("IsNotif");
        notif.addIdProperty().autoincrement().primaryKey();
        notif.addStringProperty("today");
        notif.addStringProperty("medic_id");
        notif.addStringProperty("medic_name");
        notif.addStringProperty("medic_phone");
        notif.addBooleanProperty("isMOut");
        notif.addIntProperty("isM");
        notif.addBooleanProperty("isNOut");
        notif.addIntProperty("isN");
        notif.addBooleanProperty("isAOut");
        notif.addIntProperty("isA");
        notif.addBooleanProperty("isEOut");
        notif.addIntProperty("isE");
        // Table recording upcoming medication times.
        Entity eatTime = schema.addEntity("EatTime");
        eatTime.addIdProperty().autoincrement().primaryKey();
        eatTime.addStringProperty("medic_id");
        eatTime.addLongProperty("morn");
        eatTime.addLongProperty("non");
        eatTime.addLongProperty("after");
        eatTime.addLongProperty("even");
        eatTime.addStringProperty("mornS");
        eatTime.addStringProperty("nonS");
        eatTime.addStringProperty("afterS");
        eatTime.addStringProperty("evenS");
        eatTime.addStringProperty("name");
        eatTime.addStringProperty("phone");
        // Table used by the repeat-schedule feature.
        Entity repeatSet = schema.addEntity("RepeatWeek");
        repeatSet.addIdProperty().autoincrement().primaryKey();
        repeatSet.addStringProperty("medic_id");
        repeatSet.addStringProperty("number");
        repeatSet.addBooleanProperty("isEvery");
        repeatSet.addBooleanProperty("isMorn");
        repeatSet.addBooleanProperty("isTue");
        repeatSet.addBooleanProperty("isWed");
        repeatSet.addBooleanProperty("isThu");
        repeatSet.addBooleanProperty("isFri");
        repeatSet.addBooleanProperty("isSat");
        repeatSet.addBooleanProperty("isSun");
        // Example of a many-to-many association (kept for reference, disabled).
        /* Entity number = schema.addEntity("Number");
        number.addLongProperty("numberId").primaryKey();
        number.addIntProperty("number").notNull();
        number.addStringProperty("device_uid").notNull();
        number.addStringProperty("unit");
        number.addStringProperty("eat_time");
        Entity medicine = schema.addEntity("Medicine");
        medicine.addLongProperty("medicineId").primaryKey();
        medicine.addStringProperty("medicine_name");
        medicine.addIntProperty("quantity");
        Entity nmAssociated = schema.addEntity("NumMed");
        Property nmnId = nmAssociated.addLongProperty("numberId").getProperty();
        Property nmmId = nmAssociated.addLongProperty("medicineId").getProperty();
        nmAssociated.addToOne(number, nmnId);
        nmAssociated.addToOne(medicine, nmmId);
        number.addToMany(nmAssociated, nmnId);
        medicine.addToMany(nmAssociated, nmmId);*/
        // Table used for configuring medication times.
        Entity setMTime = schema.addEntity("SetMedicTime");
        setMTime.addIdProperty().autoincrement().primaryKey();
        setMTime.addStringProperty("number").notNull();
        setMTime.addStringProperty("device_uid").notNull();
        setMTime.addStringProperty("medicine");
        setMTime.addIntProperty("quantity");
        setMTime.addIntProperty("medicine_id");
        setMTime.addStringProperty("unit");
        setMTime.addStringProperty("eat_time");
        schema.enableKeepSectionsByDefault();
    }
}
|
<reponame>RyanFikejs/lambdata-ryanfikejs
"""Basic Unit testing for example.py"""
from random import randint
import pytest
from example import increment, COLORS
def test_increment():
    """Increment should return its argument plus one, for fixed and random inputs."""
    test_value = randint(0, 10)
    assert increment(3) == 4
    assert increment(-2) == -1
    assert increment(test_value) == test_value + 1
    # Property check over 100 random samples.
    # Fix: the original rebound the loop variable `i` inside the body, which
    # was confusing; use a throwaway loop variable and a named sample instead.
    for _ in range(100):
        n = randint(1, 1000)
        assert increment(n - 1) == n
def test_number_colors():
    """COLORS should contain exactly four entries."""
    assert len(COLORS) == 4
def test_color_contents():
    """Each expected named color must be present in COLORS."""
    for expected in ("blue", "brown", "mauve"):
        assert expected in COLORS
<gh_stars>0
package io.laminext.syntax
import com.raquo.laminar.api.L._
import io.laminext.core.ops.observable.ObservableOfOptionOps
// Syntax mix-in: importing this trait's members enriches any
// Observable[Option[A]] with the extra combinators defined on
// ObservableOfOptionOps (implicit-conversion / "extension" pattern).
trait ObservableOfOptionSyntax {
  // Wraps the observable; the conversion method's name is part of the
  // implicit-resolution surface, so it must not be renamed.
  implicit def syntaxObservableOfOption[A](
      s: Observable[Option[A]]
  ): ObservableOfOptionOps[A] = new ObservableOfOptionOps[A](s)
}
|
<reponame>zhanghongli-lily/tmoney
package api.framework;
import api.item.AppType;
import org.junit.Test;
public class LoginHelper {
private static ApiModel model = ApiModel.load(LoginHelper.class);
@Test
public void test(){
login(AppType.MINIPRO);
}
public static void login(){
model.get("minipro").importDefaultConfig().run();
}
public static void login(AppType type){
switch (type){
case MANAGE:
model.get("api.framework.manage").importDefaultConfig().run();
break;
case MARKET:
model.get("market").importDefaultConfig().run();
break;
case MINIPRO:
model.get("minipro").importDefaultConfig().run();
break;
default:
model.get("h5").importDefaultConfig().run();
}
}
} |
<gh_stars>0
from ..api.api import get_PBSMM_record
from ..abstract.helpers import set_json_serialized_field, fix_non_aware_datetime
def process_season_record(obj, instance, origin='season'):
    """
    Take the data returned from a single Season's API JSON content and map it
    to a PBSMMEpisode database record.

    Parameters:
        obj: the season's listing JSON (must contain links.self).
        instance: the model instance to populate; returned when done.
        origin: kept for interface compatibility — not used in the body.
    """
    # We have to get the detail endpoint now because PBS removed the show link
    # from season listings.  NOTE(review): `status` is never checked; a failed
    # fetch will surface as a KeyError below — confirm whether that is desired.
    self_link = obj['links']['self']
    status, obj = get_PBSMM_record(self_link)

    # These are the top-level fields - almost everything else is under attrs.
    # Some payloads nest attributes under a 'data' wrapper.
    if 'attributes' not in obj.keys():
        attrs = obj['data']['attributes']
    else:
        attrs = obj['attributes']
    links = obj['links']

    # UUID and updated_on
    if 'id' in obj.keys():
        instance.object_id = obj.get('id', None)  # This should always be set.
    else:
        instance.object_id = obj['data'].get('id')
    instance.updated_at = fix_non_aware_datetime(
        attrs.get('updated_at', None)
    )  # timestamp of the record in the API
    instance.api_endpoint = links.get('self', None)  # URL of the request

    # Title and sortable title
    instance.title = attrs.get('title', None)
    instance.title_sortable = attrs.get('title_sortable', None)

    # Descriptions
    instance.description_long = attrs.get('description_long', None)
    instance.description_short = attrs.get('description_short', None)

    # Season metadata - things related to the season itself
    instance.premiered_on = fix_non_aware_datetime(attrs.get('premiered_on', None))
    instance.funder_message = attrs.get('funder_message', None)
    instance.is_excluded_from_dfp = attrs.get('is_excluded_from_dfp', None)
    instance.can_embed_player = attrs.get('can_embed_player', None)
    instance.language = attrs.get('language', None)
    instance.ga_page = attrs.get('tracking_ga_page', None)
    instance.ga_event = attrs.get('tracking_ga_event', None)
    instance.episode_count = attrs.get('episode_count', None)
    instance.display_episode_number = attrs.get('display_episode_number', None)
    instance.sort_episodes_descending = attrs.get('sort_episodes_descending', None)
    instance.ordinal = attrs.get('ordinal', None)
    instance.hashtag = attrs.get('hashtag', None)

    # Unprocessed - store as JSON fragments
    instance.genre = set_json_serialized_field(attrs, 'genre', default=None)
    instance.links = set_json_serialized_field(attrs, 'links', default=None)

    # The canonical image used for this is the one that has 'mezzanine' in it
    instance.images = set_json_serialized_field(attrs, 'images', default=None)
    if instance.images is None:  # try latest_asset_images
        instance.images = set_json_serialized_field(
            attrs, 'latest_asset_images', default=None
        )
    instance.platforms = set_json_serialized_field(attrs, 'platforms', default=None)
    instance.audience = set_json_serialized_field(attrs, 'audience', default=None)

    # References to parents.  Guard against a missing 'show' key: the previous
    # unconditional show.get(...) raised AttributeError on None.
    show = attrs.get('show', None)
    instance.show_api_id = show.get('id', None) if show else None

    instance.json = obj
    return instance
|
import { navigate } from "gatsby";
// Redirect the user to the login page, preserving the page they came from in
// the `ret` query parameter so they can be sent back after authenticating.
export const navigateLogin = () => {
  // Guard for SSR: `window` does not exist during server-side rendering.
  let currentPath = "";
  if (typeof window !== "undefined") {
    currentPath = window.location.pathname + window.location.search;
  }
  // Never redirect while already on the login page (avoids a redirect loop).
  if (currentPath.includes("/login")) {
    return;
  }
  navigate("/login?ret=" + encodeURIComponent(currentPath), { state: { forced: true } });
};
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.resultset;
import java.util.ArrayList ;
import java.util.List ;
import org.apache.jena.atlas.iterator.PeekIterator ;
import org.apache.jena.atlas.lib.Lib ;
import org.apache.jena.query.QuerySolution ;
import org.apache.jena.query.ResultSet ;
import org.apache.jena.rdf.model.Model ;
import org.apache.jena.sparql.core.ResultBinding ;
import org.apache.jena.sparql.engine.binding.Binding ;
/**
 * A result set held in-memory; rewindable via {@link #reset()}.
 */
public class ResultSetMem implements org.apache.jena.query.ResultSetRewindable, ResultSetPeekable
{
    // Result rows.  May be SHARED with another ResultSetMem (see the copy
    // constructor with takeCopy == false) — only iterator state is private.
    protected List<Binding> rows = new ArrayList<>();
    // Projection variable names; may also be shared with the source set.
    protected List<String> varNames = null ;
    // Rows consumed so far by this iterator (incremented by nextBinding()).
    private int rowNumber = 0 ;
    // Current iteration state over 'rows'; rebuilt by reset().
    private PeekIterator<Binding> iterator = null ;
    // Model used to materialise Bindings into QuerySolutions; may be null.
    private Model model = null ;

    /** Create an in-memory result set from another one, sharing its rows.
     *
     * @param imrs2 The other QueryResultsMem object
     */
    public ResultSetMem(ResultSetMem imrs2) {
        this(imrs2, false);
    }

    /**
     * Create an in-memory result set from another one.
     *
     * @param imrs2
     *            The other ResultSetMem object
     * @param takeCopy
     *            Should we copy the rows (true) or share the row list (false)?
     */
    public ResultSetMem(ResultSetMem imrs2, boolean takeCopy) {
        varNames = imrs2.varNames;
        if ( takeCopy )
            rows.addAll(imrs2.rows);
        else
            // Share results (not the iterator).
            rows = imrs2.rows;
        reset();
    }

    /**
     * Create an in-memory result set from any ResultSet object. If the
     * ResultSet is an in-memory one already, then no copying is done - the
     * necessary internal datastructures are shared. This operation destroys
     * (uses up) a ResultSet object that is not an in memory one.
     */
    public ResultSetMem(ResultSet qr) {
        model = qr.getResourceModel();
        if ( qr instanceof ResultSetMem ) {
            ResultSetMem qrm = (ResultSetMem)qr;
            this.rows = qrm.rows;
            this.varNames = qrm.varNames;
        } else {
            varNames = qr.getResultVars();
            // Drain the source result set into our row list.
            while (qr.hasNext()) {
                Binding rb = qr.nextBinding();
                rows.add(rb);
            }
        }
        reset();
    }

    /**
     * Create an in-memory result set from an array of ResulSets. It is assumed
     * that all the ResultSets from the array have the same variables.
     *
     * @param sets
     *            the ResultSet objects to concatenate.
     * @throws ResultSetException if the variable lists differ.
     */
    public ResultSetMem(ResultSet... sets) {
        varNames = sets[0].getResultVars();
        for ( ResultSet rs : sets ) {
            if ( !varNames.equals(rs.getResultVars()) )
                throw new ResultSetException("ResultSet must have the same variables.");
            if ( rs instanceof ResultSetMem )
                rows.addAll(((ResultSetMem)rs).rows);
            else
                while (rs.hasNext())
                    rows.add(rs.nextBinding());
        }
        reset();
    }

    /** Create an empty in-memory result set with no variables. */
    public ResultSetMem() {
        this.varNames = new ArrayList<>();
        reset();
    }

    // -------- ResultSet interface ------------------------------

    /**
     * @throws UnsupportedOperationException always thrown — removal is not supported.
     */
    @Override
    public void remove() throws java.lang.UnsupportedOperationException
    {
        throw new java.lang.UnsupportedOperationException(
            Lib.className(this)+": Attempt to remove an element");
    }

    /**
     * Is there another possibility?
     */
    @Override
    public boolean hasNext() { return iterator.hasNext() ; }

    /** Moves onto the next result possibility.
     */
    @Override
    public QuerySolution nextSolution() { return new ResultBinding(model, nextBinding()) ; }

    @Override
    public Binding nextBinding() { rowNumber++ ; return iterator.next() ; }

    /** Moves onto the next result possibility.
     * The returned object should be of class QuerySolution
     */
    @Override
    public QuerySolution next() { return nextSolution() ; }

    /** Reset this result set back to the beginning (alias for reset()). */
    public void rewind( ) { reset() ; }

    @Override
    public void reset() { iterator = new PeekIterator<>(rows.iterator()) ; rowNumber = 0 ; }

    /** Return the "row" number for the current iterator item
     */
    @Override
    public int getRowNumber() { return rowNumber ; }

    @Override
    public Model getResourceModel()
    {
        return model ;
    }

    /** Return the number of rows
     */
    @Override
    public int size() { return rows.size() ; }

    /** Get the variable names for the projection
     */
    @Override
    public List<String> getResultVars() { return varNames ; }

    @Override
    public QuerySolution peek() {
        return new ResultBinding(model, peekBinding());
    }

    @Override
    public Binding peekBinding() {
        // PeekIterator.element() is the one that throws NoSuchElementException.
        return iterator.element();
    }
}
|
#!/bin/bash
# Build an iOS pod for <package>: compile a static library with `cargo lipo`
# and pack the headers, LICENSE, and library into a tarball.
#
# Usage: <script> <package> [target1 [target2]]
# Env:   OPENSSL_DIR (optional) — OpenSSL install prefix.
#
# Shebang changed from /bin/sh: the script uses `[[ ]]`, which is a bashism.
set -e
set -x

if [ "$1" = "--help" ]; then
  echo "Usage: <package> <targets>"
  exit 0  # 'return' is only valid inside a function or a sourced script
fi

package="$1"
[ -z "${package}" ] && exit 1

export PKG_CONFIG_ALLOW_CROSS=1
export POD_FILE_NAME="${package}.tar.gz"
export LIBINDY_POD_VERSION=1.8.2

# Default OpenSSL location (Homebrew) unless the caller provided one.
if [ -z "${OPENSSL_DIR}" ]; then
  export OPENSSL_DIR=/usr/local/Cellar/openssl@1.1/1.1.1k
fi

echo "Build IOS POD started..."
TYPE="release"
cd "${package}"

if [[ $# -eq 2 ]]; then # build for single platform
  echo "... for target $2 ..."
  TARGETS="--targets $2"
elif [[ $# -eq 3 ]]; then # build for two platforms
  echo "... for targets $2,$3 ..."
  TARGETS="--targets $2,$3"
else # build for all platforms
  echo "... for all default targets ..."
  TARGETS=""
fi

# TARGETS is deliberately left unquoted: it expands to zero or two words.
cargo lipo --$TYPE $TARGETS
echo 'Build completed successfully.'

WORK_DIR="out_pod"
echo "Try to create out directory: $WORK_DIR"
mkdir "$WORK_DIR"
if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
  echo "Could not create temp dir $WORK_DIR"
  exit 1
fi

echo "Packing..."
PACKAGE="${package}.a"
cp include/*.h "$WORK_DIR"
cp ../LICENSE "$WORK_DIR"
cp "target/universal/$TYPE/$PACKAGE" "$WORK_DIR"
cd "$WORK_DIR"
tar -cvzf "$POD_FILE_NAME" *
cd -
ls -l "$WORK_DIR/$POD_FILE_NAME"
echo "Packing completed."
echo "Out directory: $WORK_DIR"
|
import React, { PropTypes } from 'react';
import FormGroup from './FormGroup';
import FormGroupError from './FormGroupError';
export default function ModalFormGroup(props) {
const { errors, apiKey, label, id, description, children } = props;
return (
<FormGroup className="row" errors={errors} name={apiKey}>
<label className="col-sm-4 col-form-label" htmlFor={id}>{label}</label>
<div className="col-sm-8">
{children}
{description ? <small className="text-muted modal-description">{description}</small> : null}
<FormGroupError errors={errors} name={apiKey} inline={false} />
</div>
</FormGroup>
);
}
ModalFormGroup.propTypes = {
apiKey: PropTypes.string.isRequired,
children: PropTypes.node.isRequired,
label: PropTypes.string.isRequired,
errors: PropTypes.object.isRequired,
id: PropTypes.string.isRequired,
description: PropTypes.string,
};
ModalFormGroup.defaultProps = {
apiKey: '',
errors: {},
};
|
<gh_stars>1-10
#!/usr/bin/env mocha -R spec
import {strict as assert} from "assert";
import {binJSON} from "../";
const TITLE = __filename.split("/").pop();
describe(TITLE, () => {
  // Each case asserts the encoded buffer's leading byte equals "(" —
  // presumably binJSON's array tag byte (TODO confirm against the codec) —
  // and that decode(encode(x)) round-trips to a deep-equal value.
  test("(", []);
  test("(", [null]);
  test("(", [true]);
  test("(", [true, false]);
  test("(", [0]);
  test("(", [0, 123]);
  test("(", [""]);
  test("(", ["ABC", "DEF"]);
  // nest
  test("(", [[], 1]);
  test("(", [[[], 2], 1]);
  test("(", [[[[], 3], 2], 1]);

  // `undefined` has no representation here; it round-trips as null.
  it("[undefined]", () => {
    assert.deepEqual(binJSON.decode(binJSON.encode([undefined])), [null]);
  });

  // Encode `value`, check the tag byte, then decode and compare deeply.
  function test(tag: string, value: any): void {
    it(JSON.stringify(value), () => {
      const buf = binJSON.encode(value);
      assert.equal(ArrayBuffer.isView(buf), true, "ArrayBuffer.isView");
      assert.equal(buf[0]?.toString(16), tag.charCodeAt(0).toString(16), "tag");
      const rev = binJSON.decode(buf);
      assert.equal(typeof rev, typeof value);
      assert.deepEqual(rev, value);
    });
  }
});
|
#!/bin/bash -e
# CI driver: run tests, coverage, and linters for this Go module.
# Optional $1 selects the target architecture (exported as GOARCH).
cd "$(dirname "$0")"   # quote $0: paths with spaces would otherwise word-split
PATH=$HOME/go/bin:$PATH
unset GOPATH
export GO111MODULE=on
export GOARCH=${1}
# v: print a blank line and the command about to run, then execute it.
# "$@" (quoted) preserves argument boundaries; the original unquoted $@
# word-split arguments containing spaces.
function v
{
  echo
  echo "$@"
  "$@"
}
# Install missing lint/coverage tools on demand.
if ! type -p goveralls; then
  v go install github.com/mattn/goveralls
fi
if ! type -p shadow; then
  v go install golang.org/x/tools/go/analysis/passes/shadow/cmd/shadow
fi
if ! type -p goreturns; then
  v go install github.com/sqs/goreturns
fi

go test ./...

echo acceptable...
go test -v -covermode=count -coverprofile=test.out .
go tool cover -func=test.out
# Upload coverage only when a Coveralls token is configured.
[ -z "$COVERALLS_TOKEN" ] || goveralls -coverprofile=test.out -service=travis-ci -repotoken "$COVERALLS_TOKEN"

# Per-package coverage for every subdirectory that has a doc.go.
# Quote "$d" throughout: directory names with spaces would word-split.
for d in *; do
  if [ -f "$d/doc.go" ]; then
    echo "$d..."
    go test -v -covermode=count -coverprofile="$d/test.out" "./$d"
    go tool cover -func="$d/test.out"
    [ -z "$COVERALLS_TOKEN" ] || goveralls -coverprofile="$d/test.out" -service=travis-ci -repotoken "$COVERALLS_TOKEN"
  fi
done

v goreturns -l -w *.go */*.go
v go vet ./...
v shadow ./...
|
<reponame>MarceloFigueira/erxes-widgets
import gql from "graphql-tag";
import * as React from "react";
import client from "../../apollo-client";
import { IEmailParams, IIntegration, IIntegrationFormData } from "../../types";
import { connection } from "../connection";
import {
increaseViewCountMutation,
saveFormMutation,
sendEmailMutation
} from "../graphql";
import { ICurrentStatus, IForm } from "../types";
import { postMessage } from "./utils";
// Visibility flags for the widget's UI pieces plus the form submission status.
interface IState {
  isPopupVisible: boolean;
  isFormVisible: boolean;
  isCalloutVisible: boolean;
  currentStatus: ICurrentStatus;
}

// Everything exposed to consumers through the React context: the state above
// plus the action callbacks and accessors implemented by AppProvider.
interface IStore extends IState {
  init: () => void;
  showForm: () => void;
  toggleShoutbox: (isVisible?: boolean) => void;
  showPopup: () => void;
  closePopup: () => void;
  saveForm: (doc: any) => void;
  createNew: () => void;
  sendEmail: (params: IEmailParams) => void;
  setHeight: () => void;
  getIntegration: () => IIntegration;
  getForm: () => IForm;
  getIntegrationConfigs: () => IIntegrationFormData;
}

// The empty-object cast is safe only because AppProvider always supplies a
// full value before any consumer reads the context.
const AppContext = React.createContext({} as IStore);

export const AppConsumer = AppContext.Consumer;
// Context provider holding all widget UI state (popup/callout/form
// visibility, submission status) and the actions that mutate it.
export class AppProvider extends React.Component<{}, IState> {
  constructor(props: {}) {
    super(props);

    this.state = {
      isPopupVisible: false,
      isFormVisible: false,
      isCalloutVisible: false,
      currentStatus: { status: "INITIAL" }
    };
  }

  /*
   * Decide which component will render initially
   */
  init = () => {
    const { data, hasPopupHandlers } = connection;
    const { integration, form } = data;
    const { callout } = form;
    const { loadType } = integration.formData;

    // if there is popup handler then do not show it initially
    if (loadType === "popup" && hasPopupHandlers) {
      return null;
    }

    this.setState({ isPopupVisible: true });

    // if there is no callout setting then show form
    if (!callout) {
      return this.setState({ isFormVisible: true });
    }

    // If load type is shoutbox then hide form component initially
    if (callout.skip && loadType !== "shoutbox") {
      return this.setState({ isFormVisible: true });
    }

    return this.setState({ isCalloutVisible: true });
  };

  /*
   * Will be called when user click callout's submit button
   */
  showForm = () => {
    this.setState({
      isCalloutVisible: false,
      isFormVisible: true
    });
  };

  /*
   * Increasing view count
   */
  increaseViewCount = () => {
    const form = this.getForm();

    return client.mutate({
      mutation: gql(increaseViewCountMutation),
      variables: {
        formId: form._id
      }
    });
  };

  /*
   * Toggle circle button. Hide callout and show or hide form
   */
  toggleShoutbox = (isVisible?: boolean) => {
    if (!isVisible) {
      // Increasing view count
      this.increaseViewCount();
    }

    this.setState({
      isCalloutVisible: false,
      isFormVisible: !isVisible
    });
  };

  /*
   * When load type is popup, Show popup and show one of callout and form.
   * NOTE(review): here `callout` is read from integration.formData, while
   * init() reads it from data.form — confirm which source is canonical.
   */
  showPopup = () => {
    const { data } = connection;
    const { integration } = data;
    const { callout } = integration.formData;

    this.setState({ isPopupVisible: true });

    // if there is no callout setting then show form
    if (!callout) {
      return this.setState({ isFormVisible: true });
    }

    if (callout.skip) {
      return this.setState({ isFormVisible: true });
    }

    return this.setState({ isCalloutVisible: true });
  };

  /*
   * When load type is popup, Hide popup
   */
  closePopup = () => {
    this.setState({
      isPopupVisible: false,
      isCalloutVisible: false,
      isFormVisible: false
    });

    // Increasing view count
    this.increaseViewCount();
  };

  /*
   * Save user submissions. `doc` maps fieldId -> field descriptor; it is
   * flattened into the mutation's submissions list.
   */
  saveForm = (doc: any) => {
    const submissions = Object.keys(doc).map(fieldId => {
      const { value, text, type, validation } = doc[fieldId];

      return {
        _id: fieldId,
        type,
        text,
        value,
        validation
      };
    });

    const integration = this.getIntegration();
    const form = this.getForm();

    client
      .mutate({
        mutation: gql(saveFormMutation),
        variables: {
          integrationId: integration._id,
          formId: form._id,
          browserInfo: connection.browserInfo,
          submissions
        }
      })
      .then(({ data: { saveForm } }: any) => {
        const { status, errors } = saveForm;

        this.setState({
          currentStatus: {
            status: status === "ok" ? "SUCCESS" : "ERROR",
            errors
          }
        });
      });
  };

  /*
   * Redisplay form component after submission
   */
  createNew = () => {
    this.setState({ currentStatus: { status: "INITIAL" } });
  };

  /*
   * Send email to submitted user after successfull submission
   */
  sendEmail = ({ toEmails, fromEmail, title, content }: IEmailParams) => {
    client.mutate({
      mutation: gql(sendEmailMutation),
      variables: {
        toEmails,
        fromEmail,
        title,
        content
      }
    });
  };

  // Report our rendered height to the embedding page via postMessage so the
  // host iframe can be resized to fit.
  setHeight = () => {
    const container = document.getElementById("erxes-container");

    if (!container) {
      return;
    }

    const elementsHeight = container.clientHeight;

    postMessage({
      message: "changeContainerStyle",
      style: `height: ${elementsHeight}px;`
    });
  };

  getIntegration = () => {
    return connection.data.integration;
  };

  getForm = () => {
    return connection.data.form;
  };

  getIntegrationConfigs = () => {
    return this.getIntegration().formData;
  };

  render() {
    return (
      <AppContext.Provider
        value={{
          ...this.state,
          init: this.init,
          showForm: this.showForm,
          toggleShoutbox: this.toggleShoutbox,
          showPopup: this.showPopup,
          closePopup: this.closePopup,
          saveForm: this.saveForm,
          createNew: this.createNew,
          sendEmail: this.sendEmail,
          setHeight: this.setHeight,
          getIntegration: this.getIntegration,
          getForm: this.getForm,
          getIntegrationConfigs: this.getIntegrationConfigs
        }}
      >
        {this.props.children}
      </AppContext.Provider>
    );
  }
}
|
<filename>cpp/simulator/inc/Vector2D.h
// Project Repository : https://github.com/robertapplin/N-Body-Simulations
// Authored by <NAME>, 2020
#ifndef Vector2D_H
#define Vector2D_H

namespace Simulator {

// A struct used for two dimensional vector operations.
struct Vector2D {
  // Calculates the magnitude of the two dimensional vector.
  [[nodiscard]] double magnitude() const;

  // Used to simplify vector operations.
  // NOTE(review): these are declared non-const; if the definitions do not
  // mutate *this, consider marking them const (interface left unchanged here).
  Vector2D operator-(Vector2D const &otherVector);
  Vector2D operator*(double value);
  void operator+=(Vector2D const &otherVector);
  bool operator==(Vector2D const &otherVector);

  // Cartesian components.
  double m_x;
  double m_y;
};

} // namespace Simulator

#endif // Vector2D_H
|
#!/usr/bin/env sh
# Vim plugin for looking up words in an online thesaurus
# Author: Anton Beloglazov <http://beloglazov.info/>
# Version: 0.3.2
# Original idea and code: Nick Coleman <http://www.nickcoleman.org/>

# Build the lookup URL; spaces in the query become '+'.  Quote "$1" so a
# multi-word query is passed to echo as a single argument.
URL="http://www.thesaurus.com/browse/$(echo "$1" | tr ' ' '+')"

# Pick whichever downloader exists on this system.
if [ "$(uname)" = "FreeBSD" ]; then
    DOWNLOAD="fetch"
    OPTIONS="-qo"
elif command -v curl > /dev/null; then
    DOWNLOAD="curl"
    OPTIONS="-so"
elif command -v wget > /dev/null; then
    DOWNLOAD="wget"
    OPTIONS="-qO"
else
    echo "FreeBSD fetch, curl, or wget not found"
    exit 1
fi

# Create a temp file for the downloaded page.
if command -v mktemp > /dev/null; then
    OUTFILE="$(mktemp /tmp/XXXXXXX)"
else
    # Fallback when mktemp is missing: random (non-atomic) name under /tmp.
    NEW_RAND="$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | head -c 12)"
    touch "/tmp/$NEW_RAND"
    OUTFILE="/tmp/$NEW_RAND"
fi

# Install the cleanup trap BEFORE downloading, so the temp file is removed
# even if the download is interrupted (it was previously set afterwards).
trap "rm -f \"$OUTFILE\"" EXIT SIGINT SIGTERM

"$DOWNLOAD" "$OPTIONS" "$OUTFILE" "$URL"

if ! grep -q 'no thesaurus results' "$OUTFILE" && grep -q 'html' "$OUTFILE"; then
    # Scrape synonyms and antonyms out of the HTML.
    awk -F'<|>|"' '/synonym-description">/,/filter-[0-9]+/ {
        if (index($0, "txt\">"))
            printf "\nDefinition: %s", $3
        else if (index($0, "ttl\">"))
            printf " %s\nSynonyms:\n", $3
        else if (index($0, "thesaurus.com")) {
            sub(/-/, " ", $7)
            printf "%s %s\n", $7, $15
        }
    } /container-info antonyms/,/\/section/ {
        if (index($0, "container-info antonyms"))
            print "\nAntonyms:"
        else if (index($0, "thesaurus.com/browse")) {
            sub(/--/, " ", $7)
            printf "%s %s\n", $7, $15
        }
    }' "$OUTFILE"
else
    echo "The word \"${1}\" has not been found on thesaurus.com!"
    exit 1
fi
|
def func(n):
    """Print the sum of every (i, j) index pair over range(n) x range(n)."""
    # Two nested passes over range(n): O(n^2) lines of output.
    for left in range(n):
        for right in range(n):
            print(left + right)
<filename>DriveFrontend/src/app/pages/manage/pages/file/services/file.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { driveApi } from '../../../../../../environments/driveApi';
import { environment } from '../../../../../../environments/environment.prod';
import { HttpClientBase } from '../../../../../services/http-client-base.service';
import { Observable } from 'rxjs';
@Injectable({
  providedIn: 'root'
})
// Thin wrapper over the drive API's file endpoints.
export class FileService {
  constructor(public http: HttpClientBase) {}

  /** Lists entries under `path`, filtered by search text/type and paged. */
  public list(
    path: string,
    q: string,
    type: 'Directory' | 'File' | null = null,
    skip: number = 0,
    take: number = 20
  ): Observable<any[]> {
    return this.http.get(driveApi.file.list, {
      path,
      q,
      type,
      skip,
      take
    });
  }

  /** Deletes all of the given paths in a single request. */
  public delete(paths: string[]) {
    return this.http.put(driveApi.file.delete, {}, paths, {});
  }

  /** Moves (or renames) a file or directory from one path to another. */
  public move(from: string, to: string) {
    return this.http.put(driveApi.file.move, { from, to }, {}, {});
  }

  /** Uploads every file in `files` to `path` as multipart form data. */
  public upload(path: string, files: FileList) {
    const formData = new FormData();
    for (let i = 0; i < files.length; i++) {
      const file = files.item(i);
      // FileList.item() is typed File | null — skip null slots instead of
      // appending null to the form data.
      if (file !== null) {
        formData.append('files', file);
      }
    }
    return this.http.post(driveApi.file.upload, { path }, formData);
  }

  /** Creates directory `name` under `path`; the API expects no leading '/'. */
  public createDirectory(path: string, name: string) {
    let createPath = path + '/' + name;
    if (createPath[0] === '/') {
      createPath = createPath.substring(1);
    }
    return this.http.post(
      driveApi.file.createChild,
      {
        path: createPath
      },
      {}
    );
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.