text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Fail a pipeline when any stage fails (note: 'set -e' is intentionally not
# enabled here; individual helpers check their own critical steps).
set -o pipefail
# FLINK_DIR must point at an unpacked Flink distribution; everything below
# reads binaries, config and logs from it.
if [[ -z $FLINK_DIR ]]; then
echo "FLINK_DIR needs to point to a Flink distribution directory"
exit 1
fi
# Global success flag: helpers clear it on failure, check_all_pass reads it.
export PASS=1
echo "Flink dist directory: $FLINK_DIR"
# Resolve the directory containing this script ($0) to an absolute path,
# then return to the caller's working directory.
TEST_ROOT=`pwd`
TEST_INFRA_DIR="$0"
TEST_INFRA_DIR=`dirname "$TEST_INFRA_DIR"`
cd $TEST_INFRA_DIR
TEST_INFRA_DIR=`pwd`
cd $TEST_ROOT
# used to randomize created directories (%S%N = seconds + nanoseconds)
export TEST_DATA_DIR=$TEST_INFRA_DIR/temp-test-directory-$(date +%S%N)
echo "TEST_DATA_DIR: $TEST_DATA_DIR"
function backup_config() {
    # Back up the masters file and flink-conf.yaml so that
    # revert_default_config can later restore the pristine distribution.
    # Paths are quoted so a FLINK_DIR containing spaces does not break cp.
    cp "$FLINK_DIR/conf/masters" "$FLINK_DIR/conf/masters.bak"
    cp "$FLINK_DIR/conf/flink-conf.yaml" "$FLINK_DIR/conf/flink-conf.yaml.bak"
}
function revert_default_config() {
    # Restore the masters file and flink-conf.yaml from the .bak copies
    # created by backup_config. Each restore is a no-op when no backup
    # exists, so this is safe to call unconditionally from cleanup.
    if [ -f "$FLINK_DIR/conf/masters.bak" ]; then
        mv -f "$FLINK_DIR/conf/masters.bak" "$FLINK_DIR/conf/masters"
    fi
    if [ -f "$FLINK_DIR/conf/flink-conf.yaml.bak" ]; then
        mv -f "$FLINK_DIR/conf/flink-conf.yaml.bak" "$FLINK_DIR/conf/flink-conf.yaml"
    fi
}
function set_conf() {
    # Append a "key: value" entry to flink-conf.yaml.
    # $1 - configuration key
    # $2 - configuration value
    # Locals replace the original's global CONF_NAME/VAL so the helper no
    # longer leaks variables into the calling script.
    local conf_name=$1
    local val=$2
    echo "$conf_name: $val" >> "$FLINK_DIR/conf/flink-conf.yaml"
}
function change_conf() {
    # Rewrite an existing "key: old" entry in flink-conf.yaml to "key: new".
    # $1 - configuration key, $2 - current value, $3 - replacement value
    # NOTE(review): key and values are interpolated into the sed pattern
    # verbatim, so they must not contain sed metacharacters ('/', '&', ...).
    local conf_name=$1
    local old_val=$2
    local new_val=$3
    sed -i -e "s/${conf_name}: ${old_val}/${conf_name}: ${new_val}/" "${FLINK_DIR}/conf/flink-conf.yaml"
}
function create_ha_config() {
# Replace flink-conf.yaml and the masters file with a ZooKeeper-based
# HA configuration. The originals are saved first via backup_config and
# can be restored with revert_default_config.
backup_config
# clean up the dir that will be used for zookeeper storage
# (see high-availability.zookeeper.storageDir below)
if [ -e $TEST_DATA_DIR/recovery ]; then
echo "File ${TEST_DATA_DIR}/recovery exists. Deleting it..."
rm -rf $TEST_DATA_DIR/recovery
fi
# create the masters file (only one currently).
# This must have all the masters to be used in HA.
echo "localhost:8081" > ${FLINK_DIR}/conf/masters
# then move on to create the flink-conf.yaml
# (the sed strips one leading space per line; the heredoc body below is
# flush-left so this is effectively a passthrough)
sed 's/^ //g' > ${FLINK_DIR}/conf/flink-conf.yaml << EOL
#==============================================================================
# Common
#==============================================================================
jobmanager.rpc.address: localhost
jobmanager.rpc.port: 6123
jobmanager.heap.mb: 1024
taskmanager.heap.mb: 1024
taskmanager.numberOfTaskSlots: 4
parallelism.default: 1
#==============================================================================
# High Availability
#==============================================================================
high-availability: zookeeper
high-availability.zookeeper.storageDir: file://${TEST_DATA_DIR}/recovery/
high-availability.zookeeper.quorum: localhost:2181
high-availability.zookeeper.path.root: /flink
high-availability.cluster-id: /test_cluster_one
#==============================================================================
# Web Frontend
#==============================================================================
rest.port: 8081
EOL
}
function start_ha_cluster {
# Bring up a ZooKeeper-backed HA cluster: write the HA configuration,
# start a local ZooKeeper, then start the Flink cluster.
create_ha_config
start_local_zk
start_cluster
}
function start_local_zk {
# Parses the zoo.cfg and starts locally zk.
# This is almost the same code as the
# /bin/start-zookeeper-quorum.sh without the SSH part and only running for localhost.
# Aborts (and clears PASS) if any configured server is not localhost.
while read server ; do
server=$(echo -e "${server}" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//') # trim
# match server.id=address[:port[:port]]
# BASH_REMATCH[1] = numeric id, BASH_REMATCH[2] = host part
if [[ $server =~ ^server\.([0-9]+)[[:space:]]*\=[[:space:]]*([^: \#]+) ]]; then
id=${BASH_REMATCH[1]}
address=${BASH_REMATCH[2]}
if [ "${address}" != "localhost" ]; then
echo "[ERROR] Parse error. Only available for localhost."
PASS=""
exit 1
fi
${FLINK_DIR}/bin/zookeeper.sh start $id
else
echo "[WARN] Parse error. Skipping config entry '$server'."
fi
done < <(grep "^server\." "${FLINK_DIR}/conf/zoo.cfg")
}
function start_cluster {
# Start a local Flink cluster and poll the REST endpoint until the
# dispatcher answers with a non-empty taskmanagers listing.
"$FLINK_DIR"/bin/start-cluster.sh
# wait at most 10 seconds until the dispatcher is up
for i in {1..10}; do
# without the || true this would exit our script if the JobManager is not yet up
QUERY_RESULT=$(curl "http://localhost:8081/taskmanagers" 2> /dev/null || true)
if [[ "$QUERY_RESULT" == "" ]]; then
echo "Dispatcher/TaskManagers are not yet up"
elif [[ "$QUERY_RESULT" != "{\"taskmanagers\":[]}" ]]; then
# any answer other than the empty-list JSON means at least one
# TaskManager has registered
echo "Dispatcher REST endpoint is up."
break
fi
echo "Waiting for dispatcher REST endpoint to come up..."
sleep 1
done
}
function stop_cluster {
# Stop the Flink cluster (and local ZooKeeper if one is running), then
# scan the logs for unexpected errors/exceptions and non-empty .out
# files; any finding clears the global PASS flag.
"$FLINK_DIR"/bin/stop-cluster.sh
# stop zookeeper only if there are processes running
if ! [ "`jps | grep 'FlinkZooKeeperQuorumPeer' | wc -l`" = "0" ]; then
"$FLINK_DIR"/bin/zookeeper.sh stop
fi
# Scan logs for "error", excluding a whitelist of known-benign messages
# (Kafka rebalance noise, akka shutdown races, expected Hadoop/S3 probes).
if grep -rv "GroupCoordinatorNotAvailableException" $FLINK_DIR/log \
| grep -v "RetriableCommitFailedException" \
| grep -v "NoAvailableBrokersException" \
| grep -v "Async Kafka commit failed" \
| grep -v "DisconnectException" \
| grep -v "AskTimeoutException" \
| grep -v "WARN akka.remote.transport.netty.NettyTransport" \
| grep -v "WARN org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline" \
| grep -v "jvm-exit-on-fatal-error" \
| grep -v '^INFO:.*AWSErrorCode=\[400 Bad Request\].*ServiceEndpoint=\[https://.*\.s3\.amazonaws\.com\].*RequestType=\[HeadBucketRequest\]' \
| grep -v "RejectedExecutionException" \
| grep -v "An exception was thrown by an exception handler" \
| grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/exceptions/YarnException" \
| grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration" \
| grep -iq "error"; then
echo "Found error in log files:"
cat $FLINK_DIR/log/*
PASS=""
fi
# Same scan for "exception", with a slightly different whitelist.
if grep -rv "GroupCoordinatorNotAvailableException" $FLINK_DIR/log \
| grep -v "RetriableCommitFailedException" \
| grep -v "NoAvailableBrokersException" \
| grep -v "Async Kafka commit failed" \
| grep -v "DisconnectException" \
| grep -v "AskTimeoutException" \
| grep -v "WARN akka.remote.transport.netty.NettyTransport" \
| grep -v "WARN org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline" \
| grep -v '^INFO:.*AWSErrorCode=\[400 Bad Request\].*ServiceEndpoint=\[https://.*\.s3\.amazonaws\.com\].*RequestType=\[HeadBucketRequest\]' \
| grep -v "RejectedExecutionException" \
| grep -v "An exception was thrown by an exception handler" \
| grep -v "Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.exceptions.YarnException" \
| grep -v "Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.conf.Configuration" \
| grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/exceptions/YarnException" \
| grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration" \
| grep -v "java.lang.Exception: Execution was suspended" \
| grep -v "Caused by: java.lang.Exception: JobManager is shutting down" \
| grep -iq "exception"; then
echo "Found exception in log files:"
cat $FLINK_DIR/log/*
PASS=""
fi
# Anything printed to stdout/stderr of the daemons is suspicious.
if grep -ri "." $FLINK_DIR/log/*.out > /dev/null; then
echo "Found non-empty .out files:"
cat $FLINK_DIR/log/*.out
PASS=""
fi
}
function wait_job_running {
# Poll 'flink list' for up to 10 seconds until a job matching $1
# (typically a job id or name fragment) shows up as running.
# Does not fail if the job never appears; callers must check themselves.
for i in {1..10}; do
JOB_LIST_RESULT=$("$FLINK_DIR"/bin/flink list | grep "$1")
if [[ "$JOB_LIST_RESULT" == "" ]]; then
echo "Job ($1) is not yet running."
else
echo "Job ($1) is running."
break
fi
sleep 1
done
}
function take_savepoint {
    # Trigger a savepoint for job $1, writing it to target directory $2.
    # Arguments are quoted so ids/paths containing spaces survive.
    "$FLINK_DIR"/bin/flink savepoint "$1" "$2"
}
function cancel_job {
    # Cancel the job whose id is $1 (quoted defensively).
    "$FLINK_DIR"/bin/flink cancel "$1"
}
function check_result_hash {
    # Compare the md5 of the sorted concatenation of $2* against $3.
    # $1 - test name (for messages)
    # $2 - output file prefix; '$2*' is expanded as a glob on purpose
    # $3 - expected md5 hex digest
    # Clears PASS and dumps a hexdump on mismatch.
    local name=$1
    local outfile_prefix=$2
    local expected=$3
    local actual
    # 'command -v' instead of capturing its output; LC_ALL=C makes the
    # sort order (and therefore the hash) locale-independent.
    if command -v md5 > /dev/null 2>&1; then
        actual=$(LC_ALL=C sort $outfile_prefix* | md5 -q)
    elif command -v md5sum > /dev/null 2>&1; then
        actual=$(LC_ALL=C sort $outfile_prefix* | md5sum | awk '{print $1}')
    else
        echo "Neither 'md5' nor 'md5sum' binary available."
        exit 2
    fi
    if [[ "$actual" != "$expected" ]]
    then
        echo "FAIL $name: Output hash mismatch. Got $actual, expected $expected."
        PASS=""
        echo "head hexdump of actual:"
        head $outfile_prefix* | hexdump -c
    else
        echo "pass $name"
        # Output files are left behind in /tmp
    fi
}
function check_all_pass {
    # Final verdict: the global PASS flag is cleared by any failed check,
    # so an empty PASS means at least one test failed.
    if [[ -z "$PASS" ]]; then
        echo "One or more tests FAILED."
        exit 1
    fi
    echo "All tests PASS"
}
function s3_put {
    # Upload a local file to S3 using AWS signature v2 request signing.
    # $1 - local file path, $2 - bucket name, $3 - destination key
    # Credentials come from ARTIFACTS_AWS_ACCESS_KEY / ARTIFACTS_AWS_SECRET_KEY.
    local local_file=$1
    local bucket=$2
    local s3_file=$3
    local resource="/${bucket}/${s3_file}"
    local contentType="application/octet-stream"
    local dateValue
    dateValue=$(date -R)
    # The \n sequences are literal here; 'echo -e' below expands them.
    local stringToSign="PUT\n\n${contentType}\n${dateValue}\n${resource}"
    local s3Key=$ARTIFACTS_AWS_ACCESS_KEY
    local s3Secret=$ARTIFACTS_AWS_SECRET_KEY
    local signature
    # Quoted expansions: the original left these unquoted, so an empty
    # secret silently dropped the -hmac argument and the date was re-split.
    signature=$(echo -en "${stringToSign}" | openssl sha1 -hmac "${s3Secret}" -binary | base64)
    curl -X PUT -T "${local_file}" \
        -H "Host: ${bucket}.s3.amazonaws.com" \
        -H "Date: ${dateValue}" \
        -H "Content-Type: ${contentType}" \
        -H "Authorization: AWS ${s3Key}:${signature}" \
        "https://${bucket}.s3.amazonaws.com/${s3_file}"
}
function s3_delete {
    # Delete an object from S3 using AWS signature v2 request signing.
    # $1 - bucket name, $2 - object key
    # Credentials come from ARTIFACTS_AWS_ACCESS_KEY / ARTIFACTS_AWS_SECRET_KEY.
    local bucket=$1
    local s3_file=$2
    local resource="/${bucket}/${s3_file}"
    local contentType="application/octet-stream"
    local dateValue
    dateValue=$(date -R)
    # The \n sequences are literal here; 'echo -e' below expands them.
    local stringToSign="DELETE\n\n${contentType}\n${dateValue}\n${resource}"
    local s3Key=$ARTIFACTS_AWS_ACCESS_KEY
    local s3Secret=$ARTIFACTS_AWS_SECRET_KEY
    local signature
    # Quoted expansions (see s3_put): protects against empty/odd values.
    signature=$(echo -en "${stringToSign}" | openssl sha1 -hmac "${s3Secret}" -binary | base64)
    curl -X DELETE \
        -H "Host: ${bucket}.s3.amazonaws.com" \
        -H "Date: ${dateValue}" \
        -H "Content-Type: ${contentType}" \
        -H "Authorization: AWS ${s3Key}:${signature}" \
        "https://${bucket}.s3.amazonaws.com/${s3_file}"
}
function kill_random_taskmanager {
# Pick one running TaskManager JVM at random and SIGKILL it
# (used by recovery tests to simulate a hard failure).
# NOTE(review): 'sort -R' is a GNU coreutils extension — confirm all
# test environments provide it.
KILL_TM=$(jps | grep "TaskManager" | sort -R | head -n 1 | awk '{print $1}')
kill -9 "$KILL_TM"
echo "TaskManager $KILL_TM killed."
}
function setup_flink_slf4j_metric_reporter() {
    # Copy the slf4j metric reporter jar into the distribution's lib/ and
    # enable it in flink-conf.yaml.
    # $1 - optional reporting interval (default "1 SECONDS")
    local interval="${1:-1 SECONDS}"
    cp "$FLINK_DIR"/opt/flink-metrics-slf4j-*.jar "$FLINK_DIR/lib/"
    set_conf "metrics.reporter.slf4j.class" "org.apache.flink.metrics.slf4j.Slf4jReporter"
    set_conf "metrics.reporter.slf4j.interval" "${interval}"
}
function rollback_flink_slf4j_metric_reporter() {
    # Undo setup_flink_slf4j_metric_reporter: remove the reporter jar from
    # lib/ (the config change is reverted separately via the .bak files).
    rm "$FLINK_DIR"/lib/flink-metrics-slf4j-*.jar
}
function get_metric_processed_records {
    # Print the most recently reported numRecordsIn value for operator $1
    # from the taskexecutor logs, or 0 if the metric has not appeared yet.
    # NOTE(review): $1 is interpolated into the grep pattern verbatim, so
    # regex metacharacters in the operator name will be interpreted.
    local operator=$1
    local n
    # "sed 's/.* //g'" keeps the last whitespace-separated token = the value
    n=$(grep ".General purpose test job.$operator.numRecordsIn:" "$FLINK_DIR"/log/*taskexecutor*.log | sed 's/.* //g' | tail -1)
    if [ -z "$n" ]; then
        n=0
    fi
    echo "$n"
}
function get_num_metric_samples {
    # Print how many numRecordsIn samples operator $1 has reported so far
    # (0 when none). Used to distinguish metrics of a restarted execution.
    local operator=$1
    local n
    n=$(grep ".General purpose test job.$operator.numRecordsIn:" "$FLINK_DIR"/log/*taskexecutor*.log | wc -l)
    if [ -z "$n" ]; then
        n=0
    fi
    # intentionally unquoted: strips the leading padding some wc
    # implementations emit
    echo $n
}
function wait_oper_metric_num_in_records {
    # Block until operator $1 has processed at least $2 records (default
    # 200) in the *current* execution, judged by the slf4j metrics in the
    # logs. $3 optionally gives the metric-sample count observed before the
    # restart; samples up to that count are attributed to the old execution.
    local operator=$1
    local max_num_metrics="${2:-200}"
    local num_metrics
    num_metrics=$(get_num_metric_samples "$operator")
    local old_num_metrics=${3:-${num_metrics}}
    local num_records
    while : ; do
        num_metrics=$(get_num_metric_samples "$operator")
        num_records=$(get_metric_processed_records "$operator")
        # only account for metrics that appeared in the second execution
        if (( old_num_metrics >= num_metrics )) ; then
            num_records=0
        fi
        if (( num_records < max_num_metrics )); then
            # fix: the original message hard-coded "200" even when a
            # different threshold was passed as $2
            echo "Waiting for job to process up to $max_num_metrics records, current progress: $num_records records ..."
            sleep 1
        else
            break
        fi
    done
}
function wait_num_checkpoints {
    # Block until the JobManager log shows that job $1 has completed at
    # least $2 checkpoints.
    local job=$1
    local num_checkpoints=$2
    local n
    echo "Waiting for job ($job) to have at least $num_checkpoints completed checkpoints ..."
    while : ; do
        # take the id of the newest "Completed checkpoint <n>" entry;
        # fix: the original pattern [1-9]* cannot match ids containing a 0
        # (e.g. checkpoint 10), which made this loop hang forever
        n=$(grep -o "Completed checkpoint [1-9][0-9]* for job $job" "$FLINK_DIR"/log/*standalonesession*.log | awk '{print $3}' | tail -1)
        if [ -z "$n" ]; then
            n=0
        fi
        if (( n < num_checkpoints )); then
            sleep 1
        else
            break
        fi
    done
}
# make sure to clean up even in case of failures
function cleanup {
    # Stop all processes first, then remove test artifacts and restore the
    # original configuration BEFORE evaluating the PASS flag.
    # Fix: the original called check_all_pass first; since it exits on
    # failure, the temp data and the modified config were left behind
    # whenever a test failed.
    stop_cluster
    rm -rf "$TEST_DATA_DIR"
    rm -f "$FLINK_DIR"/log/*
    revert_default_config
    check_all_pass
}
trap cleanup EXIT
|
def sort_list(list_of_emails):
    """Sort the given list of e-mail addresses ascending, in place.

    Returns the very same list object that was passed in, matching the
    original in-place behavior.
    """
    list_of_emails[:] = sorted(list_of_emails)
    return list_of_emails


print(sort_list(['bob@example.com', 'alice@example.com', 'jane@google.com', 'mary@example.com']))
<reponame>huangbin082/Bin
package com.leetcode.offer;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class Solution_7Test {
@Test
public void testBuildTree() {
Solution_7 solution_7 = new Solution_7();
int[] ints = new int[]{3,9,20,15,7};
int[] ints2 = new int[]{9,3,15,20,7};
TreeNode treeNode = solution_7.buildTree(ints,ints2);
int i = 0;
}
} |
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-old/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-old/7-512+0+512-N-VB-ADJ-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_and_adjectives_first_half_full --eval_function last_element_eval |
/*
* Copyright (c) 2017, MegaEase
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package operator
import (
"context"
"fmt"
installbase "github.com/megaease/easemeshctl/cmd/client/command/meshinstall/base"
admissionregv1 "k8s.io/api/admissionregistration/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// mutatingWebhookSpec returns an InstallFunc that deploys the mesh
// operator's MutatingWebhookConfiguration. The webhook targets workload
// resources in namespaces labelled as running mesh services, excluding the
// mesh's own namespace and the Kubernetes system namespaces.
func mutatingWebhookSpec(ctx *installbase.StageContext) installbase.InstallFunc {
	mutatingPath := installbase.DefaultMeshOperatorMutatingWebhookPath
	mutatingPort := int32(installbase.DefaultMeshOperatorMutatingWebhookPort)
	mutatingScope := admissionregv1.NamespacedScope
	mutatingSideEffects := admissionregv1.SideEffectClassNoneOnDryRun

	// mutatingWebhookConfig builds the webhook object with the given CA bundle.
	mutatingWebhookConfig := func(caBundle []byte) *admissionregv1.MutatingWebhookConfiguration {
		return &admissionregv1.MutatingWebhookConfiguration{
			ObjectMeta: metav1.ObjectMeta{
				Name:      installbase.DefaultMeshOperatorMutatingWebhookName,
				Namespace: ctx.Flags.MeshNamespace,
			},
			Webhooks: []admissionregv1.MutatingWebhook{
				{
					Name: "mesh-injector.megaease.com",
					NamespaceSelector: &metav1.LabelSelector{
						MatchExpressions: []metav1.LabelSelectorRequirement{
							{
								Key:      "kubernetes.io/metadata.name",
								Operator: metav1.LabelSelectorOpNotIn,
								Values: []string{
									ctx.Flags.MeshNamespace,
									"kube-system",
									"kube-public",
								},
							},
							{
								Key:      "mesh.megaease.com/mesh-service",
								Operator: metav1.LabelSelectorOpExists,
							},
						},
					},
					ClientConfig: admissionregv1.WebhookClientConfig{
						Service: &admissionregv1.ServiceReference{
							Name:      installbase.DefaultMeshOperatorServiceName,
							Namespace: ctx.Flags.MeshNamespace,
							Path:      &mutatingPath,
							Port:      &mutatingPort,
						},
						CABundle: caBundle,
					},
					Rules: []admissionregv1.RuleWithOperations{
						{
							Operations: []admissionregv1.OperationType{
								admissionregv1.Create,
								admissionregv1.Update,
							},
							Rule: admissionregv1.Rule{
								APIGroups:   []string{"", "apps"},
								APIVersions: []string{"v1"},
								Resources: []string{
									"pods",
									"replicasets",
									"deployments",
									"statefulsets",
									"daemonsets",
								},
								Scope: &mutatingScope,
							},
						},
					},
					SideEffects:             &mutatingSideEffects,
					AdmissionReviewVersions: []string{"v1"},
				},
			},
		}
	}

	return func(ctx *installbase.StageContext) error {
		// The CA bundle embedded in the webhook config comes from the
		// operator's certificate secret.
		secret, err := ctx.Client.CoreV1().Secrets(ctx.Flags.MeshNamespace).Get(context.TODO(), installbase.DefaultMeshOperatorSecretName, metav1.GetOptions{})
		if err != nil {
			return err
		}
		certBase64, exists := secret.Data[installbase.DefaultMeshOperatorCertFileName]
		if !exists {
			return fmt.Errorf("key %v in secret %s not found",
				installbase.DefaultMeshOperatorCertFileName,
				installbase.DefaultMeshOperatorSecretName)
		}
		config := mutatingWebhookConfig(certBase64)
		err = installbase.DeployMutatingWebhookConfig(config, ctx.Client, ctx.Flags.MeshNamespace)
		if err != nil {
			// fix: the original message said "create configMap failed",
			// but this step deploys a mutating webhook configuration
			return fmt.Errorf("deploy mutating webhook config failed: %v", err)
		}
		return nil
	}
}
|
import re

# Candidate card numbers: either 13-16 digits each followed by '-' or ' '
# plus a final digit, or the common dddd-dddd-dddd-dddd grouping.
_CARD_PATTERN = re.compile(
    r'\b(?:(?:\d(?:-| )){13,16}\d|\d{4}-\d{4}-\d{4}-\d{4})\b'
)


def detect_credit_cards(text):
    """Return every substring of ``text`` that looks like a credit card number."""
    return _CARD_PATTERN.findall(text)
def get_highest_result(list_in):
    """Return the largest value in ``list_in``.

    Returns 0 for an empty list (preserving the original behavior, where
    the loop body never ran). Fix: the original seeded the running maximum
    with 0, so a list of all-negative numbers incorrectly returned 0.
    """
    if not list_in:
        return 0
    return max(list_in)
#!/bin/sh
set -e
# Temp files for the test fixtures; removed on normal exit, INT and QUIT.
tmp1=`mktemp`
tmp2=`mktemp`
trap "rm -f file.a f1 f2 f3 $tmp1 $tmp2; exit" 0 2 3
###########################################################################
#empty file list
rm -f file.a
# Deliberately names the archive itself as the member to add; exercises
# ar's handling of a (self-referential) quick-append with -q -v.
ar -qv file.a file.a
|
#!/usr/bin/env bash
# Generate HTML documentation and commit to the gh-pages branch
#
## Release Steps
# update package.json with version=$ver
# npm publish --dry-run
# git tag v$ver
# ./make-docs.sh
# git push origin master gh-pages
# npm publish
#
set -e
# Generate the HTML API docs into ./docs from the TypeScript entry point,
# failing the build on any typedoc warning.
node_modules/.bin/typedoc src/index.ts \
--tsconfig tsconfig.build.json \
--treatWarningsAsErrors \
--includeVersion \
--excludeExternals \
--out docs
#########
# Alternative to "git subtree push --prefix"
# Commit a subdirectory to another branch while keeping the current branch clean
# Only uses plumbing commands
# Usage: grease <branch> <dir> <msg>
# Example: grease gh-pages dist v1.0.0
#########
grease() {
    # Commit the contents of <dir> as a new commit on <branch> without
    # touching the current branch, index state for tracked files, or the
    # working tree — git plumbing only.
    # $1 - target branch name, $2 - directory, $3 - commit message
    local TARGET_BRANCH="refs/heads/$1"
    local TARGET_DIR="$2"
    local MESSAGE="$3"
    local parent=""
    if git show-ref --verify --quiet "$TARGET_BRANCH"; then
        parent="-p $TARGET_BRANCH"
    fi
    # Fix: NUL-delimited find/xargs so filenames containing whitespace
    # survive (the original plain pipe word-split them).
    find "$TARGET_DIR" -type f -print0 | xargs -0 git update-index --add
    tree_sha=$(git write-tree --prefix "$TARGET_DIR")
    find "$TARGET_DIR" -type f -print0 | xargs -0 git update-index --force-remove
    # $parent is intentionally unquoted: it expands to "-p <ref>" or nothing.
    commit_sha=$(git commit-tree -m "$MESSAGE" $parent "$tree_sha")
    git update-ref "$TARGET_BRANCH" "$commit_sha"
    echo Committed "$TARGET_DIR" as "$MESSAGE" to "$TARGET_BRANCH"
    echo To undo:
    echo "  git update-ref $TARGET_BRANCH $TARGET_BRANCH~"
}
# Version label for the docs commit: nearest tag plus commit count/hash,
# suffixed with -dirty when the working tree has uncommitted changes.
version=$(git describe --long --tags --dirty)
# Interactive confirmation before committing to gh-pages.
read -p "Commit to gh-pages as $version? Press key to continue.. " -n1 -s
grease gh-pages docs "$version"
|
// Preload for Space: register every image and audio asset with the loader.
SpaceScene.prototype.preload = function() {
    var imageDir = "main/space/assets/";
    var soundDir = "main/space/assets/sfx/";

    // Image keys; each maps to <imageDir><key>.png
    var imageKeys = [
        "spaceshipNormal", "spaceshipCannon", "spaceshipDouble", "spaceshipBoth",
        "bullet", "cannonBullet", "asteroid", "spaceTile", "coin0", "coin1"
    ];
    for (var i = 0; i < imageKeys.length; i++) {
        this.load.image(imageKeys[i], imageDir + imageKeys[i] + ".png");
    }

    // Sound keys with their file extensions; each maps to <soundDir><key>.<ext>
    var soundEntries = [
        ["normalLaser", "wav"], ["cannonLaser", "wav"], ["doubleLaser", "wav"],
        ["explosion", "wav"], ["hurt", "wav"], ["teleport", "wav"],
        ["bigExplosion", "wav"], ["music", "mp3"], ["pencoin", "wav"]
    ];
    for (var j = 0; j < soundEntries.length; j++) {
        this.load.audio(soundEntries[j][0], soundDir + soundEntries[j][0] + "." + soundEntries[j][1]);
    }
};
|
function longestSubstringWithoutRepeat (s) {
  // Sliding window: lastIndex maps each character to (its last position + 1),
  // i.e. the earliest window start that excludes the previous occurrence.
  const lastIndex = new Map();
  let windowStart = 0;
  let best = 0;
  for (let i = 0; i < s.length; i++) {
    const ch = s[i];
    if (lastIndex.has(ch)) {
      // never move the window start backwards
      windowStart = Math.max(windowStart, lastIndex.get(ch));
    }
    best = Math.max(best, i - windowStart + 1);
    lastIndex.set(ch, i + 1);
  }
  return best;
}
<filename>build/esm/component/dynamics/Gate.js
import * as tslib_1 from "tslib";
import { ToneAudioNode } from "../../core/context/ToneAudioNode";
import { GreaterThan } from "../../signal/GreaterThan";
import { Gain } from "../../core/context/Gain";
import { Follower } from "../analysis/Follower";
import { optionsFromArguments } from "../../core/util/Defaults";
import { dbToGain, gainToDb } from "../../core/type/Conversions";
/**
* Gate only passes a signal through when the incoming
* signal exceeds a specified threshold. It uses [[Follower]] to follow the ampltiude
* of the incoming signal and compares it to the [[threshold]] value using [[GreaterThan]].
*
* @example
* import { Gate, UserMedia } from "tone";
* const gate = new Gate(-30, 0.2).toDestination();
* const mic = new UserMedia().connect(gate);
* // the gate will only pass through the incoming
* // signal when it's louder than -30db
*/
// NOTE(review): this file is TypeScript compiler output (see the tslib
// import and the sourceMappingURL footer); edit the .ts source, not this.
var Gate = /** @class */ (function (_super) {
tslib_1.__extends(Gate, _super);
function Gate() {
var _this = _super.call(this, Object.assign(optionsFromArguments(Gate.getDefaults(), arguments, ["threshold", "smoothing"]))) || this;
_this.name = "Gate";
var options = optionsFromArguments(Gate.getDefaults(), arguments, ["threshold", "smoothing"]);
// amplitude follower that smooths the incoming signal level
_this._follower = new Follower({
context: _this.context,
smoothing: options.smoothing,
});
// comparator: outputs the open/closed control value for the gate gain
_this._gt = new GreaterThan({
context: _this.context,
value: dbToGain(options.threshold),
});
_this.input = new Gain({ context: _this.context });
_this._gate = _this.output = new Gain({ context: _this.context });
// connections
_this.input.connect(_this._gate);
// the control signal
_this.input.chain(_this._follower, _this._gt, _this._gate.gain);
return _this;
}
Gate.getDefaults = function () {
return Object.assign(ToneAudioNode.getDefaults(), {
smoothing: 0.1,
threshold: -40
});
};
Object.defineProperty(Gate.prototype, "threshold", {
/**
* The threshold of the gate in decibels
*/
get: function () {
return gainToDb(this._gt.value);
},
set: function (thresh) {
this._gt.value = dbToGain(thresh);
},
enumerable: true,
configurable: true
});
Object.defineProperty(Gate.prototype, "smoothing", {
/**
* The attack/decay speed of the gate. See [[Follower.smoothing]]
*/
get: function () {
return this._follower.smoothing;
},
set: function (smoothingTime) {
this._follower.smoothing = smoothingTime;
},
enumerable: true,
configurable: true
});
// release all owned nodes along with the base class resources
Gate.prototype.dispose = function () {
_super.prototype.dispose.call(this);
this.input.dispose();
this._follower.dispose();
this._gt.dispose();
this._gate.dispose();
return this;
};
return Gate;
}(ToneAudioNode));
export { Gate };
//# sourceMappingURL=Gate.js.map |
<gh_stars>10-100
//============================================================================
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//============================================================================
#ifndef SERVERHANDLER_HPP_
#define SERVERHANDLER_HPP_
#include <deque>
#include <utility>
#include <string>
#include <vector>
#include <ctime>
#include "Defs.hpp"
#include "VReply.hpp"
#include "VTask.hpp"
#include "VInfo.hpp"
#include "VServerSettings.hpp"
#include <QMutex>
class ClientInvoker;
class ServerReply;
class ConnectState;
class NodeObserver;
class ServerHandler;
class ServerComQueue;
class ServerObserver;
class ServerComObserver;
class SuiteFilter;
class UpdateTimer;
class VNodeChange;
class VServer;
class VServerChange;
class VSettings;
class VSuiteNode;
// ServerHandler represents one ecFlow server connection in the UI: it owns
// the client invoker, the local node tree (VServer), the refresh timer and
// the observer lists, and mediates all communication with the server.
class ServerHandler : public QObject
{
Q_OBJECT // inherits from QObject in order to gain signal/slots
friend class ServerDefsAccess;
friend class ServerComQueue;
friend class CommandHandler;
public:
enum Activity {NoActivity,LoadActivity,RescanActivity,DeleteActivity,ClientRecreateActivity};
enum Compatibility {Compatible, Incompatible, CanBeCompatible};
// --- identification / connection info ---
const std::string& name() const {return name_;}
const std::string& host() const {return host_;}
const std::string& longName() const {return longName_;}
const std::string& fullLongName() const {return fullLongName_;}
const std::string& port() const {return port_;}
bool isSsl() const {return ssl_;}
const std::string& user() {return user_;}
void setSsl(bool);
void setUser(const std::string& user);
// --- state queries ---
Activity activity() const {return activity_;}
ConnectState* connectState() const {return connectState_;}
bool communicating() {return communicating_;}
bool isEnabled() const {return !isDisabled();}
bool isDisabled() const;
bool isInLogout() const;
bool readFromDisk() const;
QString uidForServerLogTransfer() const;
int maxSizeForTimelineData() const;
// --- suite filter handling ---
SuiteFilter* suiteFilter() const {return suiteFilter_;}
QString nodeMenuMode() const;
QString defStatusNodeMenuMode() const;
void setSuiteFilterWithOne(VNode*);
void updateSuiteFilter(SuiteFilter*);
void updateSuiteFilterWithDefs();
// --- connection / refresh control ---
void connectServer();
void disconnectServer();
void reset();
void refresh();
void setUpdatingStatus(bool newStatus) {updating_ = newStatus;}
VServer* vRoot() const {return vRoot_;}
SState::State serverState();
NState::State state(bool& isSuspended);
void run(VTask_ptr);
// --- observer registration ---
void addNodeObserver(NodeObserver* obs);
void removeNodeObserver(NodeObserver* obs);
void addServerObserver(ServerObserver* obs);
void removeServerObserver(ServerObserver* obs);
void addServerComObserver(ServerComObserver* obs);
void removeServerComObserver(ServerComObserver* obs);
void confChanged(VServerSettings::Param,VProperty*);
VServerSettings* conf() const {return conf_;}
bool isLocalHost() {return (localHostName_ == host_ || host_ == "localhost");}
// --- static registry of all known servers ---
static void saveSettings();
static const std::vector<ServerHandler*>& servers() {return servers_;}
static ServerHandler* addServer(const std::string &name,const std::string &host, const std::string &port,
const std::string &user,bool ssl);
static void removeServer(ServerHandler*);
static ServerHandler* findServer(const std::string &alias);
void searchBegan();
void searchFinished();
bool updateInfo(int& basePeriod,int& currentPeriod,int &drift,int& toNext);
int currentRefreshPeriod() const;
QDateTime lastRefresh() const {return lastRefresh_;}
int secsSinceLastRefresh() const;
int secsTillNextRefresh() const;
static bool checkNotificationState(const std::string& notifierId);
static ServerHandler* find(const std::string& name);
void writeDefs(const std::string& fileName);
void writeDefs(VInfo_ptr info,const std::string& fileName);
protected:
ServerHandler(const std::string& name,const std::string& host,const std::string& port,
const std::string& user, bool ssl);
~ServerHandler() override;
void logoutAndDelete();
void queueLoggedOut();
//Only friend classes can access it. Practically it means we
//we can only run it through CommandHandler!!!
void runCommand(const std::vector<std::string>& cmd);
void connectToServer();
void setCommunicatingStatus(bool c) {communicating_ = c;}
void clientTaskFinished(VTask_ptr task,const ServerReply& serverReply);
void clientTaskFailed(VTask_ptr task,const std::string& errMsg,const ServerReply& serverReply);
static void checkNotificationState(VServerSettings::Param par);
bool checkRefreshTimerDrift() const;
void refreshScheduled();
void refreshFinished();
// --- connection data ---
std::string name_;
std::string host_;
std::string port_;
std::string user_;
bool ssl_;
ClientInvoker* client_;
std::string longName_;
std::string fullLongName_;
bool updating_;
bool communicating_;
std::vector<NodeObserver*> nodeObservers_;
std::vector<ServerObserver*> serverObservers_;
std::vector<ServerComObserver*> serverComObservers_;
VServer* vRoot_;
//The list of suites the server makes accessible
SuiteFilter* suiteFilter_;
static std::vector<ServerHandler*> servers_;
private Q_SLOTS:
void refreshServerInfo();
void slotNodeChanged(const Node* n, std::vector<ecf::Aspect::Type>);
void slotDefsChanged(std::vector<ecf::Aspect::Type>);
void slotRescanNeed();
private:
void createClient(bool init);
void recreateClient();
void refreshInternal();
void resetFinished();
void resetFailed(const std::string& errMsg);
void clearTree();
void rescanTree(bool needNewSuiteList=true);
void connectionLost(const std::string& errMsg);
bool connectionGained();
void checkServerVersion();
void failedClientServer(const std::string& msg);
void compatibleServer();
void incompatibleServer(const std::string& version);
void sslIncompatibleServer(const std::string& msg);
void sslCertificateError(const std::string& msg);
void updateSuiteFilterWithLoaded(const std::vector<std::string>&);
void updateSuiteFilter(bool needNewSuiteList);
//Handle the refresh timer
void stopRefreshTimer();
void startRefreshTimer();
void updateRefreshTimer();
void driftRefreshTimer();
// --- per-node file fetch helpers ---
void script(VTask_ptr req);
void job(VTask_ptr req);
void jobout(VTask_ptr req);
void jobstatus(VTask_ptr req);
void manual(VTask_ptr req);
defs_ptr defs();
defs_ptr safelyAccessSimpleDefsMembers();
void setActivity(Activity activity);
// --- observer broadcast helpers (pointer-to-member dispatch) ---
typedef void (ServerObserver::*SoMethod)(ServerHandler*);
typedef void (ServerObserver::*SoMethodV1)(ServerHandler*,const VServerChange&);
void broadcast(SoMethod);
void broadcast(SoMethodV1,const VServerChange&);
typedef void (NodeObserver::*NoMethod)(const VNode*);
typedef void (NodeObserver::*NoMethodV1)(const VNode*,const std::vector<ecf::Aspect::Type>&,const VNodeChange&);
typedef void (NodeObserver::*NoMethodV2)(const VNode*,const VNodeChange&);
void broadcast(NoMethod,const VNode*);
void broadcast(NoMethodV1,const VNode*,const std::vector<ecf::Aspect::Type>&,const VNodeChange&);
void broadcast(NoMethodV2,const VNode*,const VNodeChange&);
typedef void (ServerComObserver::*SocMethod)(ServerHandler*);
void broadcast(SocMethod);
void saveConf();
void loadConf();
int truncatedLinesFromServer(const std::string& txt) const;
// defsMutex_ guards defs_ (see defs()/safelyAccessSimpleDefsMembers())
QMutex defsMutex_;
defs_ptr defs_;
ServerComQueue* comQueue_;
//std::string targetNodeNames_; // used when building up a command in ServerHandler::command
//std::string targetNodeFullNames_; // used when building up a command in ServerHandler::command
UpdateTimer* refreshTimer_;
QDateTime lastRefresh_;
Activity activity_;
Compatibility compatibility_;
ConnectState* connectState_;
SState::State prevServerState_;
VServerSettings* conf_;
static std::string localHostName_;
};
#endif
|
<filename>Modules/ThirdParty/SiftFast/src/siftmex.cpp
// Copyright (C) <EMAIL>), 2008-2009
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// at your option) any later version.
//
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//Lesser GNU General Public License for more details.
//
//You should have received a copy of the GNU Lesser General Public License
//along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "mex.h"
#include <stdio.h>
#include <string.h>
#include <list>
#include <string>
using namespace std;
#ifdef DVPROFILE
#include "profiler.h"
#endif
#include "siftfast.h"
typedef unsigned long long u64;
// [frames,descr]=sift_mex(I,...)
// takes an image and outputs frames and a descriptor
// MEX entry point: [frames,descr] = sift_mex(I)
// Runs SIFT on a grayscale double image and returns a 4xN matrix of
// keypoint frames (col,row,scale,orientation) and optionally a 128xN
// matrix of descriptors.
void mexFunction(int nout, mxArray *out[], int nin, const mxArray *in[])
{
if(nin == 0 || !mxIsDouble(in[0]) )
mexErrMsgTxt("sift_fast takes 1 argument: a grayscale image in real format ");
const int* dimensions = mxGetDimensions(in[0]);
Image image = CreateImageFromMatlabData(mxGetPr(in[0]), dimensions[0], dimensions[1]);
Keypoint keypts = GetKeypoints(image);
// write the keys to the output
// first pass over the linked list: count the keypoints
int numkeys = 0;
Keypoint key = keypts;
while(key) {
numkeys++;
key = key->next;
}
double* frames = NULL, *descr = NULL;
if( nout > 0 ) {
out[0] = mxCreateDoubleMatrix(4,numkeys,mxREAL);
frames = mxGetPr(out[0]);
}
if( nout > 1 ) {
out[1] = mxCreateDoubleMatrix(128,numkeys,mxREAL);
descr = mxGetPr(out[1]);
}
// second pass: copy frames/descriptors column by column
key = keypts;
while(key) {
if( frames != NULL ) {
frames[0] = key->col; frames[1] = key->row; frames[2] = key->scale; frames[3] = key->ori;
frames += 4;
}
if( descr != NULL ) {
for(int i = 0; i < 128; ++i)
descr[i] = (double)key->descrip[i];
descr += 128;
}
key = key->next;
}
FreeKeypoints(keypts);
#ifdef DVPROFILE
DVProfWrite("prof.txt");
DVProfClear();
#endif
// release the library's internal image allocations
DestroyAllImages();
}
|
def compare_strings(str1, str2):
    """Return True when the two strings are equal, False otherwise."""
    are_equal = not (str1 != str2)
    return are_equal
<reponame>andromeda/mir
// Test fixture: overwrites the `length` and `name` properties of the global
// `propertyIsEnumerable` function reference with fresh empty objects.
// NOTE(review): presumably exercises a transform/analysis tool's handling of
// property writes on built-ins — confirm against the mir test suite.
propertyIsEnumerable.length = {};
propertyIsEnumerable.name = {};
|
<reponame>favourch/football-data-dot-org-visualiser<gh_stars>0
// API token sent with every request (football-data.org X-Auth-Token header)
var api_token = "<KEY>";
// attach the auth header to all subsequent jQuery AJAX calls
$.ajaxSetup({
  headers: {
    "X-Auth-Token": api_token
  }
});
// jQuery handles for the four top-level views; only one is visible at a time
var competitions = $("#competitions");
// variable to hold competition view
var competition = $("#competition");
// variable to hold team view
var team = $("#team");
// variable to hold player view
var player = $("#player");
// navigation state, remembered so the breadcrumb links can rebuild the
// previously shown page
var current_competition_id = 0;
// variable to hold current competition name
var current_competition_name = "";
// variable to hold current team id
var current_team_id = 0;
// variable to hold current team name
var current_team_name = "";
// players of the currently displayed team, indexed by their $.each key
var players = [];
// Extracts the path segment at `index` from an API resource URL, after
// stripping a leading "http://" or "https://" scheme when present.
function get_id(url, index) {
  var withoutScheme = url.replace(/^https?:\/\//, '');
  var segments = withoutScheme.split('/');
  return segments[index];
}
// Renders the selected player's details into the #player-info card body.
// `data` is one entry of the global `players` array (built by add_team_players).
function add_player_info(data) {
  // variable to hold player info card body
  var player_info_card_body = $("#player-info").find(".card-body");
  // whole years remaining on the player's contract
  var contract_years = moment.duration(moment(data.contract).diff(moment())).years();
  // months remaining beyond the whole years
  var contract_months = moment.duration(moment(data.contract).diff(moment())).months();
  // replace the card body with a definition list of player facts
  player_info_card_body.empty().append(
    "<dl class='row mb-0'>" +
    "<dt class='col-sm-2'>Name</dt>" + "<dd class='col-sm-10'>" + data.name + "</dd>" +
    "<dt class='col-sm-2'>Date of Birth</dt>" + "<dd class='col-sm-10'>" + moment(data.dob).format("DD/MM/YYYY") + " (" + moment().diff(data.dob, 'years') + " years old)</dd>" +
    "<dt class='col-sm-2'>Nationality</dt>" + "<dd class='col-sm-10'>" + data.nationality + "</dd>" +
    "<dt class='col-sm-2'>Position</dt>" + "<dd class='col-sm-10'>" + data.position + "</dd>" +
    "<dt class='col-sm-2'>Number</dt>" + "<dd class='col-sm-10'>" + data.number + "</dd>" +
    "<dt class='col-sm-2'>Contract until</dt>" + "<dd class='col-sm-10'>" + moment(data.contract).format("DD/MM/YYYY") + " (" +
    (contract_years === 0 ? "" : contract_years + (contract_years > 1 ? " years " : " year " )) +
    (contract_months === 0 ? "" : contract_months + (contract_months > 1 ? " months " : " month " )) +
    " left)</dd>" +
    "</dl>"
  );
}
// Switches the UI to the player view for players[player_id] and points the
// breadcrumb back at the currently selected team.
function build_player_page(player_id) {
  // hide competitions
  competitions.hide();
  // hide competition
  competition.hide();
  // fade out team
  team.fadeOut();
  // breadcrumb back-link text
  $("#player-breadcrumb").find("a").html("Back to " + current_team_name);
  // add player info
  add_player_info(players[player_id]);
  // fade in player
  player.fadeIn();
}
// Draws the team-analysis chart — a horizontal bar chart of squad
// nationalities — into #team-analysis-canvas using Chart.js.
// NOTE(review): team_info and team_fixtures are accepted but never used here —
// presumably reserved for further analyses; confirm before removing.
function add_team_analysis(team_info, team_players, team_fixtures) {
  // variable to hold team analysis canvas container
  var team_analysis_canvas = $("#team-analysis").find(".card-body").find("#team-analysis-canvas");
  // reset the container and insert a fresh canvas element
  team_analysis_canvas.empty().append(
    "<canvas id='nationalities-chart'></canvas>"
  );
  // one entry per player: that player's nationality
  var nationalities = [];
  // for each player
  $.each(team_players.players, function(key, val) {
    // add player nationality to nationalities array
    nationalities.push(val.nationality);
  });
  // nationality -> number of players with that nationality
  var nationalities_counts = {};
  // for each nationality
  $.each(nationalities, function(key, val) {
    // first occurrence initialises the count, later ones increment it
    if (!nationalities_counts.hasOwnProperty(val)) {
      nationalities_counts[val] = 1;
    } else {
      nationalities_counts[val]++;
    }
  });
  // chart labels (nationality names)
  var nationality_keys = [];
  // chart values (player counts), index-aligned with nationality_keys
  var nationality_vals = [];
  // for each nationality count
  $.each(nationalities_counts, function(key, val) {
    // add key to nationality keys array
    nationality_keys.push(key);
    // add the matching count to the values array
    nationality_vals.push(val);
  });
  // 2d drawing context for Chart.js
  var ctx = document.getElementById("nationalities-chart").getContext('2d');
  // initialize chart (one bar color per nationality via Please.js)
  var nationalities_chart = new Chart(ctx, {
    type: 'horizontalBar',
    data: {
      labels: nationality_keys,
      datasets: [{
        data: nationality_vals,
        backgroundColor: Please.make_color({ base_color: "DarkTurquoise", colors_returned: nationality_keys.length }),
        borderWidth: 1,
        borderColor: "#343a40"
      }]
    },
    options: {
      legend: {
        display: false
      },
      title: {
        display: true,
        fontSize: 16,
        fontFamily: "'Work Sans', sans-serif",
        fontColor: "#17a2b8",
        text: 'Nationalities'
      },
      scales: {
        yAxes: [{
          gridLines: {
            color: "rgba(23, 162, 184, 0.2)",
            zeroLineColor: "rgba(23, 162, 184, 0.2)"
          },
          ticks: {
            fontFamily: "'Work Sans', sans-serif",
            fontColor: "#17a2b8"
          }
        }],
        xAxes: [{
          gridLines: {
            color: "rgba(23, 162, 184, 0.2)",
            zeroLineColor: "rgba(23, 162, 184, 0.2)"
          },
          ticks: {
            fontFamily: "'Work Sans', sans-serif",
            fontColor: "#17a2b8",
            beginAtZero: true,
            // only label whole numbers — player counts are integers
            userCallback: function(label, index, labels) {
              if (Math.floor(label) === label) {
                return label;
              }
            }
          }
        }]
      },
      responsive: true
    }
  });
  // show team analysis canvas
  team_analysis_canvas.show();
}
// Formats a fixture's score for display:
//   IN_PLAY                        -> "h - a" (live score)
//   SCHEDULED / TIMED / POSTPONED  -> "-" (no score yet)
//   FINISHED                       -> full-time line, plus half-time /
//                                     extra-time lines when present
// Any other status yields an empty string.
function get_fixture_result(fixture) {
  var status = fixture.status;
  var result = "";
  if (status === "IN_PLAY") {
    result = fixture.result.goalsHomeTeam + " - " + fixture.result.goalsAwayTeam;
  } else if (status === "SCHEDULED" || status === "TIMED" || status === "POSTPONED") {
    result = "-";
  } else if (status === "FINISHED") {
    result = "FT: " + fixture.result.goalsHomeTeam + " - " + fixture.result.goalsAwayTeam + "<br>";
    if (fixture.result.halfTime) {
      result += "HT: " + fixture.result.halfTime.goalsHomeTeam + " - " + fixture.result.halfTime.goalsAwayTeam + "<br>";
    }
    if (fixture.result.extraTime) {
      result += "ET: " + fixture.result.extraTime.goalsHomeTeam + " - " + fixture.result.extraTime.goalsAwayTeam;
    }
  }
  return result;
}
// Renders fixtures into the in-play / scheduled / finished tables of either
// the competition view or the team view (`type` selects the target tables),
// then shows a fallback message for every table that received no rows.
// NOTE(review): statuses other than IN_PLAY/SCHEDULED/FINISHED (e.g. TIMED,
// POSTPONED, CANCELED) append to an empty jQuery set and are silently
// dropped — confirm this is intended.
function add_fixtures(data, type) {
  // destination tbody for the current fixture; reassigned per status below
  var fixtures_tbody = $();
  // empty the competition tables
  $("#competition-fixtures-in-play, #competition-fixtures-scheduled, #competition-fixtures-finished").find("tbody").empty();
  // empty the team tables
  $("#team-fixtures-in-play, #team-fixtures-scheduled, #team-fixtures-finished").find("tbody").empty();
  // "did this table receive at least one row?" flags
  var competition_fixtures_in_play = false; var competition_fixtures_scheduled = false; var competition_fixtures_finished = false;
  var team_fixtures_in_play = false; var team_fixtures_scheduled = false; var team_fixtures_finished = false;
  // for each fixture
  $.each(data.fixtures, function(key, val) {
    // route the fixture to its table and mark that table non-empty
    if (type === "competition" && val.status === "IN_PLAY") { fixtures_tbody = $("#competition-fixtures-in-play").find("tbody"); competition_fixtures_in_play = true; }
    if (type === "competition" && val.status === "SCHEDULED") { fixtures_tbody = $("#competition-fixtures-scheduled").find("tbody"); competition_fixtures_scheduled = true; }
    if (type === "competition" && val.status === "FINISHED") { fixtures_tbody = $("#competition-fixtures-finished").find("tbody"); competition_fixtures_finished = true; }
    if (type === "team" && val.status === "IN_PLAY") { fixtures_tbody = $("#team-fixtures-in-play").find("tbody"); team_fixtures_in_play = true; }
    if (type === "team" && val.status === "SCHEDULED") { fixtures_tbody = $("#team-fixtures-scheduled").find("tbody"); team_fixtures_scheduled = true; }
    if (type === "team" && val.status === "FINISHED") { fixtures_tbody = $("#team-fixtures-finished").find("tbody"); team_fixtures_finished = true; }
    // home team id, quoted for the inline onclick handler
    var function_parameters_home = "\"" + get_id(val._links.homeTeam.href, 3) + "\"";
    // away team id, quoted for the inline onclick handler
    var function_parameters_away = "\"" + get_id(val._links.awayTeam.href, 3) + "\"";
    // append one table row: date, home team link, score, away team link
    fixtures_tbody.append(
      "<tr>" +
      "<td class='border-info align-middle'>" +
      moment(new Date(val.date)).format("DD/MM/YYYY<br>HH:mm") +
      "</td>" +
      "<td class='border-info align-middle'>" +
      "<a class='text-info' href='#' onclick='event.preventDefault(); build_team_page(" + function_parameters_home + ");'>" + val.homeTeamName + "</a>" +
      "</td>" +
      "<td class='border-info align-middle'>" +
      get_fixture_result(val) +
      "</td>" +
      "<td class='border-info align-middle'>" +
      "<a class='text-info' href='#' onclick='event.preventDefault(); build_team_page(" + function_parameters_away + ");'>" + val.awayTeamName + "</a>" +
      "</td>" +
      "</tr>"
    );
  });
  // for each empty table: hide it and show an explanatory message instead
  if (type === "competition" && competition_fixtures_in_play === false) {
    $("#competition-fixtures-in-play").find(".table-responsive").hide();
    $("#competition-fixtures-in-play-error").html("There are currently no in-play fixtures for this competition.").show();
  }
  if (type === "competition" && competition_fixtures_scheduled === false) {
    $("#competition-fixtures-scheduled").find(".table-responsive").hide();
    $("#competition-fixtures-scheduled-error").html("There are no scheduled fixtures for this competition.").show();
  }
  if (type === "competition" && competition_fixtures_finished === false) {
    $("#competition-fixtures-finished").find(".table-responsive").hide();
    $("#competition-fixtures-finished-error").html("There are no finished fixtures for this competition.").show();
  }
  if (type === "team" && team_fixtures_in_play === false) {
    $("#team-fixtures-in-play").find(".table-responsive").hide();
    $("#team-fixtures-in-play-error").html("There are currently no in-play fixtures for this team.").show();
  }
  if (type === "team" && team_fixtures_scheduled === false) {
    $("#team-fixtures-scheduled").find(".table-responsive").hide();
    $("#team-fixtures-scheduled-error").html("There are no scheduled fixtures for this team.").show();
  }
  if (type === "team" && team_fixtures_finished === false) {
    $("#team-fixtures-finished").find(".table-responsive").hide();
    $("#team-fixtures-finished-error").html("There are no finished fixtures for this team.").show();
  }
}
// GET the fixture list for a team; resolves with the parsed response body,
// or with the numeric HTTP status code when the request fails.
function get_team_fixtures(team_id) {
  var endpoint = "http://api.football-data.org/v1/teams/" + team_id + "/fixtures";
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Renders the team squad lists (grouped by position) and fills the global
// `players` array with one summary object per player, indexed by the $.each
// key — which is exactly the index build_player_page looks up. Also appends
// squad size and average age to the #team-info facts list.
// Fixes: removed the dead local `player_id` (declared and incremented but
// never read) and added the missing statement semicolons on the counters.
function add_team_players(team_players) {
  // clear the four position lists
  $("#goalkeepers, #defenders, #midfielders, #attackers").next().empty();
  // reset the global players array
  players = [];
  // position counters and running age total (for the average-age stat)
  var goalkeepers_count = 0; var defenders_count = 0; var midfielders_count = 0; var attackers_count = 0; var years_total = 0;
  // for each team player
  $.each(team_players.players, function(key, val) {
    // summary object consumed later by add_player_info
    var player = {
      name: val.name,
      dob: val.dateOfBirth,
      nationality: val.nationality,
      position: val.position,
      number: val.jerseyNumber,
      contract: val.contractUntil
    };
    // accumulate the player's age in whole years
    years_total += moment().diff(val.dateOfBirth, 'years');
    // bucket the player by position keyword
    if ((val.position === "Keeper")) {
      $("#goalkeepers").next().append(
        "<a class='btn btn-info bg-transparent text-info mt-0 mr-2 mb-2 ml-0' href='#' onclick='event.preventDefault(); build_player_page(" + "\"" + key + "\"" + ");'>" + val.name + "</a>"
      );
      goalkeepers_count += 1;
    } else if ((val.position).indexOf("Back") >= 0) {
      $("#defenders").next().append(
        "<a class='btn btn-info bg-transparent text-info mt-0 mr-2 mb-2 ml-0' href='#' onclick='event.preventDefault(); build_player_page(" + "\"" + key + "\"" + ");'>" + val.name + "</a>"
      );
      defenders_count += 1;
    } else if ((val.position).indexOf("Midfield") >= 0) {
      $("#midfielders").next().append(
        "<a class='btn btn-info bg-transparent text-info mt-0 mr-2 mb-2 ml-0' href='#' onclick='event.preventDefault(); build_player_page(" + "\"" + key + "\"" + ");'>" + val.name + "</a>"
      );
      midfielders_count += 1;
    } else if ((val.position).indexOf("Wing") >= 0 || (val.position).indexOf("Forward") >= 0) {
      $("#attackers").next().append(
        "<a class='btn btn-info bg-transparent text-info mt-0 mr-2 mb-2 ml-0' href='#' onclick='event.preventDefault(); build_player_page(" + "\"" + key + "\"" + ");'>" + val.name + "</a>"
      );
      attackers_count += 1;
    }
    // add player to players array
    players.push(player);
  });
  // heading counts with singular/plural labels
  $("#goalkeepers").html((goalkeepers_count === 1) ? goalkeepers_count + " Goalkeeper" : goalkeepers_count + " Goalkeepers");
  $("#defenders").html((defenders_count === 1) ? defenders_count + " Defender" : defenders_count + " Defenders");
  $("#midfielders").html((midfielders_count === 1) ? midfielders_count + " Midfielder" : midfielders_count + " Midfielders");
  $("#attackers").html((attackers_count === 1) ? attackers_count + " Attacker" : attackers_count + " Attackers");
  // squad size fact
  $("#team-info").find(".mb-0").append(
    "<dt class='col-md-12 col-lg-3'>No. of players</dt>" + "<dd class='col-md-12 col-lg-9'>" + team_players.count + "</dd>"
  );
  // average squad age, two decimal places
  var average_age = (years_total / players.length).toFixed(2);
  $("#team-info").find(".mb-0").append(
    "<dt class='col-md-12 col-lg-3'>Average age</dt>" + "<dd class='col-md-12 col-lg-9'>" + average_age + "</dd>"
  );
}
// GET the squad for a team; resolves with the parsed response body, or with
// the numeric HTTP status code when the request fails.
function get_team_players(team_id) {
  var endpoint = "http://api.football-data.org/v1/teams/" + team_id + "/players";
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Renders the team's crest and name into #team-info; a placeholder image is
// substituted via onerror when the crest URL fails to load.
function add_team_info(team_info) {
  // variable to hold team info card body
  var team_info_card_body = $("#team-info").find(".card-body");
  // replace the card body with crest + facts list
  team_info_card_body.empty().append(
    "<div class='container'>" +
    "<div class='row'>" +
    "<div class='col-lg-3 text-center'>" +
    "<img src='" + team_info.crestUrl + "' onerror=\"this.src='http://via.placeholder.com/150x150'\" width='150px'/>" +
    "</div>" +
    "<div class='col-lg-9'>" +
    "<dl class='row mb-0'>" +
    "<dt class='col-md-12 col-lg-3'>Team name</dt>" + "<dd class='col-md-12 col-lg-9'>" + team_info.name + "</dd>" +
    "</dl>" +
    "</div>" +
    "</div>" +
    "</div>"
  );
}
// GET a team's basic data; resolves with the parsed response body, or with
// the numeric HTTP status code when the request fails.
function get_team_info(team_id) {
  var endpoint = "http://api.football-data.org/v1/teams/" + team_id;
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Switches the UI to the team view: fetches team info, squad and fixtures in
// parallel, renders each section (or its error message when the getter
// resolved with an HTTP error status code), then fades the view in.
function build_team_page(team_id) {
  // hide competitions
  competitions.hide();
  // fade out competition
  competition.fadeOut();
  // hide player
  player.hide();
  // breadcrumb back-link text
  $("#team-breadcrumb").find("a").html("Back to " + current_competition_name);
  $.when(
    // get team info
    get_team_info(team_id),
    // get team players
    get_team_players(team_id),
    // get team fixtures
    get_team_fixtures(team_id)
  ).then(
    function(
      // variable to hold team info
      team_info,
      // variable to hold team players
      team_players,
      // variable to hold team fixtures
      team_fixtures
    ) {
      // remember navigation state for the breadcrumbs
      current_team_id = team_id;
      // set current team name
      current_team_name = team_info.name;
      // set html
      $("#team-nav").find(".card-header").html(current_team_name);
      // the getters resolve with a numeric HTTP status on failure; $.inArray
      // against the known error codes detects that case
      if ($.inArray(team_info, [400, 403, 404, 429]) !== -1) {
        $("#team-info").find(".card-body").html("The information is not available for this team.");
      } else {
        add_team_info(team_info);
      }
      // add team players (and the analysis chart derived from them)
      if (($.inArray(team_players, [400, 403, 404, 429]) !== -1) || (team_players.players.length === 0)) {
        $("#team-players").find("dl, hr").hide();
        $("#team-players-error").html("The players are not available for this team.").show();
        $("#team-analysis-canvas").hide();
        $("#team-analysis-error").html("The analysis is not possible because the players are not available for this team.").show();
      } else {
        $("#team-players-error").hide();
        $("#team-players").find("dl, hr").fadeIn();
        add_team_players(team_players);
        $("#team-analysis-error").hide();
        add_team_analysis(team_info, team_players, team_fixtures);
      }
      // add team fixtures
      if ($.inArray(team_fixtures, [400, 403, 404, 429]) !== -1) {
        $("#team-fixtures-in-play, #team-fixtures-scheduled, #team-fixtures-finished").find(".table-responsive").hide();
        $("#team-fixtures-in-play-error, #team-fixtures-scheduled-error, #team-fixtures-finished-error").html("The fixtures are not available for this team.").show();
      } else {
        $("#team-fixtures-in-play-error, #team-fixtures-scheduled-error, #team-fixtures-finished-error").hide();
        $("#team-fixtures-in-play, #team-fixtures-scheduled, #team-fixtures-finished").find(".table-responsive").fadeIn();
        add_fixtures(team_fixtures, "team");
      }
      // fade in team
      team.fadeIn();
    });
}
// GET the fixture list for a competition; resolves with the parsed response
// body, or with the numeric HTTP status code when the request fails.
function get_competition_fixtures(competition_id) {
  var endpoint = "http://api.football-data.org/v1/competitions/" + competition_id + "/fixtures";
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Renders the league table into #competition-standings. Handles the flat
// `data.standing` array shape (league competitions); the `data.standings`
// branch (grouped/cup shape) is an unimplemented stub.
function add_competition_standings(data) {
  // variable to hold competition standings tbody
  var competition_standings_tbody = $("#competition-standings").find(".card-body").find("tbody");
  // league competitions expose a flat "standing" array
  if (data.standing) {
    // empty competition standings tbody
    competition_standings_tbody.empty();
    // for each standing
    $.each(data.standing, function(key, val) {
      // team id, quoted for the inline onclick handler
      var function_parameters = "\"" + get_id(val._links.team.href, 3) + "\"";
      // append one row: position, crest + team link, then the season stats
      competition_standings_tbody.append(
        "<tr>" +
        "<td class='border-info align-middle'>" + val.position + "</td>" +
        "<td class='border-info text-left align-middle'>" +
        "<img class='mr-2' src='" + val.crestURI + "' onerror=\"this.src='http://via.placeholder.com/20x20'\" width='20' />" +
        "<a class='text-info' href='#' onclick='event.preventDefault(); build_team_page(" + function_parameters + ");'>" + val.teamName + "</a>" +
        "</td>" +
        "<td class='border-info align-middle'>" + val.playedGames + "</td>" +
        "<td class='border-info align-middle'>" + val.wins + "</td>" +
        "<td class='border-info align-middle'>" + val.draws + "</td>" +
        "<td class='border-info align-middle'>" + val.losses + "</td>" +
        "<td class='border-info align-middle'>" + val.goals + "</td>" +
        "<td class='border-info align-middle'>" + val.goalsAgainst + "</td>" +
        "<td class='border-info align-middle'>" + val.goalDifference + "</td>" +
        "<td class='border-info align-middle'>" + val.points + "</td>" +
        "</tr>"
      );
    });
  } else if (data.standings) {
    // TODO: grouped standings (cup competitions) are not rendered yet
    /*
    competition_standings_tbody.empty();
    $.each(data.standings, function(key, val) {
    });
    */
  }
}
// GET a competition's league table; resolves with the parsed response body,
// or with the numeric HTTP status code when the request fails.
function get_competition_standings(competition_id) {
  var endpoint = "http://api.football-data.org/v1/competitions/" + competition_id + "/leagueTable";
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Renders one button per team in the competition into #competition-teams;
// clicking a button opens that team's page.
function add_competition_teams(data) {
  // variable to hold competition teams card body
  var competition_teams_card_body = $("#competition-teams").find(".card-body");
  // empty competition teams card body
  competition_teams_card_body.empty();
  // for each team
  $.each(data.teams, function(key, val) {
    // team id, quoted for the inline onclick handler
    var function_parameters = "\"" + get_id(val._links.self.href, 3) + "\"";
    // append content
    competition_teams_card_body.append(
      "<a class='btn btn-info bg-transparent text-info mt-0 mr-2 mb-2 ml-0' href='#' onclick='event.preventDefault(); build_team_page(" + function_parameters + ");'>" + val.name + "</a>"
    );
  });
}
// GET the teams of a competition; resolves with the parsed response body, or
// with the numeric HTTP status code when the request fails.
function get_competition_teams(competition_id) {
  var endpoint = "http://api.football-data.org/v1/competitions/" + competition_id + "/teams";
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Renders the competition's summary facts into #competition-info.
function add_competition_info(data) {
  // variable to hold competition info card body
  var competition_info_card_body = $("#competition-info").find(".card-body");
  // replace the card body with a definition list of competition facts
  competition_info_card_body.empty().append(
    "<dl class='row mb-0'>" +
    "<dt class='col-sm-5 col-md-4 col-lg-3 col-xl-2'>League name</dt>" + "<dd class='col-sm-7 col-md-8 col-lg-9 col-xl-10'>" + data.caption + "</dd>" +
    "<dt class='col-sm-5 col-md-4 col-lg-3 col-xl-2'>No. of teams</dt>" + "<dd class='col-sm-7 col-md-8 col-lg-9 col-xl-10'>" + data.numberOfTeams + "</dd>" +
    "<dt class='col-sm-5 col-md-4 col-lg-3 col-xl-2'>Current match day</dt>" + "<dd class='col-sm-7 col-md-8 col-lg-9 col-xl-10'>" + data.currentMatchday + "</dd>" +
    "<dt class='col-sm-5 col-md-4 col-lg-3 col-xl-2'>No. of match days</dt>" + "<dd class='col-sm-7 col-md-8 col-lg-9 col-xl-10'>" + data.numberOfMatchdays + "</dd>" +
    "<dt class='col-sm-5 col-md-4 col-lg-3 col-xl-2'>No. of games</dt>" + "<dd class='col-sm-7 col-md-8 col-lg-9 col-xl-10'>" + data.numberOfGames + "</dd>" +
    "</dl>"
  );
}
// GET a competition's basic data; resolves with the parsed response body, or
// with the numeric HTTP status code when the request fails.
function get_competition_info(competition_id) {
  var endpoint = "http://api.football-data.org/v1/competitions/" + competition_id;
  return $.ajax({ url: endpoint })
    .then(function(data) { return data })
    .catch(function(xhr) { return xhr.status });
}
// Switches the UI to the competition view: fetches info, teams, standings and
// fixtures in parallel, renders each section (or its error message when the
// getter resolved with an HTTP error status code), then fades the view in.
function build_competition_page(competition_id) {
  // fade out competitions
  competitions.fadeOut();
  // fade out team
  team.fadeOut();
  // hide player
  player.hide();
  $.when(
    // get competition info
    get_competition_info(competition_id),
    // get competition teams
    get_competition_teams(competition_id),
    // get competition standings
    get_competition_standings(competition_id),
    // get competition fixtures
    get_competition_fixtures(competition_id)
  ).then(
    function(
      // variable to hold competition info
      competition_info,
      // variable to hold competition teams
      competition_teams,
      // variable to hold competition standings
      competition_standings,
      // variable to hold competition fixtures
      competition_fixtures
    ) {
      // remember navigation state for the breadcrumbs
      current_competition_id = competition_id;
      // set current competition name
      current_competition_name = competition_info.caption;
      // set html
      $("#competition-nav").find(".card-header").html(current_competition_name);
      // the getters resolve with a numeric HTTP status on failure; $.inArray
      // against the known error codes detects that case
      if ($.inArray(competition_info, [400, 403, 404, 429]) !== -1) {
        $("#competition-info").find(".card-body").html("The information is not available for this competition.");
      } else {
        add_competition_info(competition_info);
      }
      // add competition teams
      if ($.inArray(competition_teams, [400, 403, 404, 429]) !== -1) {
        $("#competition-teams").find(".card-body").html("<div class='text-left'>The teams are not available for this competition.</div>");
      } else {
        add_competition_teams(competition_teams);
      }
      // add competition standings
      if ($.inArray(competition_standings, [400, 403, 404, 429]) !== -1) {
        $("#competition-standings").find(".table-responsive").hide();
        $("#competition-standings-error").html("The standings are not available for this competition.").show();
      } else {
        $("#competition-standings-error").hide();
        $("#competition-standings").find(".table-responsive").fadeIn();
        add_competition_standings(competition_standings);
      }
      // add competition fixtures
      if ($.inArray(competition_fixtures, [400, 403, 404, 429]) !== -1) {
        $("#competition-fixtures-in-play, #competition-fixtures-scheduled, #competition-fixtures-finished").find(".table-responsive").hide();
        $("#competition-fixtures-in-play-error, #competition-fixtures-scheduled-error, #competition-fixtures-finished-error").html("The fixtures are not available for this competition.").show();
      } else {
        $("#competition-fixtures-in-play-error, #competition-fixtures-scheduled-error, #competition-fixtures-finished-error").hide();
        $("#competition-fixtures-in-play, #competition-fixtures-scheduled, #competition-fixtures-finished").find(".table-responsive").fadeIn();
        add_fixtures(competition_fixtures, "competition");
      }
      // fade in competition
      competition.fadeIn();
    });
}
// Builds the top-level competitions overview: hides the other views, fetches
// all competitions and renders one button per competition.
function build_competitions_page() {
  // fade out competition
  competition.fadeOut();
  // hide team
  team.hide();
  // hide player
  player.hide();
  // variable to hold competitions card body
  var competitions_card_body = competitions.find(".card-body");
  // initialize ajax call
  $.ajax({
    // URL to GET data from
    url: "http://api.football-data.org/v1/competitions/"
  }).then(function (data) {
    // empty competitions card body
    competitions_card_body.empty();
    // for each competition
    $.each(data, function(key, val) {
      // competition id, quoted for the inline onclick handler
      var competition_id = "\"" + get_id(val._links.self.href, 3) + "\"";
      // append one button per competition (label + team count badge)
      competitions_card_body.append(
        "<a href='#' class='btn btn-info bg-transparent text-info m-2' onclick='event.preventDefault(); build_competition_page(" + competition_id + ");'>" +
        val.caption + " <span class='badge badge-info text-dark'>" + val.numberOfTeams + "</span>" +
        "</a>"
      );
    });
    // fade in competitions
    competitions.fadeIn();
  }).catch(function(xhr) {
    // Bug fix: xhr.responseJSON is undefined on network errors and non-JSON
    // error bodies, so dereferencing .error unguarded threw a TypeError
    // inside the handler; fall back to a generic message instead.
    var message = (xhr && xhr.responseJSON && xhr.responseJSON.error) ? xhr.responseJSON.error : "Unable to load competitions.";
    // append content
    competitions_card_body.append("<span class='text-info'>" + message + "</span>");
    // fade in competitions
    competitions.fadeIn();
  });
}
// Breadcrumb click handlers: each rebuilds the parent view using the
// navigation state remembered by the build_* functions.
$("#competition-breadcrumb").find("a").click(function(e) {
  e.preventDefault();
  // build competitions page
  build_competitions_page();
});
$("#team-breadcrumb").find("a").click(function(e) {
  e.preventDefault();
  // build competition page
  build_competition_page(current_competition_id);
});
$("#player-breadcrumb").find("a").click(function(e) {
  e.preventDefault();
  // build team page
  build_team_page(current_team_id);
});
// start with every view hidden, then render the competitions overview
competitions.hide();
// hide competition
competition.hide();
// hide team
team.hide();
// hide player
player.hide();
// build competitions page
build_competitions_page();
// smooth scroll on same-page anchor clicks
// NOTE(review): "easeInOutExpo" requires the jQuery easing plugin — confirm
// it is loaded, otherwise jQuery throws on this easing name.
$('a.page-scroll[href*="#"]:not([href="#"])').click(function() {
  if (location.pathname.replace(/^\//, '') === this.pathname.replace(/^\//, '') && location.hostname === this.hostname) {
    var target = $(this.hash);
    target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
    if (target.length) {
      $('html, body').animate({
        scrollTop: (target.offset().top - 40)
      }, 1000, "easeInOutExpo");
      return false;
    }
  }
});
<filename>src/math/Boj17355.java
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.StringTokenizer;
/**
 *
 * @author exponential-e
 * Baekjoon problem 17355: <NAME>
 *
 * @see https://www.acmicpc.net/problem/17355
 *
 */
/**
 * Reads N pairs (h, t) and prints the product of |t - h| / |t| over all
 * pairs, reduced to lowest terms, with numerator and denominator each taken
 * mod 1e9+7. Exact reduction is done by tracking per-prime exponents for
 * numerator and denominator and cancelling them before the modular rebuild.
 */
public class Boj17355 {
    /** Modulus applied to the printed numerator and denominator. */
    private static final int MOD = 1_000_000_007;
    /** Exclusive upper bound for the sieve and factor-exponent tables. */
    private static final int SIZE = 10_000_001;

    private static ArrayList<Integer> primes = new ArrayList<>();
    /** fraction[0][p] / fraction[1][p]: exponent of p in numerator / denominator. */
    private static int[][] fraction = new int[2][SIZE];
    // Consistency fix: sized with SIZE instead of repeating the 10_000_001 literal.
    private static boolean[] prime = new boolean[SIZE];

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());
        eratosthenesSieve();
        while (N-- > 0) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            int h = Integer.parseInt(st.nextToken());
            int t = Integer.parseInt(st.nextToken());
            // numerator gathers |t - h|, denominator gathers |t|
            primeFactor(Math.abs(t - h), 0);
            primeFactor(Math.abs(t), 1);
        }
        System.out.println(fractionForm());
    }

    /**
     * Cancels common prime exponents between numerator and denominator, then
     * rebuilds both values as residues mod MOD.
     *
     * @return "numerator denominator"
     */
    private static String fractionForm() {
        for (int i = 0; i < SIZE; i++) {
            // subtract the shared exponent so at most one side keeps factor i
            if (fraction[0][i] > fraction[1][i]) {
                fraction[0][i] -= fraction[1][i];
                fraction[1][i] = 0;
            } else {
                fraction[1][i] -= fraction[0][i];
                fraction[0][i] = 0;
            }
        }
        long head = 1;
        long tail = 1;
        for (int i = 0; i < SIZE; i++) {
            if (fraction[0][i] != 0) head = modulation(head, pow(i, fraction[0][i]));
            if (fraction[1][i] != 0) tail = modulation(tail, pow(i, fraction[1][i]));
        }
        return head + " " + tail;
    }

    /**
     * value^current mod MOD by binary exponentiation. (The original multiplied
     * in a linear loop; this is O(log current) with identical results, since
     * every base passed in is < SIZE < MOD.)
     */
    private static long pow(long value, int current) {
        long base = value % MOD;
        long result = 1;
        while (current > 0) {
            if ((current & 1) == 1) result = modulation(result, base);
            base = modulation(base, base);
            current >>= 1;
        }
        return result;
    }

    /** (a * b) mod MOD without overflowing long. */
    private static long modulation(long a, long b) {
        return ((a % MOD) * (b % MOD)) % MOD;
    }

    /**
     * Adds the prime factorization of index to fraction[flag].
     *
     * @param index value to factor; 1 is recorded directly (harmless: pow(1,k)=1)
     * @param flag  0 = numerator, 1 = denominator
     */
    private static void primeFactor(int index, int flag) {
        // Bug fix: index == 0 (possible when h == t, or t == 0) previously spun
        // forever in the trial-division loop below because 0 % p == 0 for every
        // prime. The problem constraints presumably exclude zero — confirm —
        // but guard regardless.
        if (index == 0) return;
        if (index == 1 || prime[index]) {
            fraction[flag][index]++;
            return;
        }
        for (int p : primes) {
            if (index == 1) break;
            // once the remaining cofactor is itself prime, record it and stop early
            if (prime[index]) {
                fraction[flag][index]++;
                return;
            }
            while (index % p == 0) {
                index /= p;
                fraction[flag][p]++;
            }
        }
    }

    /**
     * Sieve of Eratosthenes over [2, SIZE): fills prime[] and the primes list.
     */
    private static void eratosthenesSieve() {
        int loop = (int) Math.sqrt(SIZE) + 1;
        Arrays.fill(prime, true);
        prime[0] = prime[1] = false;
        for (int i = 2; i < loop; i++) {
            if (!prime[i]) continue;
            for (int j = i + i; j < SIZE; j += i) {
                prime[j] = false;
            }
        }
        for (int i = 2; i < prime.length; i++) {
            if (prime[i]) primes.add(i);
        }
    }
}
|
<filename>docs/html/search/functions_2.js
// Doxygen-generated search index fragment (functions starting with 'c');
// regenerated by the docs build — do not edit by hand.
var searchData=
[
  ['conformerrigiddockingengine',['ConformerRigidDockingEngine',['../class_smol_dock_1_1_engine_1_1_conformer_rigid_docking_engine.html#a543e3df802cf3990dd5518a40e08aa52',1,'SmolDock::Engine::ConformerRigidDockingEngine']]]
];
|
#!/usr/bin/env bash
# NOTE(review): "INCLUDE:" is not bash syntax — presumably a directive expanded
# by the test harness's preprocessor to inline the referenced script at build
# time; confirm before running this file directly.
INCLUDE: ./../../test.opencaching.de/actions/activate-maintenance.sh
|
package de.bitbrain.braingdx.audio;
import aurelienribon.tweenengine.TweenManager;
import com.badlogic.gdx.assets.AssetManager;
import de.bitbrain.braingdx.behavior.BehaviorManager;
import de.bitbrain.braingdx.graphics.GameCamera;
import de.bitbrain.braingdx.world.GameWorld;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class AudioManagerTest {

    // System under test; Mockito injects the mocked collaborators below.
    @InjectMocks
    private AudioManagerImpl impl;

    @Mock
    private TweenManager tweenManager;

    @Mock
    private AssetManager assetManager;

    @Mock
    private GameCamera gameCamera;

    @Mock
    private GameWorld gameWorld;

    @Mock
    private BehaviorManager behaviorManager;

    // TODO: assert that spawning music registers the expected tween/behavior
    @Test
    public void testSpawnMusic() {
        // TODO
    }

    // TODO: assert that spawning a sound routes through the asset manager
    @Test
    public void testSpawnSound() {
        // TODO
    }
}
|
// Copyright 2006, 2007, 2008, 2010, 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.services;
import static java.util.Collections.singletonMap;
import static org.apache.tapestry5.ioc.internal.util.CollectionFactory.newMap;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.tapestry5.ComponentResources;
import org.apache.tapestry5.Field;
import org.apache.tapestry5.FieldValidator;
import org.apache.tapestry5.Validator;
import org.apache.tapestry5.internal.test.InternalBaseTestCase;
import org.apache.tapestry5.ioc.MessageFormatter;
import org.apache.tapestry5.ioc.Messages;
import org.apache.tapestry5.ioc.services.TypeCoercer;
import org.apache.tapestry5.runtime.Component;
import org.apache.tapestry5.services.FieldValidatorSource;
import org.apache.tapestry5.services.FormSupport;
import org.apache.tapestry5.validator.ValidatorMacro;
import org.easymock.EasyMock;
import org.testng.annotations.Test;
/**
 * Tests for {@code FieldValidatorSourceImpl}: validator lookup by type name,
 * constraint values (inline and from message catalogs), per-form message
 * overrides, validator macro expansion, and parsing of validator
 * specification strings such as "required,minLength=15".
 *
 * The train_* helpers come from {@code InternalBaseTestCase} and record
 * EasyMock expectations; the recording order mirrors the order the
 * implementation is expected to make the calls in.
 */
public class FieldValidatorSourceImplTest extends InternalBaseTestCase
{
    /** Merges Field and Component so a single mock can play both roles. */
    public interface FieldComponent extends Field, Component
    {
    }

    // An unrecognized validator name must fail with a message listing the
    // configured validators.
    @Test
    public void unknown_validator_type()
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        Map<String, Validator> map = newMap();
        ValidatorMacro macro = mockValidatorMacro();

        train_getComponentResources(field, resources);
        train_getId(resources, "fred");
        train_getContainerMessages(resources, containerMessages);
        train_alwaysNull(macro);

        map.put("alpha", validator);
        map.put("beta", validator);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(null, coercer, null, map, macro);

        try
        {
            source.createValidator(field, "foo", null);
            unreachable();
        }
        catch (IllegalArgumentException ex)
        {
            assertEquals(ex.getMessage(), "Unknown validator type 'foo'. Configured validators are alpha, beta.");
        }

        verify();
    }

    // When the container messages define no override, the validator's own
    // message key is resolved against the global messages.
    @SuppressWarnings("unchecked")
    @Test
    public void validator_with_no_constraint() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        Messages globalMessages = mockMessages();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        FormSupport fs = mockFormSupport();
        ValidatorMacro macro = mockValidatorMacro();

        Map<String, Validator> map = singletonMap("required", validator);

        train_getConstraintType(validator, null);
        train_getFormValidationId(fs, "form");
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");
        train_getContainerMessages(resources, containerMessages);
        train_alwaysNull(macro);

        // Neither the per-form nor the per-field override key is present.
        train_contains(containerMessages, "form-fred-required-message", false);
        train_contains(containerMessages, "fred-required-message", false);

        train_getMessageKey(validator, "key");
        train_getMessageFormatter(globalMessages, "key", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, null, formatter, inputValue);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidator(field, "required", null);

        fieldValidator.validate(inputValue);

        verify();
    }

    // A "fred-required-message" key in the container catalog takes
    // precedence over the validator's default message.
    @SuppressWarnings("unchecked")
    @Test
    public void component_messages_overrides_validator_messages() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        FormSupport fs = mockFormSupport();
        ValidatorMacro macro = mockValidatorMacro();

        Map<String, Validator> map = singletonMap("required", validator);

        train_getConstraintType(validator, null);
        train_getFormValidationId(fs, "form");
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");
        train_getContainerMessages(resources, containerMessages);
        train_alwaysNull(macro);

        train_contains(containerMessages, "form-fred-required-message", false);
        train_contains(containerMessages, "fred-required-message", true);
        train_getMessageFormatter(containerMessages, "fred-required-message", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, null, formatter, inputValue);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(null, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidator(field, "required", null);

        fieldValidator.validate(inputValue);

        verify();
    }

    // The form-qualified key "form-fred-required-message" wins over the
    // unqualified per-field key.
    @Test
    public void component_messages_overrides_validator_messages_per_form() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        FormSupport fs = mockFormSupport();
        ValidatorMacro macro = mockValidatorMacro();

        Map<String, Validator> map = singletonMap("required", validator);

        train_getConstraintType(validator, null);
        train_getFormValidationId(fs, "form");
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");
        train_getContainerMessages(resources, containerMessages);
        train_alwaysNull(macro);

        train_contains(containerMessages, "form-fred-required-message", true);
        train_getMessageFormatter(containerMessages, "form-fred-required-message", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, null, formatter, inputValue);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(null, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidator(field, "required", null);

        fieldValidator.validate(inputValue);

        verify();
    }

    // A missing inline constraint is looked up from the catalog under
    // "fred-minlength" and coerced to the validator's constraint type.
    @SuppressWarnings("unchecked")
    @Test
    public void constraint_value_from_message_catalog_per() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        Messages globalMessages = mockMessages();
        FormSupport fs = mockFormSupport();

        Map<String, Validator> map = singletonMap("minlength", validator);

        train_getConstraintType(validator, Integer.class);
        train_getFormValidationId(fs, "myform");
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");

        train_contains(containerMessages, "myform-fred-minlength", false);
        train_contains(containerMessages, "fred-minlength", true);
        train_get(containerMessages, "fred-minlength", "5");

        train_coerce(coercer, "5", Integer.class, 5);

        train_getContainerMessages(resources, containerMessages);

        train_contains(containerMessages, "myform-fred-minlength-message", false);
        train_contains(containerMessages, "fred-minlength-message", false);

        train_getMessageKey(validator, "key");
        train_getMessageFormatter(globalMessages, "key", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, 5, formatter, inputValue);

        ValidatorMacro macro = mockValidatorMacro();
        train_alwaysNull(macro);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidators(field, "minlength");

        fieldValidator.validate(inputValue);

        verify();
    }

    // The form-qualified constraint key "myform-fred-minlength" takes
    // precedence when present.
    @SuppressWarnings("unchecked")
    @Test
    public void constraint_value_from_message_catalog_per_form() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        Messages globalMessages = mockMessages();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        FormSupport fs = mockFormSupport();

        Map<String, Validator> map = singletonMap("minlength", validator);

        train_getConstraintType(validator, Integer.class);
        train_getFormValidationId(fs, "myform");
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");

        train_contains(containerMessages, "myform-fred-minlength", true);
        train_get(containerMessages, "myform-fred-minlength", "5");

        train_coerce(coercer, "5", Integer.class, 5);

        train_getContainerMessages(resources, containerMessages);

        train_contains(containerMessages, "myform-fred-minlength-message", false);
        train_contains(containerMessages, "fred-minlength-message", false);

        train_getMessageKey(validator, "key");
        train_getMessageFormatter(globalMessages, "key", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, 5, formatter, inputValue);

        ValidatorMacro macro = mockValidatorMacro();
        train_alwaysNull(macro);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidators(field, "minlength");

        fieldValidator.validate(inputValue);

        verify();
    }

    // No inline constraint and no catalog entry at all: creation fails with
    // a message naming both candidate catalog keys.
    @SuppressWarnings("unchecked")
    @Test
    public void missing_field_validator_constraint() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        FormSupport fs = mockFormSupport();

        Map<String, Validator> map = singletonMap("minlength", validator);

        train_getConstraintType(validator, Integer.class);
        train_getFormValidationId(fs, "myform");
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");
        train_getContainerMessages(resources, containerMessages);

        train_contains(containerMessages, "myform-fred-minlength", false);
        train_contains(containerMessages, "fred-minlength", false);

        ValidatorMacro macro = mockValidatorMacro();
        train_alwaysNull(macro);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(null, coercer, fs, map, macro);

        try
        {
            source.createValidators(field, "minlength");
            unreachable();
        }
        catch (IllegalArgumentException ex)
        {
            assertEquals(
                    ex.getMessage(),
                    "Validator 'minlength' requires a validation constraint (of type java.lang.Integer) but none was provided. The constraint may be provided inside the @Validator annotation on the property, or in the associated component message catalog as key 'myform-fred-minlength' or key 'fred-minlength'.");
        }

        verify();
    }

    // createValidators() with a single bare type behaves like
    // createValidator() with a null constraint.
    @SuppressWarnings("unchecked")
    @Test
    public void single_validator_via_specification() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        // NOTE(review): 'messages' is registered as a mock but never
        // trained or passed anywhere - appears unused; confirm before
        // removing.
        Messages messages = mockMessages();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        FormSupport fs = mockFormSupport();
        Messages globalMessages = mockMessages();

        Map<String, Validator> map = singletonMap("required", validator);

        train_getFormValidationId(fs, "myform");
        train_getConstraintType(validator, null);
        train_getComponentResources(field, resources);
        train_getId(resources, "fred");
        train_getContainerMessages(resources, containerMessages);

        train_contains(containerMessages, "myform-fred-required-message", false);
        train_contains(containerMessages, "fred-required-message", false);

        train_getMessageKey(validator, "key");
        train_getMessageFormatter(globalMessages, "key", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, null, formatter, inputValue);

        ValidatorMacro macro = mockValidatorMacro();
        train_alwaysNull(macro);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidators(field, "required");

        fieldValidator.validate(inputValue);

        verify();
    }

    // Macro lookup returns null for every name, i.e. "no macro defined".
    private void train_alwaysNull(ValidatorMacro macro)
    {
        expect(macro.valueForMacro(EasyMock.isA(String.class))).andReturn(null).anyTimes();
    }

    private ValidatorMacro mockValidatorMacro()
    {
        return newMock(ValidatorMacro.class);
    }

    // "combo" expands to its definition, whose members are themselves
    // checked (and found not to be) macros.
    @SuppressWarnings("unchecked")
    @Test
    public void simple_macro_expansion() throws Exception
    {
        ValidatorMacro macro = mockValidatorMacro();
        expect(macro.valueForMacro("combo")).andReturn("required,minlength=5");
        expect(macro.valueForMacro("required")).andReturn(null);
        expect(macro.valueForMacro("minlength")).andReturn(null);

        replay();

        FieldValidatorSourceImpl source = new FieldValidatorSourceImpl(null, null, null, null, macro);

        List<ValidatorSpecification> specs = source.toValidatorSpecifications("combo");

        assertListsEquals(specs, new ValidatorSpecification("required"), new ValidatorSpecification("minlength", "5"));

        verify();
    }

    // "combo=3" is rejected: a macro name may not carry a constraint.
    @SuppressWarnings("unchecked")
    @Test
    public void macros_can_not_have_constraints() throws Exception
    {
        ValidatorMacro macro = mockValidatorMacro();
        expect(macro.valueForMacro("combo")).andReturn("required,minlength=5");

        replay();

        FieldValidatorSourceImpl source = new FieldValidatorSourceImpl(null, null, null, null, macro);

        try
        {
            source.toValidatorSpecifications("combo=3");
            unreachable();
        }
        catch (RuntimeException ex)
        {
            assertEquals(ex.getMessage(),
                    "'combo' is a validator macro, not a validator, and can not have a constraint value.");
        }

        verify();
    }

    // A macro that (indirectly) expands to itself must be detected rather
    // than recursing forever.
    @Test
    public void recursive_macros_are_caught()
    {
        ValidatorMacro macro = mockValidatorMacro();
        expect(macro.valueForMacro("combo")).andReturn("required,combo");
        expect(macro.valueForMacro("required")).andReturn(null);
        expect(macro.valueForMacro("combo")).andReturn("required,combo");

        replay();

        FieldValidatorSourceImpl source = new FieldValidatorSourceImpl(null, null, null, null, macro);

        try
        {
            source.toValidatorSpecifications("combo");
            unreachable();
        }
        catch (RuntimeException ex)
        {
            assertEquals(ex.getMessage(), "Validator macro 'combo' appears more than once.");
        }

        verify();
    }

    // "required,minLength=15" produces a composite validator that applies
    // both validators in order.
    @SuppressWarnings("unchecked")
    @Test
    public void multiple_validators_via_specification() throws Exception
    {
        Validator required = mockValidator();
        Validator minLength = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        // NOTE(review): 'messages' is registered as a mock but never
        // trained or passed anywhere - appears unused; confirm before
        // removing.
        Messages messages = mockMessages();
        MessageFormatter requiredFormatter = mockMessageFormatter();
        MessageFormatter minLengthFormatter = mockMessageFormatter();
        Object inputValue = "input value";
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        Integer fifteen = 15;
        FormSupport fs = mockFormSupport();
        Messages globalMessages = mockMessages();

        Map<String, Validator> map = newMap();

        map.put("required", required);
        map.put("minLength", minLength);

        train_getFormValidationId(fs, "myform");
        train_getConstraintType(required, null);
        train_getConstraintType(minLength, Integer.class);

        train_getComponentResources(field, resources);
        train_getId(resources, "fred");

        train_getContainerMessages(resources, containerMessages);

        train_contains(containerMessages, "myform-fred-required-message", false);
        train_contains(containerMessages, "fred-required-message", false);

        train_getMessageKey(required, "required");
        train_getMessageFormatter(globalMessages, "required", requiredFormatter);

        train_contains(containerMessages, "myform-fred-minLength-message", false);
        train_contains(containerMessages, "fred-minLength-message", false);

        train_getMessageKey(minLength, "min-length");
        train_getMessageFormatter(globalMessages, "min-length", minLengthFormatter);

        train_coerce(coercer, "15", Integer.class, fifteen);

        train_isRequired(required, true);
        train_getValueType(required, Object.class);
        required.validate(field, null, requiredFormatter, inputValue);

        // String.class value type: the "input value" string is passed through.
        train_isRequired(minLength, false);
        train_getValueType(minLength, String.class);
        minLength.validate(field, fifteen, minLengthFormatter, inputValue);

        ValidatorMacro macro = mockValidatorMacro();
        train_alwaysNull(macro);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidators(field, "required,minLength=15");

        fieldValidator.validate(inputValue);

        verify();
    }

    // An inline constraint string ("5") is coerced to the validator's
    // declared constraint type before validation.
    @SuppressWarnings("unchecked")
    @Test
    public void validator_with_constraint() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        Integer five = 5;
        FormSupport fs = mockFormSupport();
        ValidatorMacro macro = mockValidatorMacro();
        Messages globalMessages = mockMessages();

        Map<String, Validator> map = singletonMap("minLength", validator);

        train_getConstraintType(validator, Integer.class);
        train_getFormValidationId(fs, "myform");

        train_coerce(coercer, "5", Integer.class, five);

        train_getComponentResources(field, resources);
        train_getId(resources, "fred");

        train_getContainerMessages(resources, containerMessages);
        train_alwaysNull(macro);
        train_contains(containerMessages, "myform-fred-minLength-message", false);
        train_contains(containerMessages, "fred-minLength-message", false);

        train_getMessageKey(validator, "key");
        train_getMessageFormatter(globalMessages, "key", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, five, formatter, inputValue);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidator(field, "minLength", "5");

        fieldValidator.validate(inputValue);

        verify();
    }

    // A macro name passed to createValidator() expands first; its member's
    // constraint ("77") is then coerced as usual.
    @SuppressWarnings("unchecked")
    @Test
    public void validator_with_constraint_and_macro() throws Exception
    {
        Validator validator = mockValidator();
        TypeCoercer coercer = mockTypeCoercer();
        FieldComponent field = newFieldComponent();
        MessageFormatter formatter = mockMessageFormatter();
        Object inputValue = new Object();
        ComponentResources resources = mockComponentResources();
        Messages containerMessages = mockMessages();
        Integer five = 5;
        FormSupport fs = mockFormSupport();
        ValidatorMacro macro = mockValidatorMacro();
        Messages globalMessages = mockMessages();

        Map<String, Validator> map = singletonMap("minLength", validator);

        train_getConstraintType(validator, Integer.class);
        train_getFormValidationId(fs, "myform");

        train_coerce(coercer, "77", Integer.class, five);

        train_getComponentResources(field, resources);
        train_getId(resources, "fred");

        train_getContainerMessages(resources, containerMessages);

        expect(macro.valueForMacro("foo-bar-baz")).andReturn("minLength=77");
        expect(macro.valueForMacro("minLength")).andReturn(null);

        train_contains(containerMessages, "myform-fred-minLength-message", false);
        train_contains(containerMessages, "fred-minLength-message", false);

        train_getMessageKey(validator, "key");
        train_getMessageFormatter(globalMessages, "key", formatter);

        train_isRequired(validator, false);
        train_getValueType(validator, Object.class);
        validator.validate(field, five, formatter, inputValue);

        replay();

        FieldValidatorSource source = new FieldValidatorSourceImpl(globalMessages, coercer, fs, map, macro);

        FieldValidator fieldValidator = source.createValidator(field, "foo-bar-baz", null);

        fieldValidator.validate(inputValue);

        verify();
    }

    private FieldComponent newFieldComponent()
    {
        return newMock(FieldComponent.class);
    }

    // Helper for the static-parse tests below: parse and compare against
    // the expected specifications.
    private void test(String specification, ValidatorSpecification... expected)
    {
        List<ValidatorSpecification> specs = FieldValidatorSourceImpl.parse(specification);

        assertEquals(specs, Arrays.asList(expected));
    }

    @Test
    public void parse_simple_type_list()
    {
        test("required,email", new ValidatorSpecification("required", null), new ValidatorSpecification("email", null));
    }

    @Test
    public void parse_single_type()
    {
        test("required", new ValidatorSpecification("required", null));
    }

    @Test
    public void ignore_whitespace_around_type_name()
    {
        test(" required , email ", new ValidatorSpecification("required", null), new ValidatorSpecification(
                "email", null));
    }

    @Test
    public void parse_simple_type_with_value()
    {
        test("minLength=5,sameAs=otherComponentId", new ValidatorSpecification("minLength", "5"),
                new ValidatorSpecification("sameAs", "otherComponentId"));
    }

    @Test
    public void whitespace_ignored_around_value()
    {
        test("minLength= 5 , sameAs = otherComponentId ", new ValidatorSpecification("minLength", "5"),
                new ValidatorSpecification("sameAs", "otherComponentId"));
    }

    // A trailing '=' yields an empty-string constraint, not null.
    @Test
    public void dangling_equals_sign_is_empty_string_value()
    {
        test("minLength= ", new ValidatorSpecification("minLength", ""));
    }

    @Test
    public void unexpected_character_not_a_comma()
    {
        try
        {
            test("required.email");
            unreachable();
        }
        catch (RuntimeException ex)
        {
            assertEquals(ex.getMessage(), "Unexpected character '.' at position 9 of input string: required.email");
        }
    }

    @Test
    public void unexpected_character_after_constraint_value()
    {
        try
        {
            test("minLength=3 . email");
            unreachable();
        }
        catch (RuntimeException ex)
        {
            assertEquals(ex.getMessage(),
                    "Unexpected character '.' at position 13 of input string: minLength=3 . email");
        }
    }
}
|
<filename>server/game/cards/characters/01/samwelltarly.js
const DrawCard = require('../../../drawcard.js');

/**
 * Samwell Tarly (Core Set, card 01127).
 *
 * While in play, grants the controller a +1 reserve plot modifier.
 */
class SamwellTarly extends DrawCard {
    setupCardAbilities() {
        const modifiers = {
            reserve: 1
        };
        this.plotModifiers(modifiers);
    }
}

SamwellTarly.code = '01127';

module.exports = SamwellTarly;
|
#!/bin/bash
#
# Rebuild the scratch copy of the viral15raw example data set under
# $AMI/work/temp, starting from the pristine copy in $AMI/examples.
#
# Requires: AMI - path to the project root.

set -euo pipefail

# Abort (instead of e.g. running 'rm -rf /work/temp') if AMI is unset.
: "${AMI:?AMI must point to the project root}"

cd "$AMI"

rm -rf "$AMI/work/temp"
# Recreate the parent directory removed above so cp has a destination.
mkdir -p "$AMI/work/temp"

# -r: the source is a directory tree; plain cp would refuse it.
cp -r "$AMI/examples/viral15raw/" "$AMI/work/temp/viral15raw/"

ls "$AMI/work/temp/viral15raw"
|
#!/usr/bin/env bash
#
# Copyright (c) 2009-2012 VMware, Inc.
#
# Generate VMware artifacts for the stemcell image: a monolithic-flat
# vmdk disk descriptor (root.vmdk) and a .vmx VM definition, written
# into $work/ovf. $work and $stemcell_image_name come from the sourced
# prelude.

set -e

base_dir=$(readlink -nf "$(dirname "$0")/../..")
source "$base_dir/lib/prelude_apply.bash"

ovf=$work/ovf

mkdir -p "$ovf"

# Image size in whole MiB (integer division).
disk_size=$(($(stat --printf="%s" "$work/${stemcell_image_name}") / (1024*1024)))

# 512 bytes per sector -> 2048 sectors per MiB
disk_sectors=$(($disk_size * 2048))

# 255 heads * 63 sectors/track = 16065 sectors per cylinder
disk_cylinders=$(($disk_sectors / 16065))

# Output disk description
cat > "$ovf/root.vmdk" <<EOS
version=1
CID=ffffffff
parentCID=ffffffff
createType="vmfs"
# Extent description
RW $disk_sectors FLAT "$work/${stemcell_image_name}" 0
ddb.toolsVersion = "0"
ddb.adapterType = "lsilogic"
ddb.geometry.biosSectors = "63"
ddb.geometry.biosHeads = "255"
ddb.geometry.biosCylinders = "$disk_cylinders"
ddb.geometry.sectors = "63"
ddb.geometry.heads = "255"
ddb.geometry.cylinders = "$disk_cylinders"
ddb.virtualHWVersion = "4"
EOS

# Virtual machine definition.
vm_mem=512
vm_cpus=1
vm_hostname=ubuntu
vm_arch=amd64
vm_guestos=ubuntu-64

cat > "$ovf/$vm_hostname.vmx" <<EOS
config.version = "8"
virtualHW.version = 8
floppy0.present = "FALSE"
nvram = "nvram"
deploymentPlatform = "windows"
virtualHW.productCompatibility = "hosted"
tools.upgrade.policy = "useGlobal"
powerType.powerOff = "preset"
powerType.powerOn = "preset"
powerType.suspend = "preset"
powerType.reset = "preset"
displayName = "$vm_hostname $vm_arch"
numvcpus = "$vm_cpus"
scsi0.present = "true"
scsi0.sharedBus = "none"
scsi0.virtualDev = "lsilogic"
memsize = "$vm_mem"
scsi0:0.present = "true"
scsi0:0.fileName = "root.vmdk"
scsi0:0.deviceType = "scsi-hardDisk"
ide0:0.present = "true"
ide0:0.clientDevice = "TRUE"
ide0:0.deviceType = "cdrom-raw"
ide0:0.startConnected = "FALSE"
guestOSAltName = "$vm_guestos ($vm_arch)"
guestOS = "$vm_guestos"
toolScripts.afterPowerOn = "true"
toolScripts.afterResume = "true"
toolScripts.beforeSuspend = "true"
toolScripts.beforePowerOff = "true"
scsi0:0.redo = ""
tools.syncTime = "FALSE"
tools.remindInstall = "TRUE"
evcCompatibilityMode = "FALSE"
EOS
|
<gh_stars>0
#include <Core/Utils/ConfigReader.h>
#include <fstream>
namespace Lunia {
Config::Config(const char* filename) {
if (!FileExists(filename))
Logger::GetInstance().Exception("Could not find config file provided => {0}", filename);
ReadConfigFile(filename);
}
bool Config::FileExists(const std::string& name){
struct stat buffer;
return (stat(name.c_str(), &buffer) == 0);
}
void Config::ReadConfigFile(const char* filename)
{
std::ifstream in(filename);
json j = json::parse((std::istreambuf_iterator<char>(in)), std::istreambuf_iterator<char>());
if (j.at("Config").is_null())
Logger::GetInstance().Exception("Cannot launch server without settings.");
this->m_Config = j.at("Config");
if (!j.at("Config")["LobbyServer"].is_null()) {
this->m_ServerKind = ServerKind::LobbyKind;
this->m_BaseStr = "LobbyServer";
} else if (!j.at("Config")["PvpServer"].is_null()) {
this->m_ServerKind = ServerKind::PVPKind;
this->m_BaseStr = "PvpServer";
}else if (!j.at("Config")["SquareServer"].is_null()) {
this->m_ServerKind = ServerKind::SquareKind;
this->m_BaseStr = "SquareServer";
}else if (!j.at("Config")["StageServer"].is_null()) {
this->m_ServerKind = ServerKind::StageKind;
this->m_BaseStr = "StageServer";
}else
Logger::GetInstance().Exception("Could not find a valid ServerKind");
if (m_Config.at(m_BaseStr).is_null())
throw;
m_Config.get_to(Settings);
}
ServerKind Config::GetKind()
{
return this->m_ServerKind;
}
Config& ConfigInstance(const char* filename) {
return Config::GetInstance(filename);
}
void to_json(json& j, const LoggingStruct& o){
j["Filename"] = o.Filename;
j["LoggingLevel"] = o.LoggingLevel;
}
void from_json(const json& j, LoggingStruct& o){
j.at("Filename").get_to(o.Filename);
j.at("LoggingLevel").get_to(o.LoggingLevel);
}
void to_json(json& j, const ServerAddressStruct& o) {
j["ServerIp"] = o.ServerIp;
j["ServerPort"] = o.ServerPort;
}
void from_json(const json& j, ServerAddressStruct& o) {
j.at("ServerIp").get_to(o.ServerIp);
j.at("ServerPort").get_to(o.ServerPort);
}
void to_json(json& j, const GeneralSettings& t) {
j["Logging"] = t.Logging;
j["ServerAddress"] = t.ServerAddress;
j["ApiUrl"] = t.ApiUrl;
j["ServerName"] = t.ServerName;
j["Capacity"] = t.Capacity;
j["PingTimeout"] = t.PingTimeout;
j["ShowPacket"] = t.ShowPacket;
j["AchievementAddress"] = t.AchievementAddress;
}
void from_json(const json& j, GeneralSettings& t) {
j.at("Logging").get_to(t.Logging);
j.at("ServerAddress").get_to(t.ServerAddress);
j.at("ApiUrl").get_to(t.ApiUrl);
j.at("ServerName").get_to(t.ServerName);
j.at("Capacity").get_to(t.Capacity);
j.at("PingTimeout").get_to(t.PingTimeout);
j.at("ShowPacket").get_to(t.ShowPacket);
j.at("AchievementAddress").get_to(t.AchievementAddress);
}
} |
#!/bin/bash
#
# Point Maven at a local Nexus repository manager. When NEXUS_REPO is
# set, write a settings.xml into MAVEN_CONFIG that mirrors all
# repositories through it; otherwise do nothing.

set -e

# Setup nexus as the maven proxy
# ${NEXUS_REPO:-} also tolerates the variable being entirely unset.
if [ -n "${NEXUS_REPO:-}" ] ; then
  mkdir -p "${MAVEN_CONFIG}"
  # NOTE(review): the deployment credentials below are well-known Nexus
  # defaults, presumably for a throwaway local instance - do not reuse
  # against a shared server.
  cat > "${MAVEN_CONFIG}/settings.xml" <<EOF
<settings xmlns="http://maven.apache.org/SETTINGS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd">
  <mirrors>
    <mirror>
      <id>nexus</id>
      <name>Local Maven Repository Manager</name>
      <url>${NEXUS_REPO}</url>
      <mirrorOf>*</mirrorOf>
    </mirror>
  </mirrors>
  <servers>
    <server>
      <id>deployment</id>
      <username>deployment</username>
      <password>deployment123</password>
    </server>
  </servers>
</settings>
EOF
fi
|
def StringReplace(string, oldchar, newchar, case_sens=False):
    """Return a copy of ``string`` with every occurrence of ``oldchar``
    replaced by ``newchar``.

    Fixes two defects in the previous version: the else-branch appended
    ``oldchar`` instead of the current character (corrupting every
    non-matching position), and case-insensitive mode lower-cased the
    entire result instead of only comparing case-insensitively.

    Args:
        string: The text to scan.
        oldchar: The character to replace.
        newchar: The replacement text.
        case_sens: When False (the default), ``oldchar`` matches
            regardless of case; the case of non-matching characters is
            preserved either way.

    Returns:
        The string with all replacements applied.
    """
    target = oldchar if case_sens else oldchar.lower()
    pieces = []
    for char in string:
        probe = char if case_sens else char.lower()
        pieces.append(newchar if probe == target else char)
    return "".join(pieces)
//
// Crappy barrels with kludged physics
//
#include "game.h"
#include "barrel.h"
#include "fmatrix.h"
#include "mav.h"
#include "pap.h"
#include "statedef.h"
#include "animate.h"
#include "pcom.h"
#include "psystem.h"
#include "poly.h"
#include "eway.h"
#include "sound.h"
#include "pow.h"
#include "dirt.h"
#ifndef PSX
#include "panel.h"
#else
#include "c:\fallen\psxeng\headers\panel.h"
#endif
// Pool of collision spheres shared by all moving barrels; allocated
// elsewhere with BARREL_MAX_SPHERES entries.
BARREL_Sphere *BARREL_sphere; //[BARREL_MAX_SPHERES];
// Rotating scan position into the sphere pool used by the allocator
// (despite the name, it is a cursor, not a count).
SLONG BARREL_sphere_last;

//
// The states a barrel can be in.
//

#define BARREL_FLAG_STACKED (1 << 0)
#define BARREL_FLAG_STILL (1 << 1)
#define BARREL_FLAG_GROUNDED (1 << 2)
#define BARREL_FLAG_HELD (1 << 3) // A person is holding this barrel- don't process it.
#define BARREL_FLAG_HIT (1 << 4) // This cone has already been hit- so don't be penalised for hitting it again.
#define BARREL_FLAG_RUBBISH (1 << 5) // This bin contains rubbish.
#define BARREL_FLAG_CANS (1 << 6) // This bin contains cans.

#ifndef PSX
extern BOOL allow_debug_keys;
#endif

// Pool of barrel records; allocated elsewhere with BARREL_MAX_BARRELS
// entries. BARREL_barrel_upto is the next unused slot.
Barrel *BARREL_barrel;//[BARREL_MAX_BARRELS];
SLONG BARREL_barrel_upto;

//
// The radius and height of a barrel. A barrel has its pivot in the middle of itself,
// not at its base!
//

#define BARREL_DIAMETER 80
#define BARREL_RADIUS 40
#define BARREL_HEIGHT 93
#define BARREL_STACK_RADIUS 45
#define BARREL_SPHERE_RADIUS 85
#define BARREL_SPHERE_DIST 50
#define BARREL_GRAVITY 0x80

// Count of barrel sound effects triggered recently; used by
// BARREL_hit_noise to rate-limit playback.
SLONG BARREL_fx_rate;
//
// Play an impact sound for the given barrel, rate-limited via
// BARREL_fx_rate so a pile of colliding barrels does not flood the mixer.
//
inline void BARREL_hit_noise(Thing *p_barrel) {
	BARREL_fx_rate++;
	//TRACE("barrel: %d\n",BARREL_fx_rate);

	// Too many barrel noises already active? Count it but stay silent.
	if (BARREL_fx_rate > 4)
		return;

	if (p_barrel->Genus.Barrel->type == BARREL_TYPE_CONE)
	{
		MFX_play_thing(THING_NUMBER(p_barrel), S_TRAFFIC_CONE, 0, p_barrel);
	}
	else
	{
		MFX_play_thing(THING_NUMBER(p_barrel), SOUND_Range(S_BARREL_START, S_BARREL_END), 0, p_barrel);
	}
}
#ifndef PSX
//
// Reset the barrel and collision-sphere pools to their initial state.
//
void BARREL_init()
{
	SLONG index;

	// Zero both pools outright.
	memset(BARREL_sphere, 0, sizeof(BARREL_Sphere) * BARREL_MAX_SPHERES);
	memset(BARREL_barrel, 0, sizeof(Barrel) * BARREL_MAX_BARRELS);

	BARREL_barrel_upto = 0;
	BARREL_sphere_last = 2;

	// An x of -INFINITY marks a sphere slot as unused.
	for (index = 0; index < BARREL_MAX_SPHERES; index++)
	{
		BARREL_sphere[index].x = -INFINITY;
	}

	BARREL_fx_rate = 0;
}
#endif
//
// Returns an index to two free BARREL_Spheres, or 0 if the pool is
// exhausted. Indices 0/1 are reserved (the cursor starts at 2), so 0 is
// never a valid allocation and works as the failure sentinel.
//
SLONG BARREL_spheres_get(void)
{
	SLONG i;

	// Scan at most half the pool - slots are handed out in pairs.
	for (i = 0; i < BARREL_MAX_SPHERES / 2; i++)
	{
		ASSERT(WITHIN(BARREL_sphere_last, 2, BARREL_MAX_SPHERES - 2));

		// A pair is free when the first sphere's x is the -INFINITY marker
		// (see BARREL_spheres_give / BARREL_init).
		if (BARREL_sphere[BARREL_sphere_last].x == -INFINITY)
		{
			return BARREL_sphere_last;
		}

		BARREL_sphere_last += 2;

		if (BARREL_sphere_last >= BARREL_MAX_SPHERES - 2)
		{
			BARREL_sphere_last = 2;
		}
	}

	// Pool exhausted. Was 'return NULL;' - NULL is a pointer constant but
	// this function returns an integer index; 0 is the same value without
	// the type mismatch.
	return 0;
}
//
// Returns a pair of barrel spheres (allocated by BARREL_spheres_get)
// to the pool by marking both as unused.
//
void BARREL_spheres_give(SLONG bs)
{
	ASSERT(WITHIN(bs + 0, 2, BARREL_MAX_SPHERES - 2));
	ASSERT(WITHIN(bs + 1, 3, BARREL_MAX_SPHERES - 1));

	// An x of -INFINITY is the "slot free" marker used by the allocator.
	BARREL_sphere[bs].x = -INFINITY;
	BARREL_sphere[bs + 1].x = -INFINITY;
}
//
// Returns the position where the given sphere of a stacked/still barrel would be.
//
// A stationary barrel owns no live BARREL_Spheres, so this derives where
// its two spheres would sit: one either side of the pivot, offset along
// the mesh's angle/tilt direction by BARREL_SPHERE_DIST. Outputs appear
// to be in the <<8 fixed-point "barrel" coordinate space (radius is
// shifted left by 8) - TODO confirm against BARREL_hit_with_sphere.
//
void BARREL_stacked_sphere(Thing *p_barrel, SLONG which_sphere, SLONG *sx, SLONG *sy, SLONG *sz, SLONG *sradius)
{
	ASSERT(p_barrel->Class == CLASS_BARREL);

	SLONG ans_x = p_barrel->WorldPos.X;
	SLONG ans_y = p_barrel->WorldPos.Y;
	SLONG ans_z = p_barrel->WorldPos.Z;

	SLONG vector[3];

	// Unit direction of the barrel's long axis from the mesh orientation.
	FMATRIX_vector(
		vector,
		p_barrel->Draw.Mesh->Angle,
		p_barrel->Draw.Mesh->Tilt);

	// Scale by the sphere offset (<< 7 keeps the fixed-point result in
	// the same space as WorldPos).
	vector[0] = MUL64(vector[0], (BARREL_SPHERE_DIST << 7));
	vector[1] = MUL64(vector[1], (BARREL_SPHERE_DIST << 7));
	vector[2] = MUL64(vector[2], (BARREL_SPHERE_DIST << 7));

	if (which_sphere)
	{
		// Sphere 1: offset forward along the axis.
		ans_x += vector[0];
		ans_y += vector[1];
		ans_z += vector[2];

		*sradius = BARREL_SPHERE_RADIUS << 8;

		// Cones get a half-size sphere on this end.
		if (p_barrel->Genus.Barrel->type == BARREL_TYPE_CONE)
		{
			*sradius >>= 1;
		}
	}
	else
	{
		// Sphere 0: offset backward along the axis.
		ans_x -= vector[0];
		ans_y -= vector[1];
		ans_z -= vector[2];

		*sradius = BARREL_SPHERE_RADIUS << 8;

		// Cones get a three-quarter-size sphere on this end.
		if (p_barrel->Genus.Barrel->type == BARREL_TYPE_CONE)
		{
			*sradius -= *sradius >> 2;
		}
	}

	*sx = ans_x;
	*sy = ans_y;
	*sz = ans_z;
}
//
// Converts a stationary barrel to a moving one.
//
// Plays the appropriate impact sound, spills papers from rubbish bins,
// allocates a pair of collision spheres from the pool and initialises
// them at the positions a stationary barrel's spheres would occupy.
//
void BARREL_convert_stationary_to_moving(Thing *p_barrel)
{
	SLONG i;

	SLONG sx;
	SLONG sy;
	SLONG sz;
	SLONG sradius;

	ASSERT(p_barrel->Class == CLASS_BARREL);

	Barrel *bb = p_barrel->Genus.Barrel;
	BARREL_Sphere *bs;

	// Only a stacked or still barrel can be converted.
	ASSERT(bb->flag & (BARREL_FLAG_STACKED|BARREL_FLAG_STILL));

	if (p_barrel->Genus.Barrel->type == BARREL_TYPE_CONE)
	{
		// Disabled cone-penalty scoring, kept for reference.
		/*
		if (!(p_barrel->Genus.Barrel->flag & BARREL_FLAG_HIT))
		{
			p_barrel->Genus.Barrel->flag |= BARREL_FLAG_HIT;
			extern UBYTE EWAY_count_up_visible;
			extern SLONG EWAY_count_up;
			if (EWAY_count_up_visible)
			{
				EWAY_count_up += 500;
				void add_damage_text(SWORD x,SWORD y,SWORD z,CBYTE *text);
				add_damage_text(
					p_barrel->WorldPos.X >> 8,
					p_barrel->WorldPos.Y >> 8,
					p_barrel->WorldPos.Z >> 8,
					"Time penalty");
				PANEL_new_info_message("Time Penalty");
			}
			if (GAME_FLAGS & GF_CONE_PENALTIES)
			{
				EWAY_deduct_time_penalty(50);
			}
		}
		*/
		MFX_play_thing(THING_NUMBER(p_barrel),S_TRAFFIC_CONE,0,p_barrel);
	} else MFX_play_thing(THING_NUMBER(p_barrel),S_CAR_BUMP,0,p_barrel);

	if (p_barrel->Genus.Barrel->flag & BARREL_FLAG_RUBBISH)
	{
		//
		// Make some newspapers fly out from the top of the barrel.
		//
		DIRT_create_papers(
			(p_barrel->WorldPos.X >> 8),
			(p_barrel->WorldPos.Y >> 8) + 0xa0,
			(p_barrel->WorldPos.Z >> 8));
	}

	//
	// Find a spare set of spheres.
	//
	// NOTE(review): BARREL_spheres_get() returns 0 when the pool is
	// exhausted; that case is not handled here - confirm the pool can
	// never run dry in practice.
	//
	bb->bs = BARREL_spheres_get();

	bb->flag &= ~(BARREL_FLAG_STACKED|BARREL_FLAG_STILL);
	bb->on = NULL;

	// Seed both spheres at the stationary barrel's sphere positions, at
	// rest except for a small random vertical kick.
	for (i = 0; i < 2; i++)
	{
		ASSERT(WITHIN(bb->bs + i, 2, BARREL_MAX_SPHERES - 1));

		bs = &BARREL_sphere[bb->bs + i];

		BARREL_stacked_sphere(p_barrel, i, &sx, &sy, &sz, &sradius);

		bs->x = sx;
		bs->y = sy;
		bs->z = sz;
		bs->dx = 0;
		bs->dy = (Random() & 0xff);
		bs->dz = 0;
		bs->still = 0;
		bs->radius = sradius;
	}
}
//
// Converts a moving barrel back to a stationary one: returns its sphere
// pair to the free pool, clears its stacking state and decays the global
// barrel sound-effect rate.
//
void BARREL_convert_moving_to_stationary(Thing *p_barrel)
{
	ASSERT(p_barrel->Class == CLASS_BARREL);

	Barrel *bb = p_barrel->Genus.Barrel;

	//
	// Hand the physics spheres back.
	//

	BARREL_spheres_give(bb->bs);

	bb->bs    = 0;
	bb->on    = 0;
	bb->flag |= BARREL_FLAG_STILL;

	//
	// Decay the sound-effect rate, clamping at zero.
	//

	BARREL_fx_rate = (BARREL_fx_rate > 4) ? BARREL_fx_rate - 3 : 0;
}
//
// decrease stack usage
//
// Shared scratch buffer for THING_find_sphere()/THING_find_box() results,
// deliberately kept at file scope instead of on the stack.
//

#define BARREL_MAX_FIND 16

THING_INDEX found_barrel[BARREL_MAX_FIND];
//
// Applies a spherical hit at map position (x,y,z) with the given radius to
// every barrel it overlaps.  Stationary/stacked barrels that would be
// touched are first converted to moving ones; moving barrels have each
// overlapping physics sphere pushed out of the hit sphere, picking up a
// share of the push as velocity.
//
void BARREL_hit_with_sphere(
	SLONG x,
	SLONG y,
	SLONG z,
	SLONG radius)
{
	SLONG i;
	SLONG j;

	SLONG sx;
	SLONG sy;
	SLONG sz;
	SLONG sradius;

	SLONG dx;
	SLONG dy;
	SLONG dz;

	SLONG dist;
	SLONG ddist;

	Thing *p_found;
	Barrel *bb;
	BARREL_Sphere *bs;

	SLONG num;

	num = THING_find_sphere(
			x, y, z,
			radius,
			found_barrel,
			BARREL_MAX_FIND,
			1 << CLASS_BARREL);

	//
	// Convert (x,y,z,radius) to the barrel coordinate system.
	//

	x <<= 8;
	y <<= 8;
	z <<= 8;

	radius <<= 8;

	for (i = 0; i < num; i++)
	{
		p_found = TO_THING(found_barrel[i]);
		bb      = p_found->Genus.Barrel;

	  try_this_again:;	// re-entered after a stationary barrel starts moving

		if (bb->flag & (BARREL_FLAG_STACKED | BARREL_FLAG_STILL))
		{
			//
			// If any of the potential spheres of the stacked barrel are effected by
			// this hit, then we must convert the barrel to a moving one and then apply
			// the hit to each.
			//

			for (j = 0; j < 2; j++)
			{
				BARREL_stacked_sphere(
					p_found,
					j,
					&sx,
					&sy,
					&sz,
					&sradius);

				dx = abs(sx - x);
				dy = abs(sy - y);
				dz = abs(sz - z);

				dist = QDIST3(dx,dy,dz);

				if (dist < radius + sradius)
				{
					//
					// Convert this barrel to a moving one.
					//

					BARREL_convert_stationary_to_moving(p_found);

					goto try_this_again;
				}
			}
		}
		else
		{
			//
			// Make sure none of the spheres of the barrel are within our collision sphere.
			//

			for (j = 0; j < 2; j++)
			{
				ASSERT(WITHIN(bb->bs + j, 2, BARREL_MAX_SPHERES - 1));

				bs = &BARREL_sphere[bb->bs + j];

				dx = bs->x - x;
				dy = bs->y - y;
				dz = bs->z - z;

				// +1 keeps dist non-zero for the divisions below.
				dist  = QDIST3(abs(dx),abs(dy),abs(dz)) + 1;
				ddist = radius + bs->radius - dist;

				if (ddist > 0)
				{
					//
					// Push this barrel sphere away from the collision sphere.
					//

					dx = dx * ddist / dist;
					dy = dy * ddist / dist;
					dz = dz * ddist / dist;

					bs->x += dx;
					bs->y += dy;
					bs->z += dz;

					// An eighth of the displacement becomes velocity.
					bs->dx += dx / 8;
					bs->dy += dy / 8;
					bs->dz += dz / 8;

					BARREL_hit_noise(p_found);
				}
			}
		}
	}
}
//
// Collides the given prim (positioned at map coords (x,y,z), rotated by
// yaw) against all barrels within its bounding sphere.  Stationary
// barrels that intersect the prim's expanded bounding box are knocked
// into motion; spheres of moving barrels are pushed to the nearest edge
// of the box, picking up velocity.
//
void BARREL_hit_with_prim(
	SLONG prim,
	SLONG x,
	SLONG y,
	SLONG z,
	SLONG yaw)
{
	SLONG i;
	SLONG j;

	Thing *p_found;
	Barrel *bb_found;
	BARREL_Sphere *bs;

	SLONG num;

	SLONG dx;
	SLONG dy;
	SLONG dz;

	SLONG sx;
	SLONG sy;
	SLONG sz;
	SLONG sradius;

	SLONG tx;
	SLONG tz;

	SLONG rx;
	SLONG rz;

	SLONG matrix[4];
	SLONG useangle;
	SLONG sin_yaw;
	SLONG cos_yaw;

	SLONG minx;
	SLONG miny;
	SLONG minz;
	SLONG maxx;
	SLONG maxy;
	SLONG maxz;

	SLONG dminx;
	SLONG dminz;
	SLONG dmaxx;
	SLONG dmaxy;
	SLONG dmaxz;

	SLONG best;
	SLONG best_x;
	SLONG best_y;
	SLONG best_z;

	PrimInfo *pi = get_prim_info(prim);

	//
	// Find all barrels withing the bounding sphere of the prim.
	//

	num = THING_find_sphere(
			x, y, z,
			pi->radius,
			found_barrel,
			BARREL_MAX_FIND,
			1 << CLASS_BARREL);

	//
	// The y-range we consider in worldspace.
	//
	// The box is grown by a sphere radius in every direction so a sphere
	// centre just outside the prim still counts as touching it.
	//

	miny = y + pi->miny - BARREL_SPHERE_RADIUS;
	maxy = y + pi->maxy + BARREL_SPHERE_RADIUS;

	//
	// The bounding box in object space.
	//

	minx = pi->minx - BARREL_SPHERE_RADIUS;
	minz = pi->minz - BARREL_SPHERE_RADIUS;
	maxx = pi->maxx + BARREL_SPHERE_RADIUS;
	maxz = pi->maxz + BARREL_SPHERE_RADIUS;

	//
	// Find the bounding box of the prim.
	//
	// Build a 2x2 rotation matrix for -yaw so world points can be rotated
	// into the prim's object space.
	//

	useangle  = -yaw;
	useangle &= 2047;

	sin_yaw = SIN(useangle);
	cos_yaw = COS(useangle);

	matrix[0] =  cos_yaw;
	matrix[1] =  sin_yaw;
	matrix[2] = -sin_yaw;
	matrix[3] =  cos_yaw;

	for (i = 0; i < num; i++)
	{
		ASSERT(TO_THING(found_barrel[i])->Class == CLASS_BARREL);

		p_found  = TO_THING(found_barrel[i]);
		bb_found = p_found->Genus.Barrel;

	  try_this_again:;	// re-entered after a stationary barrel starts moving

		if (bb_found->flag & (BARREL_FLAG_STACKED|BARREL_FLAG_STILL))
		{
			//
			// Check each of the potential spheres.
			//

			for (j = 0; j < 2; j++)
			{
				BARREL_stacked_sphere(
					p_found,
					j,
					&sx,
					&sy,
					&sz,
					&sradius);

				// Sphere positions are 24.8 fixed point; drop to map coords.
				sx >>= 8;
				sy >>= 8;
				sz >>= 8;

				if (WITHIN(sy, miny, maxy))
				{
					// Rotate the sphere centre into the prim's object space.
					tx = sx - x;
					tz = sz - z;

					rx = MUL64(tx, matrix[0]) + MUL64(tz, matrix[1]);
					rz = MUL64(tx, matrix[2]) + MUL64(tz, matrix[3]);

					if (WITHIN(rx, minx, maxx) &&
						WITHIN(rz, minz, maxz))
					{
						//
						// The barrel would collide. Convert this barrel to a moving one and
						// do the collision stuff again.
						//

						BARREL_convert_stationary_to_moving(p_found);

						goto try_this_again;
					}
				}
			}
		}
		else
		{
			//
			// Check each sphere.
			//

			for (j = 0; j < 2; j++)
			{
				ASSERT(WITHIN(bb_found->bs + j, 2, BARREL_MAX_SPHERES - 1));

				bs = &BARREL_sphere[bb_found->bs + j];

				sx = bs->x >> 8;
				sy = bs->y >> 8;
				sz = bs->z >> 8;

				if (WITHIN(sy, miny, maxy))
				{
					tx = sx - x;
					tz = sz - z;

					rx = MUL64(tx, matrix[0]) + MUL64(tz, matrix[1]);
					rz = MUL64(tx, matrix[2]) + MUL64(tz, matrix[3]);

					if (WITHIN(rx, minx, maxx) &&
						WITHIN(rz, minz, maxz))
					{
						//
						// The barrel sphere has collided. Find the nearest point on the
						// edge of the box.
						//
						// Candidates are the four side faces and the top face
						// (there is no dminy candidate, so a sphere is never
						// pushed out of the bottom of the box).
						//

						dminx = rx - minx;
						dminz = rz - minz;
						dmaxx = maxx - rx;
						dmaxy = maxy - sy;
						dmaxz = maxz - rz;

						best   = dminx;
						best_x = rx;
						best_y = sy;
						best_z = rz;

						if (dminz < best)
						{
							best   = dminz;
							best_x = rx;
							best_y = sy;
							best_z = minz;
						}

						if (dmaxx < best)
						{
							best   = dmaxx;
							best_x = maxx;
							best_y = sy;
							best_z = rz;
						}

						if (dmaxy < best)
						{
							best   = dmaxy;
							best_x = rx;
							best_y = maxy;
							best_z = rz;
						}

						if (dmaxz < best)
						{
							best   = dmaxz;
							best_x = rx;
							best_y = sy;
							best_z = maxz;
						}

						//
						// Unrotate (best_x,best_y,best_z) out of barrel space.
						//
						// Multiplying by the transpose of the matrix inverts the
						// earlier rotation.
						//

						sx = MUL64(best_x, matrix[0]) + MUL64(best_z, matrix[2]);
						sz = MUL64(best_x, matrix[1]) + MUL64(best_z, matrix[3]);

						sx += x;
						sy  = best_y;
						sz += z;

						//
						// Move the sphere to the best position on the edge of the box.
						//

						sx <<= 8;
						sy <<= 8;
						sz <<= 8;

						dx = sx - bs->x;
						dy = sy - bs->y;
						dz = sz - bs->z;

						bs->x = sx;
						bs->y = sy;
						bs->z = sz;

						// An eighth of the displacement becomes velocity.
						bs->dx += dx / 8;
						bs->dy += dy / 8;
						bs->dz += dz / 8;

						BARREL_hit_noise(p_found);
					}
				}
			}
		}
	}
}
//
// Processes a barrel sphere.
//
// One physics step: applies damping and gravity, moves the sphere,
// resolves collisions against other barrels, the ground height map and
// impassable map-square edges, and updates the sphere's 'still' counter
// used to decide when the barrel can stop being simulated.
//
void BARREL_process_sphere(Thing *p_barrel, Barrel *bb, BARREL_Sphere *bs)
{
	SLONG i;
	SLONG j;

	SLONG sx;
	SLONG sy;
	SLONG sz;
	SLONG sradius;

	SLONG dx;
	SLONG dy;
	SLONG dz;

	SLONG dist;
	SLONG ddist;

	Thing *p_found;
	Barrel *bb_found;
	BARREL_Sphere *bso;

	SLONG num;
	SLONG ground;

	//
	// Gravity and damping.
	//
	// Velocity loses 1/32nd per tick, then gravity is added and the
	// sphere is moved.
	//

	bs->dx -= bs->dx / 32;
	bs->dy -= bs->dy / 32;
	bs->dz -= bs->dz / 32;

	bs->dy -= BARREL_GRAVITY;

	bs->x += bs->dx;
	bs->y += bs->dy;
	bs->z += bs->dz;

	//
	// Collision with other barrels.
	//

	num = THING_find_sphere(
			bs->x >> 8,
			bs->y >> 8,
			bs->z >> 8,
			BARREL_HEIGHT,
			found_barrel,
			BARREL_MAX_FIND,
			1 << CLASS_BARREL);

	for (i = 0; i < num ; i++)
	{
		ASSERT(TO_THING(found_barrel[i])->Class == CLASS_BARREL);

		p_found = TO_THING(found_barrel[i]);

		if (p_found == p_barrel)
		{
			//
			// Don't collide with yourself.
			//

			continue;
		}

		bb_found = p_found->Genus.Barrel;

	  try_this_again:;	// re-entered after a stationary barrel starts moving

		if (bb_found->flag & (BARREL_FLAG_STACKED|BARREL_FLAG_STILL))
		{
			//
			// Would we collide with this barrel if it was moving?
			//

			for (j = 0; j < 2; j++)
			{
				BARREL_stacked_sphere(
					p_found,
					j,
					&sx,
					&sy,
					&sz,
					&sradius);

				dx = sx - bs->x;
				dy = sy - bs->y;
				dz = sz - bs->z;

				// +1 keeps dist non-zero for later divisions.
				dist = QDIST3(abs(dx),abs(dy),abs(dz)) + 1;

				if (dist < sradius)
				{
					//
					// Yes. Convert this barrel to a moving one and try again.
					//

					BARREL_convert_stationary_to_moving(p_found);

					goto try_this_again;
				}
			}
		}
		else
		{
			//
			// Collide with the spheres of this barrel.
			//

			for (j = 0; j < 2; j++)
			{
				ASSERT(WITHIN(bb_found->bs + j, 2, BARREL_MAX_SPHERES - 1));

				bso = &BARREL_sphere[bb_found->bs + j];

				dx = bso->x - bs->x;
				dy = bso->y - bs->y;
				dz = bso->z - bs->z;

				dist = QDIST3(abs(dx),abs(dy),abs(dz)) + 1;

				// NOTE(review): only bs->radius is compared, not the sum of
				// both radii - presumably the two radii are equal; confirm.
				if (dist < bs->radius)
				{
					//
					// These two spheres have collided. Now push them apart.
					//

					ddist = bs->radius - dist;
					ddist /= 4;

					dx = dx * ddist / dist;
					dy = dy * ddist / dist;
					dz = dz * ddist / dist;

					bs->x -= dx;
					bs->y -= dy;
					bs->z -= dz;

					bso->x += dx;
					bso->y += dy;
					bso->z += dz;

					BARREL_hit_noise(p_barrel);
				}
			}
		}
	}

	//
	// Collision with the ground.
	//

	ground = PAP_calc_map_height_at(
				bs->x >> 8,
				bs->z >> 8) << 8;

	// Let the sphere sink a quarter-radius into the ground.
	ground += bs->radius >> 2;

	if (bs->y < ground)
	{
		if (bs->dy<-1000) BARREL_hit_noise(p_barrel);

		if ((bs->y < ground - 0x8000) || (bs->dy > 0))
		{
			//
			// Hit a wall- not gone underground.
			//
			// Undo the horizontal move and reflect the velocity.
			//

			bs->x -= bs->dx;
			bs->z -= bs->dz;

			bs->dx = -bs->dx;
			bs->dz = -bs->dz;
		}
		else
		{
			// Landed: clamp to the ground, bounce with half energy and
			// kill horizontal velocity.
			bs->y  = ground;
			bs->dx = 0;
			bs->dy = abs(bs->dy) >> 1;
			bs->dz = 0;

			bb->flag |= BARREL_FLAG_GROUNDED;
		}
	}

	//
	// Collision with the sides of fences and buildings.
	//
	// Each map square edge the navigation data marks as impassable acts
	// as a wall the sphere is pushed back from.
	//

	if (WITHIN(bs->x >> 16, 0, PAP_SIZE_HI - 1) &&
		WITHIN(bs->z >> 16, 0, PAP_SIZE_HI - 1))
	{
		MAV_Opt mo = MAV_opt[MAV_NAV(bs->x >> 16,bs->z >> 16)];

		if (!(mo.opt[MAV_DIR_XS] & (MAV_CAPS_GOTO|MAV_CAPS_FALL_OFF)))
		{
			//
			// The sphere shouldn't be near the (x-small) bit of this mapsquare.
			//

			if ((bs->x & 0xffff) < bs->radius)
			{
				bs->x &= ~0xffff;
				bs->x |= bs->radius;

				bs->dx = abs(bs->dx) >> 1;
				bs->dy = 0;
				bs->dz = 0;
			}
		}

		if (!(mo.opt[MAV_DIR_XL] & (MAV_CAPS_GOTO|MAV_CAPS_FALL_OFF)))
		{
			//
			// The sphere shouldn't be near the (x-large) bit of this mapsquare.
			//

			if ((bs->x & 0xffff) > 0x10000 - bs->radius)
			{
				bs->x &= ~0xffff;
				bs->x |= 0x10000 - bs->radius;

				bs->dx = -(abs(bs->dx) >> 1);
				bs->dy = 0;
				bs->dz = 0;
			}
		}

		if (!(mo.opt[MAV_DIR_ZS] & (MAV_CAPS_GOTO|MAV_CAPS_FALL_OFF)))
		{
			//
			// The sphere shouldn't be near the (z-small) bit of this mapsquare.
			//

			if ((bs->z & 0xffff) < bs->radius)
			{
				bs->z &= ~0xffff;
				bs->z |= bs->radius;

				bs->dx = 0;
				bs->dy = 0;
				bs->dz = abs(bs->dz) >> 1;
			}
		}

		if (!(mo.opt[MAV_DIR_ZL] & (MAV_CAPS_GOTO|MAV_CAPS_FALL_OFF)))
		{
			//
			// The sphere shouldn't be near the (z-large) bit of this mapsquare.
			//

			if ((bs->z & 0xffff) > 0x10000 - bs->radius)
			{
				bs->z &= ~0xffff;
				bs->z |= 0x10000 - bs->radius;

				bs->dx = 0;
				bs->dy = 0;
				bs->dz = -(abs(bs->dz) >> 1);
			}
		}
	}

	//
	// Track how long the sphere has been (nearly) motionless: used by
	// the caller to put the barrel to sleep.
	//

	if (abs(bs->dx) + abs(bs->dy) + abs(bs->dz) < 0x200)
	{
		bs->still += 1;
	}
	else
	{
		bs->still = 0;
	}
}
//
// Pushes the two spheres together/away so they are the right distance apart.
//
// Applies a quarter of the separation error as an impulse to both
// spheres (in opposite directions), and directly corrects positions
// when the spheres are dangerously close.
//
// NOTE(review): the "dangerously close" position correction appears
// twice - once unconditionally and once again behind the third argument
// - so when the flag is TRUE the correction is applied twice.  This
// looks like a copy/paste leftover, but the game's feel may be tuned
// around it; confirm before changing.
//
void BARREL_push_apart(
	BARREL_Sphere *bs1,
	BARREL_Sphere *bs2,
	SLONG accelerate_the_spheres_apart_if_they_are_too_close_together)
{
	SLONG dx = bs2->x - bs1->x;
	SLONG dy = bs2->y - bs1->y;
	SLONG dz = bs2->z - bs1->z;

	// +1 keeps dist non-zero for the divisions below.
	SLONG dist  = QDIST3(abs(dx),abs(dy),abs(dz)) + 1;
	SLONG ddist = (BARREL_SPHERE_DIST << 8) - dist;

	ddist /= 4;

	dx = dx * ddist / dist;
	dy = dy * ddist / dist;
	dz = dz * ddist / dist;

	bs1->dx -= dx;
	bs1->dy -= dy;
	bs1->dz -= dz;

	bs2->dx += dx;
	bs2->dy += dy;
	bs2->dz += dz;

	if (ddist > ((BARREL_SPHERE_DIST << 7) / 8))
	{
		//
		// The spheres are dangerously close together.
		//

		bs1->x -= dx * 2;
		bs1->y -= dy * 2;
		bs1->z -= dz * 2;

		bs2->x += dx * 2;
		bs2->y += dy * 2;
		bs2->z += dz * 2;
	}

	if (accelerate_the_spheres_apart_if_they_are_too_close_together)
	{
		if (ddist > ((BARREL_SPHERE_DIST << 7) / 8))
		{
			//
			// The spheres are dangerously close together.
			//

			bs1->x -= dx * 2;
			bs1->y -= dy * 2;
			bs1->z -= dz * 2;

			bs2->x += dx * 2;
			bs2->y += dy * 2;
			bs2->z += dz * 2;
		}
	}
}
//
// Per-tick state function for a barrel.
//
// Stacked barrels only check whether their supporting barrel has moved;
// still barrels do nothing.  Moving barrels simulate both physics
// spheres, spill cans when appropriate, reposition and reorient the
// mesh from the sphere pair, let barrels stand up on the ground, and go
// back to stationary once both spheres have been still long enough.
//
void BARREL_process_normal(Thing *p_barrel)
{
	ASSERT(p_barrel->Class == CLASS_BARREL);

	GameCoord newpos;

	Barrel *bb = p_barrel->Genus.Barrel;

	if (bb->flag & BARREL_FLAG_STACKED)
	{
		if (bb->on)
		{
			//
			// Do nothing unless the barrel we are stacked on has started.
			//

			Thing *p_stackedon = TO_THING(bb->on);

			ASSERT(p_stackedon->Class == CLASS_BARREL);

			if (p_stackedon->Genus.Barrel->flag & BARREL_FLAG_STACKED)
			{
				//
				// We are okay.
				//
			}
			else
			{
				//
				// Make the barrel start moving.
				//

				BARREL_convert_stationary_to_moving(p_barrel);
			}
		}

		return;
	}

	if (bb->flag & BARREL_FLAG_STILL)
	{
		//
		// No processing required.
		//

		return;
	}

	//
	// Process each sphere of the barrel.
	//

	ASSERT(WITHIN(bb->bs + 0, 2, BARREL_MAX_SPHERES - 1));
	ASSERT(WITHIN(bb->bs + 1, 2, BARREL_MAX_SPHERES - 1));

	BARREL_Sphere *bs1 = &BARREL_sphere[bb->bs + 0];
	BARREL_Sphere *bs2 = &BARREL_sphere[bb->bs + 1];

	// Cleared here, set again by BARREL_process_sphere() on ground contact.
	bb->flag &= ~BARREL_FLAG_GROUNDED;

	if (bb->flag & BARREL_FLAG_HELD)
	{
		//
		// Dont process the barrel.
		//
	}
	else
	{
		BARREL_process_sphere(p_barrel, bb, bs1);
		BARREL_process_sphere(p_barrel, bb, bs2);

		if (bb->flag & BARREL_FLAG_CANS)
		{
			//
			// Should the cans spill out of the barrel?
			//

			if (bb->flag & BARREL_FLAG_GROUNDED)
			{
				if (bs1->still &&
					bs2->still)
				{
					//
					// Make sure the barrel is not standing up.
					//
					// Tilt 512 and 1536 are the two vertical orientations.
					//

					SLONG dtilt1 = abs(p_barrel->Draw.Mesh->Tilt - 512);
					SLONG dtilt2 = abs(p_barrel->Draw.Mesh->Tilt - 1536);

					if (MIN(dtilt1, dtilt2) < 224)
					{
						//
						// Standing up...
						//
					}
					else
					{
						// Lying down and at rest: spill once, in front of
						// the open end.
						bb->flag &= ~BARREL_FLAG_CANS;

						{
							SLONG angle;
							SLONG cx;
							SLONG cz;

							angle = p_barrel->Draw.Mesh->Angle;

							cx = p_barrel->WorldPos.X >> 8;
							cz = p_barrel->WorldPos.Z >> 8;

							cx += SIN(angle) >> 11;
							cz += COS(angle) >> 11;

							DIRT_create_cans(cx, cz, angle);
						}
					}
				}
			}
		}
	}

	//
	// Make the spheres be the correct distance apart.
	//

	BARREL_push_apart(
		bs1,
		bs2,
		TRUE);

	//
	// The barrel is positioned at the average of the two spheres.
	//
	// ('+' binds tighter than '>>', so these are (a + b) >> 1.)
	//

	newpos.X = bs1->x + bs2->x >> 1;
	newpos.Y = bs1->y + bs2->y >> 1;
	newpos.Z = bs1->z + bs2->z >> 1;

	move_thing_on_map(p_barrel, &newpos);

	//
	// Work out the yaw and pitch of the barrel.
	//

	SLONG dx = bs2->x - bs1->x;
	SLONG dy = bs2->y - bs1->y;
	SLONG dz = bs2->z - bs1->z;

	SLONG dxdz = QDIST2(abs(dx),abs(dz));

	if (dxdz < 0x100)
	{
		//
		// Dont work out the yaw - we dont have enough info.
		//
	}
	else
	{
		p_barrel->Draw.Mesh->Angle = calc_angle(dx,dz);
	}

	p_barrel->Draw.Mesh->Tilt = calc_angle(dy, dxdz);

	// Bins (and prim 145) are modelled upside down relative to barrels.
	if (bb->type == BARREL_TYPE_BIN || p_barrel->Draw.Mesh->ObjectId == 145)
	{
		p_barrel->Draw.Mesh->Tilt += 1024;
		p_barrel->Draw.Mesh->Tilt &= 2047;
	}

	if (bb->flag & BARREL_FLAG_GROUNDED)
	{
		//
		// Make the barrels be able to stand up.
		//

		SLONG dtilt1 = abs(p_barrel->Draw.Mesh->Tilt - 512);
		SLONG dtilt2 = abs(p_barrel->Draw.Mesh->Tilt - 1536);

		if (MIN(dtilt1, dtilt2) < 224)
		{
			if ((bb->type == BARREL_TYPE_CONE || bb->type == BARREL_TYPE_BIN) && (bs1->y > bs2->y))
			{
				//
				// Cones and bins only stand up one way...
				//
			}
			else
			{
				//
				// Make the two spheres accelerate towards one another in (dx,dz) only.
				//

				dx = (bs2->x - bs1->x) / 4;
				dz = (bs2->z - bs1->z) / 4;

				bs1->x += dx;
				bs1->z += dz;

				bs2->x -= dx;
				bs2->z -= dz;

				// Damp horizontal velocity so the barrel settles upright.
				bs1->dx /=2;
				bs1->dz /=2;
				bs2->dx /=2;
				bs2->dz /=2;
			}
		}
	}

	//
	// Can we stop processing this barrel?
	//

	if (bs1->still > 64 &&
		bs2->still > 64)
	{
		//
		// Both spheres have been still for a while- we can stop
		// processing the barrel.
		//

		BARREL_convert_moving_to_stationary(p_barrel);
	}

#ifndef PSX
#ifndef	TARGET_DC
	// Debug visualisation: draw a line between the two spheres.
	if (ControlFlag&&allow_debug_keys)
	{
		AENG_world_line(
			bs1->x >> 8,
			bs1->y >> 8,
			bs1->z >> 8,
			8,
			0x0022ff22,
			bs2->x >> 8,
			bs2->y >> 8,
			bs2->z >> 8,
			8,
			0x00ff4444,
			TRUE);
	}
#endif
#endif
}
//
// Creates a new barrel of the given type/prim at map position (x,z),
// optionally linked to the waypoint that created it.  Returns the thing
// number of the new barrel, or NULL (0) if no barrel could be created.
//
// NOTE(review): the function returns NULL although its return type is
// UWORD - NULL is evidently used as "no thing" (index 0) throughout.
//
UWORD BARREL_alloc(
	SLONG type,
	SLONG prim,
	SLONG x,
	SLONG z,
	SLONG waypoint)
{
	SLONG i;
	SLONG y;
	SLONG on;
	SLONG ony;

	SLONG dx;
	SLONG dz;
	SLONG dist;

	DrawMesh *dm;
	Thing *p_thing;
	Thing *p_found;
	Barrel *bb;

	SLONG num;

	//
	// We need to get a thing, a barrel and a drawmesh.
	//

	if (!WITHIN(BARREL_barrel_upto, 0, BARREL_MAX_BARRELS - 1))
	{
		//
		// No more barrels left!
		//

		return NULL;
	}

	bb = &BARREL_barrel[BARREL_barrel_upto++];

	//
	// Now get the thing.
	//

	p_thing = alloc_thing(CLASS_BARREL);

	if(!p_thing)
	{
		//
		// No free things: recycle the existing barrel nearest to (x,z).
		//
		// NOTE(review): the recycled thing's previous draw mesh, barrel
		// struct and any spheres are not released before being
		// overwritten below - possible leak; confirm intended.
		//

		Thing *p_del;
		SLONG c0;
		SLONG best_dist=0x7fffffff;

		for(c0=1;c0<MAX_THINGS;c0++)
		{
			p_del=TO_THING(c0);
			if(p_del->Class==CLASS_BARREL)
			{
				SLONG dx,dz,dist;

				dx=x-(p_del->WorldPos.X>>8);
				dz=z-(p_del->WorldPos.Z>>8);

				dist=QDIST2(abs(dx),abs(dz));

				if(dist<best_dist)
				{
					p_thing=p_del;
					best_dist=dist;
				}
			}
		}
	}

	if (p_thing)
	{
		//
		// And finally get the drawmesh
		//

		dm = alloc_draw_mesh();

		if (dm)
		{
			p_thing->Class        = CLASS_BARREL;
			p_thing->Flags        = 0;
			p_thing->State        = STATE_NORMAL;
			p_thing->StateFn      = BARREL_process_normal;
			p_thing->Genus.Barrel = bb;
			p_thing->DrawType     = DT_MESH;
			p_thing->Draw.Mesh    = dm;
			// The Velocity field is reused to store the creating waypoint.
			p_thing->Velocity     = waypoint;

			dm->Angle    = Random() & 2047;
			dm->Tilt     = 512;
			dm->Roll     = 0;	// Wierd huh?!
			dm->ObjectId = prim;
			dm->Hm       = 0;
			dm->Cache    = 0;

			// Bins (and prim 145) are modelled upside down.
			if (type == BARREL_TYPE_BIN || prim == 145)
			{
				dm->Tilt = 1536;
			}

			bb->type = type;
			bb->flag = BARREL_FLAG_STACKED;
			bb->on   = NULL;	// NULL => stacked on the ground
			bb->bs   = NULL;

			if (bb->type == BARREL_TYPE_BIN)
			{
				bb->flag |= BARREL_FLAG_RUBBISH | BARREL_FLAG_CANS;	// Bins contain rubbish.
			}

			//
			// Look for a barrel we should be stacked up on top of.
			//

			num = THING_find_box(
					x - BARREL_STACK_RADIUS,
					z - BARREL_STACK_RADIUS,
					x + BARREL_STACK_RADIUS,
					z + BARREL_STACK_RADIUS,
					found_barrel,
					BARREL_MAX_FIND,
					1 << CLASS_BARREL);

			y  = PAP_calc_map_height_at(x,z) + (BARREL_HEIGHT / 2);
			on = NULL;

			if (type == BARREL_TYPE_BURNING) {
				//
				// Attach an immolation pyro to the new barrel.
				//
				// NOTE(review): PYRO_create()'s result is dereferenced
				// before the null check on 'pyro' - if PYRO_create can
				// return NULL this crashes; confirm.
				//

				Pyro* pyro;
				pyro=PYRO_create(p_thing->WorldPos,PYRO_IMMOLATE)->Genus.Pyro;
				if(pyro)
				{
					pyro->victim=p_thing;
					pyro->Flags=PYRO_FLAGS_FLICKER; // immolate faces
				}
			}

			if (type == BARREL_TYPE_CONE)
			{
				//
				// Cones don't stack- and they aren't as high off the ground...
				//

				y -= 15;
			}
			else
			{
				// Stack on the highest overlapping barrel, if any.
				for (i = 0; i < num; i++)
				{
					p_found = TO_THING(found_barrel[i]);

					dx = abs((p_found->WorldPos.X >> 8) - x);
					dz = abs((p_found->WorldPos.Z >> 8) - z);

					dist = QDIST2(dx,dz);

					if (dist < BARREL_STACK_RADIUS)
					{
						ony = (p_found->WorldPos.Y >> 8) + BARREL_HEIGHT;

						if (ony > y)
						{
							y  = ony;
							on = found_barrel[i];
						}
					}
				}
			}

			bb->on = on;

			p_thing->WorldPos.X = x << 8;
			p_thing->WorldPos.Y = y << 8;
			p_thing->WorldPos.Z = z << 8;

			// Jitter the position slightly so stacks look natural.
			p_thing->WorldPos.X += (Random() & 0x3ff) - 0x1ff;
			p_thing->WorldPos.Y += (Random() & 0x3ff) - 0x1ff;
			p_thing->WorldPos.Z += (Random() & 0x3ff) - 0x1ff;

			add_thing_to_map(p_thing);

			return THING_NUMBER(p_thing);
		}
		else
		{
			//
			// Free up the thing and the barrel.
			//

			BARREL_barrel_upto -= 1;

			free_thing(p_thing);

			return NULL;
		}
	}
	else
	{
		//
		// Free up the barrel.
		//

		BARREL_barrel_upto -= 1;

		return NULL;
	}
}
/*
void BARREL_position_on_hands(Thing *p_barrel, Thing *p_person)
{
ASSERT(p_barrel->Class == CLASS_BARREL);
ASSERT(p_person->Class == CLASS_PERSON);
SLONG lhx;
SLONG lhy;
SLONG lhz;
SLONG rhx;
SLONG rhy;
SLONG rhz;
SLONG lrx;
SLONG lry;
SLONG lrz;
SLONG rrx;
SLONG rry;
SLONG rrz;
if (p_barrel->Genus.Barrel->flag & (BARREL_FLAG_STACKED | BARREL_FLAG_STILL))
{
//
// Convert this barrel to a moving one.
//
BARREL_convert_stationary_to_moving(p_barrel);
}
//
// Work out the positions of the two hands.
//
calc_sub_objects_position(
p_person,
p_person->Draw.Tweened->AnimTween,
SUB_OBJECT_LEFT_HAND,
&lhx,
&lhy,
&lhz);
lhx <<= 8;
lhy <<= 8;
lhz <<= 8;
lhx += p_person->WorldPos.X;
lhy += p_person->WorldPos.Y;
lhz += p_person->WorldPos.Z;
calc_sub_objects_position(
p_person,
p_person->Draw.Tweened->AnimTween,
SUB_OBJECT_RIGHT_HAND,
&rhx,
&rhy,
&rhz);
rhx <<= 8;
rhy <<= 8;
rhz <<= 8;
rhx += p_person->WorldPos.X;
rhy += p_person->WorldPos.Y;
rhz += p_person->WorldPos.Z;
//
// Work out the position of the elbows.
//
calc_sub_objects_position(
p_person,
p_person->Draw.Tweened->AnimTween,
SUB_OBJECT_LEFT_RADIUS,
&lrx,
&lry,
&lrz);
lrx <<= 8;
lry <<= 8;
lrz <<= 8;
lrx += p_person->WorldPos.X;
lry += p_person->WorldPos.Y;
lrz += p_person->WorldPos.Z;
calc_sub_objects_position(
p_person,
p_person->Draw.Tweened->AnimTween,
SUB_OBJECT_RIGHT_RADIUS,
&rrx,
&rry,
&rrz);
rrx <<= 8;
rry <<= 8;
rrz <<= 8;
rrx += p_person->WorldPos.X;
rry += p_person->WorldPos.Y;
rrz += p_person->WorldPos.Z;
//
// Position the barrel spheres beyond the two hands.
//
ASSERT(WITHIN(p_barrel->Genus.Barrel->bs + 0, 2, BARREL_MAX_SPHERES - 1));
ASSERT(WITHIN(p_barrel->Genus.Barrel->bs + 1, 2, BARREL_MAX_SPHERES - 1));
BARREL_Sphere *bs1 = &BARREL_sphere[p_barrel->Genus.Barrel->bs + 0];
BARREL_Sphere *bs2 = &BARREL_sphere[p_barrel->Genus.Barrel->bs + 1];
//
// Rememober the old position of the barrel spheres so we can work out their velocity.
//
SLONG old_bs1_x = bs1->x;
SLONG old_bs1_y = bs1->y;
SLONG old_bs1_z = bs1->z;
SLONG old_bs2_x = bs2->x;
SLONG old_bs2_y = bs2->y;
SLONG old_bs2_z = bs2->z;
//
// The new position of the barrels.
//
bs1->x = lhx + (lhx - lrx << 1);
bs1->y = lhy + (lhy - lry << 1);
bs1->z = lhz + (lhz - lrz << 1);
bs2->x = rhx + (rhx - rrx << 1);
bs2->y = rhy + (rhy - rry << 1);
bs2->z = rhz + (rhz - rrz << 1);
BARREL_push_apart(
bs1,
bs2,
FALSE);
//
// The velocity of the barrels.
//
bs1->dx = bs1->x - old_bs1_x;
bs1->dy = bs1->y - old_bs1_y;
bs1->dz = bs1->z - old_bs1_z;
bs2->dx = bs2->x - old_bs2_x;
bs2->dy = bs2->y - old_bs2_y;
bs2->dz = bs2->z - old_bs2_z;
//
// Remember that this barrel is being held.
//
p_barrel->Genus.Barrel->flag |= BARREL_FLAG_HELD;
}
void BARREL_throw(Thing *p_barrel)
{
//
// Just mark the barrel as not being held and that will start
// it processing again.
//
p_barrel->Genus.Barrel->flag &= ~BARREL_FLAG_HELD;
//
// Make the barrel go in the air a bit more.
//
ASSERT(WITHIN(p_barrel->Genus.Barrel->bs + 0, 2, BARREL_MAX_SPHERES - 1));
ASSERT(WITHIN(p_barrel->Genus.Barrel->bs + 1, 2, BARREL_MAX_SPHERES - 1));
BARREL_Sphere *bs1 = &BARREL_sphere[p_barrel->Genus.Barrel->bs + 0];
BARREL_Sphere *bs2 = &BARREL_sphere[p_barrel->Genus.Barrel->bs + 1];
bs1->dy += 0x1000;
bs2->dy += 0x1000;
}
*/
#ifndef PSX

//
// Returns the world position at which fire effects should appear for the
// given barrel: the centre of its first collision sphere.
//
GameCoord BARREL_fire_pos(Thing *p_barrel)
{
	GameCoord ans;

	ASSERT(p_barrel->Class == CLASS_BARREL);

	if (p_barrel->Genus.Barrel->flag & (BARREL_FLAG_STILL|BARREL_FLAG_STACKED))
	{
		//
		// Not moving- derive the sphere position from the mesh.
		//

		SLONG cx;
		SLONG cy;
		SLONG cz;
		SLONG cradius;

		BARREL_stacked_sphere(
			p_barrel,
			0,
			&cx,
			&cy,
			&cz,
			&cradius);

		ans.X = cx;
		ans.Y = cy;
		ans.Z = cz;
	}
	else
	{
		//
		// Moving- read the live physics sphere directly.
		//

		BARREL_Sphere *p_sphere;

		ASSERT(WITHIN(p_barrel->Genus.Barrel->bs, 2, BARREL_MAX_SPHERES - 1));

		p_sphere = &BARREL_sphere[p_barrel->Genus.Barrel->bs];

		ans.X = p_sphere->x;
		ans.Y = p_sphere->y;
		ans.Z = p_sphere->z;
	}

	return ans;
}

#endif
//
// Removes a barrel from the game.
//
// Any barrel stacked on top of it is knocked into motion, the waypoint
// that created it is told the barrel is gone, its physics spheres are
// returned to the pool, and the thing itself is freed.
//
void BARREL_dissapear(Thing *p_barrel)
{
	//
	// If any barrel is stacked on this one...
	//

	UWORD found[8];
	SLONG num_found;

	num_found = THING_find_sphere(
					p_barrel->WorldPos.X >> 8,
					p_barrel->WorldPos.Y >> 8,
					p_barrel->WorldPos.Z >> 8,
					0x200,
					found,
					8,
					1 << CLASS_BARREL);

	SLONG i;
	Thing *p_found;

	for (i = 0; i < num_found; i++)
	{
		p_found = TO_THING(found[i]);

		if (p_found == p_barrel)
		{
			continue;
		}

		if (p_found->Genus.Barrel->flag & BARREL_FLAG_STACKED)
		{
			if (p_found->Genus.Barrel->on == THING_NUMBER(p_barrel))
			{
				//
				// This barrel was stacked on the one that has dissapeared!
				//

				BARREL_convert_stationary_to_moving(p_found);
			}
		}
	}

	//
	// Tell the waypoint that created the barrel that there is
	// no longer a barrel.
	//

	if (p_barrel->Velocity)
	{
		ASSERT(WITHIN(p_barrel->Velocity, 1, EWAY_way_upto - 1));
		ASSERT(EWAY_way[p_barrel->Velocity].ed.type == EWAY_DO_CREATE_BARREL);

		EWAY_way[p_barrel->Velocity].ed.arg1 = NULL;
	}

	if (p_barrel->Genus.Barrel->flag & (BARREL_FLAG_STACKED | BARREL_FLAG_STILL))
	{
		//
		// The barrel doesn't have any sphere structures.
		//
	}
	else
	{
		//
		// Give up the barrel's spheres.
		//

		BARREL_convert_moving_to_stationary(p_barrel);
	}

	//
	// Make the player choose a new target if they were targetting this
	// barrel.
	//
	// bugfix: this check used to run AFTER free_thing(p_barrel), reading
	// the barrel's thing number through a freed thing; it now runs
	// before the thing is released.
	//

	if (NET_PERSON(0)->Genus.Person->Target == THING_NUMBER(p_barrel))
	{
		NET_PERSON(0)->Genus.Person->Target = NULL;
	}

	remove_thing_from_map(p_barrel);

	free_draw_mesh(p_barrel->Draw.Mesh);
	free_thing(p_barrel);
}
//
// Called when a barrel is shot.
//
// Plays an explosion sound; explosive barrels (anything but cones and
// bins) are removed, spawn a firebomb pyro/shockwave and knock nearby
// barrels away; cones and bins just get a small random knock.
//
void BARREL_shoot(
	Thing *p_barrel,
	Thing *p_shooter)
{
	ULONG in_the_air;
	SWORD wave;

	// NOTE(review): in_the_air is only consumed by the PSX build (and by
	// the commented-out particle code); on other targets it is unused.
	if ( (p_barrel->Genus.Barrel->flag & BARREL_FLAG_STILL) &&
		!(p_barrel->Genus.Barrel->flag & BARREL_FLAG_STACKED))
	{
		in_the_air = FALSE;
	}
	else
	{
		in_the_air = TRUE;
	}

	GameCoord barrelpos = p_barrel->WorldPos;

	wave=S_EXPLODE_SMALL;
	if (!(Random()&3)) wave++; // 25% chance of a bigger bang than usual

	MFX_play_xyz(THING_NUMBER(p_barrel),wave,0,barrelpos.X,barrelpos.Y,barrelpos.Z);

	if (p_barrel->Genus.Barrel->type != BARREL_TYPE_CONE &&
		p_barrel->Genus.Barrel->type != BARREL_TYPE_BIN)
	{
		if (p_shooter->Genus.Person->Target == THING_NUMBER(p_barrel))
		{
			//
			// Stop this person targetting the barrel... because the
			// barrel is going!
			//

			p_shooter->Genus.Person->Target = NULL;
		}

		BARREL_dissapear(p_barrel);

		/*
		if (in_the_air)
		{
			SLONG i;
			GameCoord pos = barrelpos;
			PARTICLE_Add(pos.X,pos.Y,pos.Z, 0, 0, 0, POLY_PAGE_SMOKECLOUD, 2, 0xFFFFFFFF, PFLAG_SPRITEANI|PFLAG_SPRITELOOP|PFLAG_FADE|PFLAG_RESIZE, 100, 440+(Random()&0x7f), 1, 3, 50);
			PARTICLE_Add(pos.X,pos.Y,pos.Z, 0, 0, 0, POLY_PAGE_EXPLODE1-(Random()&1), 2, 0xFFFFFF, PFLAG_SPRITEANI|PFLAG_RESIZE, 20, 120+(Random()&0x7f), 1, 0, 30);
			PARTICLE_Add(pos.X,pos.Y,pos.Z, 0, 0, 0, POLY_PAGE_EXPLODE1-(Random()&1), 2, 0x7fFFFFFF, PFLAG_SPRITEANI|PFLAG_RESIZE, 20, 120+(Random()&0x7f), 1, 0, 40);
			for (i=0;i<25;i++) {
				PARTICLE_Add(pos.X,pos.Y,pos.Z, ((Random()&0x1f)-0xf)<<6, (Random()&0x1f)<<6, ((Random()&0x1f)-0xf)<<6, POLY_PAGE_EXPLODE1-(Random()&1), 2+((Random()&3)<<2), 0xFFFFFF, PFLAG_GRAVITY|PFLAG_RESIZE2|PFLAG_FADE|PFLAG_INVALPHA, 240, 20+(Random()&0x1f), 1, 3+(Random()&3), 0);
				if (Random()&3)
					PARTICLE_Add(pos.X,pos.Y,pos.Z, ((Random()&0x1f)-0xf)<<8, (Random()&0x1f)<<8, ((Random()&0x1f)-0xf)<<8, POLY_PAGE_EXPLODE1-(Random()&1), 2+((Random()&3)<<2), 0xFFFFFF, PFLAG_GRAVITY|PFLAG_RESIZE2|PFLAG_FADE|PFLAG_INVALPHA|PFLAG_BOUNCE, 240, 20+(Random()&0x1f), 1, 2+(Random()&3), 0);
				else
					PARTICLE_Add(pos.X,pos.Y,pos.Z, ((Random()&0x1f)-0xf)<<12, (Random()&0x1f)<<8, ((Random()&0x1f)-0xf)<<12, POLY_PAGE_EXPLODE1-(Random()&1), 2+((Random()&3)<<2), 0xFFFFFF, PFLAG_GRAVITY|PFLAG_RESIZE2|PFLAG_FADE|PFLAG_INVALPHA, 240, 20+(Random()&0x1f), 1, 3, 0);
			}
		}
		else
		{
			PYRO_construct(
				barrelpos,
				-1,
				256);
		}
		*/

#ifdef PSX
		POW_create(
			(in_the_air) ? POW_CREATE_LARGE : POW_CREATE_LARGE_SEMI,
			barrelpos.X,
			barrelpos.Y,
			barrelpos.Z,
			0,0,0);
#else
		PYRO_create(barrelpos,PYRO_FIREBOMB);
#endif

		PCOM_oscillate_tympanum(
			PCOM_SOUND_BANG,
			p_shooter,
			barrelpos.X >> 8,
			barrelpos.Y >> 8,
			barrelpos.Z >> 8);

		create_shockwave(
			barrelpos.X >> 8,
			barrelpos.Y >> 8,
			barrelpos.Z >> 8,
			0x200,
			250,
			p_shooter);

		// ('-' binds tighter than '>>', so this is (Y - 0x2000) >> 8.)
		BARREL_hit_with_sphere(
			barrelpos.X >> 8,
			barrelpos.Y - 0x2000 >> 8,
			barrelpos.Z >> 8,
			0x60);
	}
	else
	{
		// Cones/bins survive: just apply a small randomly-offset knock.
		BARREL_hit_with_sphere(
			barrelpos.X + (Random() & 0x1fff) - 0x0fff >> 8,
			barrelpos.Y - 0x1400 >> 8,
			barrelpos.Z + (Random() & 0x1fff) - 0x0fff >> 8,
			0x15);
	}
}
//
// Nonzero if the barrel is still part of a stack.  Note this returns
// the raw flag mask, not a normalised 0/1 boolean.
//
SLONG BARREL_is_stacked(Thing *p_barrel)
{
	SLONG barrel_flags = p_barrel->Genus.Barrel->flag;

	return barrel_flags & BARREL_FLAG_STACKED;
}
|
<reponame>dineshmm23/food_around<filename>app/src/main/java/com/opalfire/foodorder/activities/ResetPasswordActivity.java
package com.opalfire.foodorder.activities;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.text.method.PasswordTransformationMethod;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.opalfire.foodorder.HomeActivity;
import com.opalfire.foodorder.R;
import com.opalfire.foodorder.build.api.ApiClient;
import com.opalfire.foodorder.build.api.ApiInterface;
import com.opalfire.foodorder.helper.CustomDialog;
import com.opalfire.foodorder.helper.GlobalData;
import com.opalfire.foodorder.models.ResetPassword;
import com.opalfire.foodorder.utils.TextUtils;
import org.json.JSONObject;
import java.util.HashMap;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper;
public class ResetPasswordActivity extends AppCompatActivity {
ApiInterface apiInterface = ((ApiInterface) ApiClient.getRetrofit().create(ApiInterface.class));
@BindView(2131296406)
Button changeBtn;
@BindView(2131296433)
EditText confirmPassword;
@BindView(2131296434)
ImageView confirmPasswordEyeImg;
Context context;
CustomDialog customDialog;
@BindView(2131296621)
LinearLayout linearLayout;
@BindView(2131296665)
EditText newPassword;
@BindView(2131296719)
ImageView passwordEyeImg;
@BindView(2131296845)
LinearLayout siginLayout;
@BindView(2131296847)
TextView signInHere;
String strConfirmPassword;
String strNewPassword;
protected void onCreate(Bundle bundle) {
super.onCreate(bundle);
setContentView((int) R.layout.activity_reset_password);
ButterKnife.bind((Activity) this);
this.context = this;
this.customDialog = new CustomDialog(this.context);
this.passwordEyeImg.setTag(Integer.valueOf(1));
this.confirmPasswordEyeImg.setTag(Integer.valueOf(1));
}
protected void attachBaseContext(Context context) {
super.attachBaseContext(CalligraphyContextWrapper.wrap(context));
}
public void onBackPressed() {
super.onBackPressed();
finish();
overridePendingTransition(R.anim.anim_nothing, R.anim.slide_out_right);
}
@OnClick({2131296406, 2131296847, 2131296719, 2131296434})
public void onViewClicked(View view) {
view = view.getId();
if (view == R.id.change_btn) {
initValues();
} else if (view != R.id.confirm_password_eye_img) {
if (view == R.id.password_eye_img) {
if (this.passwordEyeImg.getTag().equals(Integer.valueOf(1)) != null) {
this.newPassword.setTransformationMethod(null);
this.passwordEyeImg.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.ic_eye_close));
this.passwordEyeImg.setTag(Integer.valueOf(0));
return;
}
this.passwordEyeImg.setTag(Integer.valueOf(1));
this.newPassword.setTransformationMethod(new PasswordTransformationMethod());
this.passwordEyeImg.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.ic_eye_open));
}
} else if (this.confirmPasswordEyeImg.getTag().equals(Integer.valueOf(1)) != null) {
this.confirmPassword.setTransformationMethod(null);
this.confirmPasswordEyeImg.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.ic_eye_close));
this.confirmPasswordEyeImg.setTag(Integer.valueOf(0));
} else {
this.confirmPasswordEyeImg.setTag(Integer.valueOf(1));
this.confirmPassword.setTransformationMethod(new PasswordTransformationMethod());
this.confirmPasswordEyeImg.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.ic_eye_open));
}
}
private void initValues() {
this.strConfirmPassword = this.confirmPassword.getText().toString();
this.strNewPassword = this.newPassword.getText().toString();
if (TextUtils.isEmpty(this.strNewPassword)) {
Toast.makeText(this, "Please enter password", 0).show();
} else if (TextUtils.isEmpty(this.strConfirmPassword)) {
Toast.makeText(this, "Please confirm password", 0).show();
} else if (this.strConfirmPassword.equalsIgnoreCase(this.strNewPassword)) {
HashMap hashMap = new HashMap();
hashMap.put("id", String.valueOf(GlobalData.profileModel.getId()));
hashMap.put("password", <PASSWORD>);
hashMap.put("password_confirmation", this.strConfirmPassword);
resetPassword(hashMap);
} else {
Toast.makeText(this, "Password and confirm password doesn't match", 0).show();
}
}
private void resetPassword(HashMap<String, String> hashMap) {
this.customDialog.show();
this.apiInterface.resetPassword(hashMap).enqueue(new C13431());
}
/* renamed from: com.entriver.foodorder.activities.ResetPasswordActivity$1 */
class C13431 implements Callback<ResetPassword> {
C13431() {
}
public void onFailure(@NonNull Call<ResetPassword> call, @NonNull Throwable th) {
}
public void onResponse(@NonNull Call<ResetPassword> call, @NonNull Response<ResetPassword> response) {
ResetPasswordActivity.this.customDialog.dismiss();
if (response.isSuccessful() != null) {
Toast.makeText(ResetPasswordActivity.this, ((ResetPassword) response.body()).getMessage(), 0).show();
ResetPasswordActivity.this.startActivity(new Intent(ResetPasswordActivity.this.context, HomeActivity.class));
ResetPasswordActivity.this.overridePendingTransition(R.anim.slide_in_right, R.anim.anim_nothing);
return;
}
try {
Toast.makeText(ResetPasswordActivity.this.context, new JSONObject(response.errorBody().string()).optString("error"), 1).show();
} catch (Response<ResetPassword> response2) {
Toast.makeText(ResetPasswordActivity.this.context, response2.getMessage(), 1).show();
}
}
}
}
|
#include <Veritas/Math/Complex.h>
using namespace Veritas::Math;
// Default constructor: leaves both components uninitialised.
Complex::Complex() {}
// Construct from real part a and imaginary part b.
Complex::Complex(float32 a, float32 b) : a(a), b(b) {}
// Component-wise complex addition / subtraction.
Complex Complex::operator+(const Complex& c) const { return Complex(a+c.a, b+c.b); }
Complex Complex::operator-(const Complex& c) const { return Complex(a-c.a, b-c.b); }
// Complex multiplication: (a+bi)(c+di) = (ac-bd) + (ad+bc)i.
Complex Complex::operator*(const Complex& c) const { return Complex(a*c.a - b*c.b, a*c.b + b*c.a); }
// Complex division via the conjugate; d = |c|^2 (division by zero if c == 0).
Complex Complex::operator/(const Complex& c) const { float32 d = c.a*c.a + c.b*c.b; return Complex((a*c.a + b*c.b)/d, (b*c.a - a*c.b)/d); }
// Scalar addition / subtraction affect only the real part.
Complex Complex::operator+(const float32 s) const { return Complex(a + s, b); }
Complex Complex::operator-(const float32 s) const { return Complex(a - s, b); }
// Scalar multiplication scales both components.
Complex Complex::operator*(const float32 s) const { return Complex(a * s, b * s); }
// Scalar division scales both components by s.
// bugfix: the imaginary part previously divided b by itself ("b / b"),
// yielding 1 (or NaN for b == 0) instead of b / s.
Complex Complex::operator/(const float32 s) const { return Complex(a / s, b / s); }
// In-place component-wise addition / subtraction.
void Complex::operator+=(const Complex& c) { a += c.a; b += c.b; }
void Complex::operator-=(const Complex& c) { a -= c.a; b -= c.b; }
// In-place multiplication; 'oa' preserves the original real part, needed for
// the imaginary component after 'a' has been overwritten.
void Complex::operator*=(const Complex& c) { float32 oa = a; a = a*c.a - b*c.b; b = oa*c.b + b*c.a; }
// In-place division (multiply by conjugate / |c|^2); 'oa' again keeps the
// pre-update real part. No guard against c == 0.
void Complex::operator/=(const Complex& c) { float32 d = c.a*c.a + c.b*c.b; float32 oa = a; a = (a*c.a + b*c.b)/d; b = (b*c.a - oa*c.b)/d; }
// In-place scalar ops: + and - touch only the real part; * and / scale both.
void Complex::operator+=(const float32 s) { a += s; }
void Complex::operator-=(const float32 s) { a -= s; }
void Complex::operator*=(const float32 s) { a *= s; b *= s; }
void Complex::operator/=(const float32 s) { a /= s; b /= s; }
// Unary negation.
Complex Complex::operator-() const { return Complex(-a, -b); }
// Negate the imaginary part in place.
void Complex::conjugate() { b = -b; }
// Free functions for scalar-on-the-left arithmetic.
Complex operator+(const float32 s, const Complex& c) { return Complex(s + c.a, c.b); }
Complex operator-(const float32 s, const Complex& c) { return Complex(s - c.a, -c.b); }
Complex operator*(const float32 s, const Complex& c) { return Complex(s * c.a, s * c.b); }
// Scalar / complex: promote s to a Complex and reuse full complex division.
Complex operator/(const float32 s, const Complex& c) { return Complex(s, 0.0f)/Complex(c.a, c.b); }
|
<gh_stars>0
/**
* This namespace contains plugins which extend PIXI Object.
* @namespace Yogame.plugins
*/
export {default as layer} from "./layer"; |
<reponame>MondayMorningHaskell/HaskellData
class Person(object):
    """A simple record of one person's basic details.

    ``occ`` may be any object — a plain string or an ``Occupation``
    instance — and is stored unmodified (duck typing).
    """

    def __init__(self, fn, ln, em, age, occ):
        # Map the short constructor arguments onto descriptive attributes.
        self.firstName, self.lastName = fn, ln
        self.email, self.age = em, age
        self.occupation = occ
class Occupation(object):
    """An occupation described by a job title and a work location."""

    def __init__(self, name, location):
        self.name, self.location = name, location
if __name__ == "__main__":
    # Demo: Person.occupation is duck-typed — it accepts either a plain
    # string or an Occupation instance.
    stringOcc = "Lawyer"
    person1 = Person(
        "Michael",
        "Smith",
        "<EMAIL>",
        27,
        stringOcc)
    classOcc = Occupation("Software Engineer", "San Francisco")
    # Still works!
    person2 = Person(
        "Katie",
        "Johnson",
        "<EMAIL>",
        26,
        classOcc)
    people = [person1, person2]
    for p in people:
        # This works. Both types of occupations are printable.
        # (Occupation defines no __str__, so it prints the default repr.)
        print(p.occupation)
    # This won't work. Our "Occupation" class
    # doesn't work with "len"
    # print(len(p.occupation))
|
# frozen_string_literal: true
# Top-level namespace for the Rubanok gem; holds the release version constant.
module Rubanok
  VERSION = "0.2.1"
end
|
package com.clj.blesample.net;
/** Central place for the backend base URL and REST endpoint paths. */
public class UrlUtils {
    /** Base URL of the backend API server. */
    public static final String APIHTTP = "http://znshop.swzzkf.cn";
    /** Endpoint called when the user opens the thermometer home page to fetch their profile info (without connecting the Bluetooth device). */
    public static final String index_noconnected = APIHTTP + "/HardWare/Temperature/index_noconnected";
}
|
<reponame>leongaban/redux-saga-exchange
import { initialCommunicationField } from 'shared/helpers/redux';
import * as NS from '../../namespace';

// Initial Redux state for this feature slice.
export const initial: NS.IReduxState = {
  communication: {
    // Async-request status tracking for the filtered-orders fetch.
    loadFilteredOrders: initialCommunicationField,
  },
  edit: {
    // Total page count for the report archive; starts at a single page.
    reportArchiveTotalPages: 1,
  },
  data: {
    // Order collections — all empty until loaded.
    active: [],
    archive: [],
    reportArchive: [],
  },
};
|
#!/bin/bash
# Copyright 2017 David Snyder
# 2017 Johns Hopkins University (Author: Daniel Garcia-Romero)
# 2017 Johns Hopkins University (Author: Daniel Povey)
#
# Copied from egs/sre16/v1/local/nnet3/xvector/tuning/run_xvector_1a.sh (commit e082c17d4a8f8a791428ae4d9f7ceb776aef3f0b).
#
# Apache 2.0.
# This script trains a DNN similar to the recipe described in
# http://www.danielpovey.com/files/2018_icassp_xvectors.pdf
. ./cmd.sh
set -e

# ---- Tunable defaults (overridable via flags parsed by parse_options.sh) ----
stage=1
train_stage=0
use_gpu=true        # NOTE(review): not referenced later — stage 8 hardcodes --use-gpu=wait
remove_egs=false
data=data/train
nnet_dir="/media/sangjik/hdd2/speaker_verification/kaldi/xvector_nnet_1a"
egs_dir="${nnet_dir}/egs"

. ./path.sh
# NOTE(review): cmd.sh was already sourced above; this second source is
# harmless but redundant.
. ./cmd.sh
. ./utils/parse_options.sh

# Number of distinct speakers in the training data (one pdf per speaker).
# NOTE(review): num_pdfs appears unused in the rest of this script.
num_pdfs=$(awk '{print $2}' $data/utt2spk | sort | uniq -c | wc -l)
# Now we create the nnet examples using sid/nnet3/xvector/get_egs.sh.
# The argument --num-repeats is related to the number of times a speaker
# repeats per archive. If it seems like you're getting too many archives
# (e.g., more than 200) try increasing the --frames-per-iter option. The
# arguments --min-frames-per-chunk and --max-frames-per-chunk specify the
# minimum and maximum length (in terms of number of frames) of the features
# in the examples.
#
# To make sense of the egs script, it may be necessary to put an "exit 1"
# command immediately after stage 3. Then, inspect
# exp/<your-dir>/egs/temp/ranges.* . The ranges files specify the examples that
# will be created, and which archives they will be stored in. Each line of
# ranges.* has the following form:
# <utt-id> <local-ark-indx> <global-ark-indx> <start-frame> <end-frame> <spk-id>
# For example:
# 100304-f-sre2006-kacg-A 1 2 4079 881 23
# If you're satisfied with the number of archives (e.g., 50-150 archives is
# reasonable) and with the number of examples per speaker (e.g., 1000-5000
# is reasonable) then you can let the script continue to the later stages.
# Otherwise, try increasing or decreasing the --num-repeats option. You might
# need to fiddle with --frames-per-iter. Increasing this value decreases the
# the number of archives and increases the number of examples per archive.
# Decreasing this value increases the number of archives, while decreasing the
# number of examples per archive.
if [ $stage -le 6 ]; then
echo "stage=6"
echo "$0: Getting neural network training egs";
# dump egs.
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $egs_dir/storage ]; then
utils/create_split_dir.pl \
/media/sangjik/hdd2/kaldi_voxceleb/b{03,04,05,06}/$USER/kaldi-data/egs/voxceleb2/v2/xvector-$(date +'%m_%d_%H_%M')/$egs_dir/storage $egs_dir/storage
fi
sid/nnet3/xvector/get_egs.sh --cmd "$train_cmd" \
--nj 1 \
--stage 0 \
--frames-per-iter 1000000000 \
--frames-per-iter-diagnostic 100000 \
--min-frames-per-chunk 200 \
--max-frames-per-chunk 400 \
--num-diagnostic-archives 3 \
--num-repeats 50 \
"$data" $egs_dir
fi
if [ $stage -le 7 ]; then
echo "stage=7"
echo "$0: creating neural net configs using the xconfig parser";
num_targets=$(wc -w $egs_dir/pdf2num | awk '{print $1}')
feat_dim=$(cat $egs_dir/info/feat_dim)
# This chunk-size corresponds to the maximum number of frames the
# stats layer is able to pool over. In this script, it corresponds
# to 100 seconds. If the input recording is greater than 100 seconds,
# we will compute multiple xvectors from the same recording and average
# to produce the final xvector.
max_chunk_size=10000
# The smallest number of frames we're comfortable computing an xvector from.
# Note that the hard minimum is given by the left and right context of the
# frame-level layers.
min_chunk_size=25
mkdir -p $nnet_dir/configs
cat <<EOF > $nnet_dir/configs/network.xconfig
# please note that it is important to have input layer with the name=input
# The frame-level layers
input dim=${feat_dim} name=input
relu-batchnorm-layer name=tdnn1 input=Append(-2,-1,0,1,2) dim=512
relu-batchnorm-layer name=tdnn2 input=Append(-2,0,2) dim=512
relu-batchnorm-layer name=tdnn3 input=Append(-3,0,3) dim=512
relu-batchnorm-layer name=tdnn4 dim=512
relu-batchnorm-layer name=tdnn5 dim=1500
# The stats pooling layer. Layers after this are segment-level.
# In the config below, the first and last argument (0, and ${max_chunk_size})
# means that we pool over an input segment starting at frame 0
# and ending at frame ${max_chunk_size} or earlier. The other arguments (1:1)
# mean that no subsampling is performed.
stats-layer name=stats config=mean+stddev(0:1:1:${max_chunk_size})
# This is where we usually extract the embedding (aka xvector) from.
relu-batchnorm-layer name=tdnn6 dim=512 input=stats
# This is where another layer the embedding could be extracted
# from, but usually the previous one works better.
relu-batchnorm-layer name=tdnn7 dim=512
output-layer name=output include-log-softmax=true dim=${num_targets}
EOF
steps/nnet3/xconfig_to_configs.py \
--xconfig-file $nnet_dir/configs/network.xconfig \
--config-dir $nnet_dir/configs/
cp $nnet_dir/configs/final.config $nnet_dir/nnet.config
# These three files will be used by sid/nnet3/xvector/extract_xvectors.sh
echo "output-node name=output input=tdnn6.affine" > $nnet_dir/extract.config
echo "$max_chunk_size" > $nnet_dir/max_chunk_size
echo "$min_chunk_size" > $nnet_dir/min_chunk_size
fi
# Dropout schedule: 0 until 20% of training, ramp to 0.1 by 50%, decay back to 0.
dropout_schedule='0,0@0.20,0.1@0.50,0'
# Fixed RNG seed for reproducible training runs.
srand=123
if [ $stage -le 8 ]; then
echo "stage=8"
# Train the x-vector DNN on the egs dumped in stage 6, using the configs
# generated in stage 7.
# NOTE(review): --use-gpu is hardcoded to "wait" below, so the use_gpu
# variable defined at the top of this script has no effect.
steps/nnet3/train_raw_dnn.py --stage=$train_stage \
--cmd="$train_cmd" \
--trainer.optimization.proportional-shrink 10 \
--trainer.optimization.momentum=0.5 \
--trainer.optimization.num-jobs-initial=3 \
--trainer.optimization.num-jobs-final=1 \
--trainer.optimization.initial-effective-lrate=0.001 \
--trainer.optimization.final-effective-lrate=0.0001 \
--trainer.optimization.minibatch-size=64 \
--trainer.srand=$srand \
--trainer.max-param-change=2 \
--trainer.num-epochs=3 \
--trainer.dropout-schedule="$dropout_schedule" \
--trainer.shuffle-buffer-size=1000 \
--egs.frames-per-eg=1 \
--egs.dir="$egs_dir" \
--cleanup.remove-egs $remove_egs \
--cleanup.preserve-model-interval=10 \
--use-gpu=wait \
--dir=$nnet_dir || exit 1;
fi
exit 0;
|
<gh_stars>1-10
import * as view from './view'
import Controller from './controller'

const { document, $ } = view

// Pre-select a random option in every <select> element on the page.
$('select').each(function () {
  const $options = $(this).find('option')
  $options
    .eq(Math.floor(Math.random() * $options.length))
    .prop('selected', true)
})

// Reset the size input to the selected algorithm's default and refill the view.
function refill () {
  view.$size.val(view.$algorithm.find(':selected').data('size'))
  controller.fill()
}

const controller = new Controller()

// Wire toolbar buttons to controller actions.
view.$toggle.click(() => controller.toggle())
view.$step.click(() => controller.step())
view.$shuffle.click(() => controller.shuffle())
view.$reset.click(() => controller.reset())

// Any form-control change updates the controller; changing the algorithm
// selector additionally triggers a refill.
$(document).change((event) => {
  controller.update($(event.target))
  if (event.target === view.$algorithm.get(0)) {
    refill()
  }
})
view.$size.blur(() => controller.fill())

// Initial population.
refill()
|
<reponame>AITT-VN/xcontroller_arduino_lib
/*
LineArray.h
*/
#ifndef LINEARRAY_h
#define LINEARRAY_h
#include "Arduino.h"
#include "PCF8574.h"
#include "Wire.h"
#define LINE_1 0
#define LINE_2 1
#define LINE_3 2
#define LINE_4 3
// Driver for a 4-sensor line-follower array read through a PCF8574 I/O expander.
// NOTE(review): the expander is fixed at address 0x23 on Wire1; the sda/scl
// constructor arguments are presumably used to configure that bus — confirm
// against LineArray.cpp (not visible here).
class LineArray
{
    public:
        // sda/scl: I2C pin numbers for the expander bus.
        LineArray(int sda, int scl);
        // Read all four sensors; returns a pointer to an int array.
        // NOTE(review): ownership/lifetime of the returned buffer is not
        // visible here — confirm it does not dangle.
        int* read();
        // Read one sensor; use the LINE_1..LINE_4 constants as the index.
        int read(uint8_t index);
    private:
        PCF8574 _pcf8574 = PCF8574(0x23, &Wire1);
};
#endif |
<reponame>Crowntium/crowntium
// Aleth: Ethereum C++ client, tools and libraries.
// Copyright 2015-2019 Aleth Authors.
// Licensed under the GNU General Public License, Version 3.
#pragma once
#include "TestFace.h"
namespace dev
{
namespace eth
{
class Client;
}
namespace rpc
{
// JSON-RPC handler implementing the "test" module (test_* methods), used by
// the test harness to drive a client deterministically.
class Test: public TestFace
{
public:
    Test(eth::Client& _eth);
    // Advertise this handler as RPC module "test", version 1.0.
    virtual RPCModules implementedModules() const override
    {
        return RPCModules{RPCModule{"test", "1.0"}};
    }
    virtual std::string test_getLogHash(std::string const& _param1) override;
    virtual std::string test_importRawBlock(std::string const& _blockRLP) override;
    virtual bool test_setChainParams(const Json::Value& _param1) override;
    virtual bool test_mineBlocks(int _number) override;
    virtual bool test_modifyTimestamp(int _timestamp) override;
    virtual bool test_rewindToBlock(int _number) override;
private:
    // Client under test; reference only — not owned by this handler.
    eth::Client& m_eth;
};
}
}
|
#!/usr/bin/env bash
##############################
# Rare CNV Map Project #
##############################
# Copyright (c) 2017 Ryan L. Collins
# Distributed under terms of the MIT License (see LICENSE)
# Contact: Ryan L. Collins <rlcollins@g.harvard.edu>
# Code development credits availble on GitHub
#Code to gather all data required for all plots used in figure for specific/individual noncoding elements
#####Set parameters
export WRKDIR=/data/talkowski/Samples/rCNVmap
source ${WRKDIR}/bin/rCNVmap/misc/rCNV_code_parameters.sh
#####Reinitialize directory if exists
if ! [ -e ${WRKDIR}/data/plot_data/IndividualNoncodingElementsFigure ]; then
mkdir ${WRKDIR}/data/plot_data/IndividualNoncodingElementsFigure
fi
####Get list of elements ±500kb or E-P connected to (but not overlapping) high-conf disease genes
####NDD
#Get list of genes
cat ${WRKDIR}/data/master_annotations/genelists/extTADA*.genes.list \
${WRKDIR}/data/master_annotations/genelists/DDD_2017.genes.list | sort | uniq | \
sed -e 's/\-/_/g' -e 's/\./_/g' > ${TMPDIR}/NDD_union.genes.list
#Get 750 flanks and any relevant E-P connections
flank=750000
cat \
<( fgrep -wf ${TMPDIR}/NDD_union.genes.list \
<( sed -e 's/\-/_/g' -e 's/\./_/g' \
${WRKDIR}/data/master_annotations/gencode/gencode.v19.gene_boundaries.all.bed ) | \
awk -v OFS="\t" -v flank=${flank} '{ print $1, $2-flank, $2, $4"\n"$1, $3, $3+flank, $4 }' | \
awk -v OFS="\t" '{ if ($2<1) $2=1; print $1, $2, $3, $4, "flank" }' ) \
<( fgrep -wf ${TMPDIR}/NDD_union.genes.list \
<( sed -e 's/\-/_/g' -e 's/\./_/g' \
${WRKDIR}/data/misc/pcHiC_contacts/pcHiC_contacts.formatted_wGenes.min_4.unique_EP_pairs.bed ) | \
awk -v OFS="\t" '{ print $1, $2, $3, $4, "EP" }' ) | \
sort -Vk1,1 -k2,2n -k3,3n -Vk4,4 > \
${TMPDIR}/NDD_union.loci.bed
#Intersect versus significant NDD elements
# For every large (N>=10k) germline phenotype group, collect the significant
# DEL/DUP elements, merge them, keep those overlapping an NDD flank/E-P locus,
# and finally drop any element directly overlapping an NDD gene body.
# NOTE(review): this merge keeps only column 4 (-c 4) while the parallel CNCR
# block below uses -c 4,5 — confirm whether dropping the pheno_CNV label
# (column 5) here is intentional.
VF=E4
while read pheno; do
for CNV in DEL DUP; do
awk -v OFS="\t" -v pheno=${pheno} -v CNV=${CNV} '{ print $1, $2, $3, $4, pheno"_"CNV }' \
${WRKDIR}/analysis/perAnno_burden/signif_elements/all_merged/final_loci/${pheno}/${pheno}_${CNV}_${VF}.final_merged_loci.all_classes.bed
done
done < <( fgrep -v "#" ${WRKDIR}/bin/rCNVmap/misc/analysis_group_HPO_mappings.list | \
awk '{ if ($NF>=10000 && $2=="GERM") print $1 }' ) | \
sort -Vk1,1 -k2,2n -k3,3n -Vk4,4 | \
bedtools merge -c 4 -o distinct -i - | bedtools intersect -wa -wb -a - \
-b ${TMPDIR}/NDD_union.loci.bed | bedtools intersect -v -wa -a - \
-b <( fgrep -wf ${TMPDIR}/NDD_union.genes.list \
<( sed -e 's/\-/_/g' -e 's/\./_/g' \
${WRKDIR}/data/master_annotations/gencode/gencode.v19.gene_boundaries.all.bed ) ) > \
${TMPDIR}/NDD_loci_signif.bed
#####CNCR
#Get list of genes
cat ${WRKDIR}/data/master_annotations/genelists/COSMIC*.genes.list | sort | uniq | \
sed -e 's/\-/_/g' -e 's/\./_/g' > ${TMPDIR}/CNCR_union.genes.list
#Get 750kb flanks and any relevant E-P connections
flank=750000
cat \
<( fgrep -wf ${TMPDIR}/CNCR_union.genes.list \
<( sed -e 's/\-/_/g' -e 's/\./_/g' \
${WRKDIR}/data/master_annotations/gencode/gencode.v19.gene_boundaries.all.bed ) | \
awk -v OFS="\t" -v flank=${flank} '{ print $1, $2-flank, $2, $4"\n"$1, $3, $3+flank, $4 }' | \
awk -v OFS="\t" '{ if ($2<1) $2=1; print $1, $2, $3, $4, "flank" }' ) \
<( fgrep -wf ${TMPDIR}/CNCR_union.genes.list \
<( sed -e 's/\-/_/g' -e 's/\./_/g' \
${WRKDIR}/data/misc/pcHiC_contacts/pcHiC_contacts.formatted_wGenes.min_4.unique_EP_pairs.bed ) | \
awk -v OFS="\t" '{ print $1, $2, $3, $4, "EP" }' ) | \
sort -Vk1,1 -k2,2n -k3,3n -Vk4,4 > \
${TMPDIR}/CNCR_union.loci.bed
#Intersect versus significant CNCR elements
VF=E4
while read pheno; do
for CNV in DEL DUP; do
awk -v OFS="\t" -v pheno=${pheno} -v CNV=${CNV} '{ print $1, $2, $3, $4, pheno"_"CNV }' \
${WRKDIR}/analysis/perAnno_burden/signif_elements/all_merged/final_loci/${pheno}/${pheno}_${CNV}_${VF}.final_merged_loci.all_classes.bed
done
done < <( fgrep -v "#" ${WRKDIR}/bin/rCNVmap/misc/analysis_group_HPO_mappings.list | \
awk '{ if ($NF>=10000 && $2=="CNCR") print $1 }' ) | \
sort -Vk1,1 -k2,2n -k3,3n -Vk4,4 | \
bedtools merge -c 4,5 -o distinct -i - | bedtools intersect -wa -wb -a - \
-b ${TMPDIR}/CNCR_union.loci.bed | bedtools intersect -v -wa -a - \
-b <( fgrep -wf ${TMPDIR}/CNCR_union.genes.list \
<( sed -e 's/\-/_/g' -e 's/\./_/g' \
${WRKDIR}/data/master_annotations/gencode/gencode.v19.gene_boundaries.all.bed ) ) > \
${TMPDIR}/CNCR_loci_signif.bed
#####Get number of genes shared between GERM & CNCR from proximity analysis
fgrep -wf <( awk '{ print $(NF-1) }' ${TMPDIR}/CNCR_loci_signif.bed ) \
<( awk '{ print $(NF-1) }' ${TMPDIR}/NDD_loci_signif.bed )
#####Get significant elements with ≥3 gene contacts
####NDD
VF=E4
while read pheno; do
for CNV in DEL DUP; do
awk -v OFS="\t" -v pheno=${pheno} -v CNV=${CNV} '{ print $1, $2, $3, $4, pheno"_"CNV }' \
${WRKDIR}/analysis/perAnno_burden/signif_elements/all_merged/final_loci/${pheno}/${pheno}_${CNV}_${VF}.final_merged_loci.all_classes.bed
done
done < <( fgrep -v "#" ${WRKDIR}/bin/rCNVmap/misc/analysis_group_HPO_mappings.list | \
awk '{ if ($NF>=10000 && $2=="GERM") print $1 }' ) | \
sort -Vk1,1 -k2,2n -k3,3n -Vk4,4 | \
bedtools merge -c 4 -o distinct -i - | bedtools intersect -c -a - \
-b ${WRKDIR}/data/misc/pcHiC_contacts/pcHiC_contacts.formatted_wGenes.min_1.unique_EP_pairs.bed | \
awk '{ if ($NF>2) print $0 }' > ${TMPDIR}/NDD_multi_signif.loci.bed
####CNCR
VF=E4
while read pheno; do
for CNV in DEL DUP; do
awk -v OFS="\t" -v pheno=${pheno} -v CNV=${CNV} '{ print $1, $2, $3, $4, pheno"_"CNV }' \
${WRKDIR}/analysis/perAnno_burden/signif_elements/all_merged/final_loci/${pheno}/${pheno}_${CNV}_${VF}.final_merged_loci.all_classes.bed
done
done < <( fgrep -v "#" ${WRKDIR}/bin/rCNVmap/misc/analysis_group_HPO_mappings.list | \
awk '{ if ($NF>=10000 && $2=="CNCR") print $1 }' ) | \
sort -Vk1,1 -k2,2n -k3,3n -Vk4,4 | \
bedtools merge -c 4 -o distinct -i - | bedtools intersect -c -a - \
-b ${WRKDIR}/data/misc/pcHiC_contacts/pcHiC_contacts.formatted_wGenes.min_1.unique_EP_pairs.bed | \
awk '{ if ($NF>2) print $0 }' > ${TMPDIR}/CNCR_multi_signif.loci.bed
#####Get number of significant elements in both GERM & CNCR
cut -f4 ${TMPDIR}/NDD_multi_signif.loci.bed | fgrep -wf - ${TMPDIR}/CNCR_multi_signif.loci.bed
|
#!/bin/bash
# Tear down an overcloud stack, unprovision its nodes, remove generated
# artifacts, and (optionally) wipe the disks of its Ceph storage nodes.
CLEAN=1                     # set to 0 to skip wiping Ceph node disks
#STACK=other-3
STACK=overcloud-0
METAL="deployed-metal-$STACK.yaml"
TMP="/tmp/ceph_nodes_$STACK"

# Collect Ceph storage node hostnames from the deployed-metal file
# (ignoring the CephStorageHostnameFormat parameter line).
grep cephstorage "$METAL" \
  | grep -v CephStorageHostnameFormat \
  | awk '{print $2}' > "$TMP"

openstack overcloud delete "$STACK" --yes

pushd ../metalsmith
bash unprovision.sh "$STACK"
rm -f deployed-{metal,network}-"$STACK".yaml
popd

# Remove generated artifacts from the current directory, if present.
for F in generated_ceph_spec.yaml generated_deployed_ceph.yaml deployed-{metal,network}-$STACK.yaml cirros-0.4.0-x86_64-disk.{raw,img} tempest-deployer-input.conf; do
  if [[ -e "$F" ]]; then
    rm -f "$F"
  fi
done

if [[ $CLEAN -eq 1 ]]; then
  # Wipe disks on every Ceph node gathered above. Reading line-by-line
  # (instead of `for H in $(cat $TMP)`) avoids word-splitting surprises.
  while IFS= read -r H; do
    bash ../metalsmith/clean-disks.sh "$H"
  done < "$TMP"
fi
rm -f "$TMP"
|
#!/usr/bin/env bash
# Bash completion for the `supernova` command: completes environment names,
# i.e. the [section] headers found in any supernova config file.
# BUGFIX: shebang was #!/bin/sh although the body uses bash-only features
# (local, arrays, COMPREPLY); completion scripts are sourced by bash anyway.
_supernova()
{
    local cur=${COMP_WORDS[COMP_CWORD]}
    # Concatenate every config file that exists (missing files are ignored).
    local configs=$(cat "${XDG_CONFIG_HOME}"/supernova ~/.supernova ./.supernova 2> /dev/null)
    # Extract the "[name]" section headers and de-duplicate them.
    local possibilities=$(echo "${configs}" | sed -n '/^\[.*\]/ s_\[\(.*\)\].*_\1_p' | sort -u)
    # BUGFIX: quote "$cur" so a word containing globs or whitespace cannot
    # break the compgen invocation.
    COMPREPLY=( $(compgen -W "${possibilities}" -- "$cur") )
}
complete -F _supernova supernova
|
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author <NAME>
* @version 1.3
* @date Wed Aug 24 19:53:22 EDT 2011
* @see LICENSE (MIT style license file).
*/
package scalation.minima
import scala.math.{abs, max, pow}
import scalation.calculus.Differential.{FunctionV2S, gradient, gradientD}
import scalation.linalgebra.VectorD
import scalation.util.Error
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `SteepestDescent` class solves unconstrained Non-Linear Programming (NLP)
* problems using the Steepest Descent algorithm. Given a function 'f' and a
* starting point 'x', the algorithm computes the gradient and takes steps in
* the opposite direction. The algorithm iterates until it converges. The class
* assumes that partial derivative functions are not available unless explicitly
* given via the 'setDerivatives' method.
*
* dir_k = -gradient (x)
*
* minimize f(x)
*
* @param f the vector-to-scalar objective function
* @param exactLS whether to use exact (e.g., `GoldenLS`)
* or inexact (e.g., `WolfeLS`) Line Search
*/
class SteepestDescent (f: FunctionV2S, exactLS: Boolean = true)
extends Minimizer with Error
{
private val DEBUG = true // debug flag
private val WEIGHT = 1000.0 // weight on penalty for constraint violation
private var given = false // default: functions for partials are not given
private var df: Array [FunctionV2S] = null // array of partials
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the partial derivative functions. If these functions are available,
* they are more efficient and more accurate than estimating the values
* using difference quotients (the default approach).
* @param partials the array of partial derivative functions
*/
def setDerivatives (partials: Array [FunctionV2S]) { df = partials; given = true }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Perform an exact 'GoldenSectionLS' or inexact 'WolfeLS' line search.
* Search in direction 'dir', returning the distance 'z' to move in that direction.
* @param x the current point
* @param dir the direction to move in
* @param step the initial step size
*/
def lineSearch (x: VectorD, dir: VectorD, step: Double = STEP): Double =
{
def f_1D (z: Double): Double = f(x + dir * z) // create a 1D function
val ls = if (exactLS) new GoldenSectionLS (f_1D) // Golden Section line search
else new WolfeLS (f_1D, .0001, .1) // Wolfe line search (c1 = .0001, c2 = .1)
ls.search (step) // perform a line search
} // lineSearch
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve the Non-Linear Programming (NLP) problem using the Steepest Descent
* algorithm.
* @param x0 the starting point
* @param step the initial step size
* @param toler the tolerance
*/
def solve (x0: VectorD, step: Double = STEP, toler: Double = EPSILON): VectorD =
{
var x = x0 // current point
var f_x = f(x) // objective function at current point
var y: VectorD = null // next point
var f_y = 0.0 // objective function at next point
var dir = if (given) -gradientD (df, x) // initial direction is -gradient: use partials
else -gradient (f, x) // estimate gradient
var dist = 1.0 // distance between current and next point
var down = true // moving down flag
for (k <- 1 to MAX_ITER if down && dist > toler && dir.normSq > toler) {
y = x + dir * lineSearch (x, dir, step) // determine the next point
f_y = f(y) // objective function value for next point
dir = if (given) -gradientD (df, y) // next search direction: use partials
else -gradient (f, y) // estimate gradient
if (DEBUG) println ("solve: k = " + k + ", y = " + y + ", f_y = " + f_y + ", dir = " + dir)
dist = (x - y).normSq // calc the distance between current and next point
down = f_y < f_x // still moving down?
if (down) { x = y; f_x = f_y } // make the next point, the current point
} // for
x // return the current point
} // solve
} // SteepestDescent class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `SteepestDescentTest` object is used to test the `SteepestDescent` class.
*/
object SteepestDescentTest extends App
{
var x0 = VectorD (0.0, 0.0) // starting point
var x: VectorD = null // optimal solution
println ("\nProblem 1: (x_0 - 2)^2 + (x_1 - 3)^2 + 1")
def f (x: VectorD): Double = (x(0) - 2.0) * (x(0) - 2.0) + (x(1) - 3.0) * (x(1) - 3.0) + 1.0
val solver = new SteepestDescent (f)
x = solver.solve (x0)
println ("optimal solution = " + x + ", objective value = " + f(x))
println ("\nProblem 2 (with partials): (x_0 - 2)^2 + (x_1 - 3)^2 + 1")
x0 = VectorD (0.0, 0.0)
def df_dx0 (x: VectorD): Double = 2.0 * x(0) - 4.0
def df_dx1 (x: VectorD): Double = 2.0 * x(1) - 6.0
solver.setDerivatives (Array [FunctionV2S] (df_dx0, df_dx1))
x = solver.solve (x0)
println ("optimal solution = " + x + ", objective value = " + f(x))
println ("\nProblem 3: x_0/4 + 5x_0^2 + x_0^4 - 9x_0^2 x_1 + 3x_1^2 + 2x_1^4")
// @see http://math.fullerton.edu/mathews/n2003/gradientsearch/GradientSearchMod/Links/GradientSearchMod_lnk_5.html
x0 = VectorD (0.0, 0.0)
def f3 (x: VectorD): Double = x(0)/4.0 + 5.0*x(0)*x(0) + pow(x(0),4) -
9.0*x(0)*x(0)*x(1) + 3.0*x(1)*x(1) + 2.0*pow(x(1),4)
val solver3 = new SteepestDescent (f3)
x = solver3.solve (x0)
println ("optimal solution = " + x + ", objective value = " + f3(x))
} // SteepestDescentTest
|
#!/usr/bin/env bash
# Run the SPMV DFE test binary on a remote build machine over ssh.
# Usage: <script> <input-file>
f=${1}
path=`pwd`
# XXX machine name depends on build type
machine="maia01"
# machine="maxnode2"
echo "Path is ${path}"
# NOTE(review): the here-doc delimiter is unquoted, so ${path}, ${f} AND
# ${LD_LIBRARY_PATH} are all expanded on the *local* machine before the
# commands are sent. For LD_LIBRARY_PATH this looks unintended (the remote
# machine's own value is discarded) — confirm, and escape it as
# \${LD_LIBRARY_PATH} if the remote value should be preserved.
ssh ${machine} /bin/bash << EOF
cd ${path}/..
pwd
export LD_LIBRARY_PATH=/opt/gcc-4.9.2/lib64:${LD_LIBRARY_PATH}
src/frontend/hwrun_maia ${path}/test_spmv_dfe ${f} | tee run.log
sleep 5
EOF
|
/** Configuration accepted by the sitemap generator. */
export interface SitemapGeneratorOptions {
    /** Directory containing the page files to index. */
    pagesDirectory: string;
    /** Directory the generated sitemap is written to. */
    exportDirectory: string;
    /** Base URL prefixed to each entry (optional). */
    baseUrl?: string;
    /** Name of the output file (optional). */
    exportFilename?: string;
    /** Change-frequency value applied to entries (optional). */
    changeFreq?: string;
    /** Priority value applied to entries (optional). */
    sitemapPriority?: string;
    /** Locale codes for localized entries (optional). */
    locales?: Array<string>;
    /** Callback deciding whether a page is excluded (optional). */
    isSiteExcludedCallback?: Function;
    /** Hook invoked before the sitemap is finalized (optional). */
    beforeFinishCallback?: Function;
}
export declare const SitemapGeneratorDefaultOptions: SitemapGeneratorOptions;
|
#!/bin/bash
# Commit everything and push the bibtex-entry repo to all configured remotes.
# git remote add originbitbucket git@bitbucket.org:tttor/bibtex-entry.git
# git remote add origin git@gitlab.com:tttor/bibtex-entry.git
# https://askubuntu.com/questions/370697/how-to-count-number-of-files-in-a-directory-but-not-recursively
echo '=== n entries ==='
find ./entry -maxdepth 1 -type f | wc -l
echo '=== committing ==='
git add --all
git commit -a -m add
# NOTE(review): per the remote-setup comments above, "origin" points at
# gitlab.com, so this banner may also be misleading — confirm remote URLs.
echo '=== github.com ==='
git push origin master
echo '=== bitbucket.org ==='
git push originbitbucket master
# BUGFIX: this banner repeated "bitbucket.org" although the push below goes
# to the origingitlab remote.
echo '=== gitlab.com ==='
git push origingitlab master
|
package de.hswhameln.typetogether.client.proxy;
import de.hswhameln.typetogether.networking.api.Document;
import de.hswhameln.typetogether.networking.api.Lobby;
import de.hswhameln.typetogether.networking.api.User;
import de.hswhameln.typetogether.networking.api.exceptions.InvalidDocumentIdException;
import de.hswhameln.typetogether.networking.api.exceptions.UnknownUserException;
import de.hswhameln.typetogether.networking.proxy.MarshallHandler;
import de.hswhameln.typetogether.networking.proxy.ObjectResolver;
import de.hswhameln.typetogether.networking.shared.AbstractClientProxy;
import de.hswhameln.typetogether.networking.shared.DocumentClientProxy;
import de.hswhameln.typetogether.networking.shared.UserServerProxy;
import de.hswhameln.typetogether.networking.util.LoggerFactory;
import java.io.IOException;
import java.net.Socket;
import java.util.Collection;
import java.util.HashSet;
import java.util.logging.Logger;
import static de.hswhameln.typetogether.networking.FluentExceptionHandler.expectSuccess;
@SuppressWarnings("RedundantThrows")
public class LobbyClientProxy extends AbstractClientProxy implements Lobby {
private final MarshallHandler<User> userMarshallHandler;
private final ObjectResolver<Document> documentObjectResolver;
private final Logger logger = LoggerFactory.getLogger(this);
public LobbyClientProxy(Socket socket) throws IOException {
super(socket);
this.userMarshallHandler = new MarshallHandler<>(UserServerProxy::new, this.in, this.out);
this.documentObjectResolver = new ObjectResolver<>(DocumentClientProxy::new, this.in, this.out, this.socket.getInetAddress());
}
@Override
public void joinDocument(User user, String documentId) throws InvalidDocumentIdException.DocumentDoesNotExistException {
this.safelyExecute(() -> {
this.chooseOption("1");
this.userMarshallHandler.marshall(user);
// "Provide documentId"
logger.fine(this.in.readLine());
this.out.println(documentId);
expectSuccess(this.in)
.andHandleError(InvalidDocumentIdException.DocumentDoesNotExistException.class);
});
}
@Override
public void leaveDocument(User user, String documentId) throws InvalidDocumentIdException.DocumentDoesNotExistException, UnknownUserException {
this.safelyExecute(() -> {
this.chooseOption("2");
this.userMarshallHandler.marshall(user);
// "Provide documentId"
logger.fine(this.in.readLine());
this.out.println(documentId);
expectSuccess(this.in)
.andHandleError(InvalidDocumentIdException.DocumentDoesNotExistException.class)
.andHandleError(UnknownUserException.class);
});
}
@Override
public Document getDocumentById(String documentId) throws InvalidDocumentIdException.DocumentDoesNotExistException {
return this.safelyExecute(() -> {
this.chooseOption("3");
// "Provide documentId"
logger.fine(this.in.readLine());
this.out.println(documentId);
expectSuccess(this.in)
.andHandleError(InvalidDocumentIdException.DocumentDoesNotExistException.class);
return this.documentObjectResolver.resolveObject();
});
}
@Override
public void createDocument(String documentId) throws InvalidDocumentIdException.DocumentAlreadyExistsException {
this.safelyExecute(() -> {
this.chooseOption("4");
// "Provide documentId"
logger.fine(this.in.readLine());
this.out.println(documentId);
expectSuccess(this.in)
.andHandleError(InvalidDocumentIdException.DocumentAlreadyExistsException.class);
});
}
@Override
public void deleteDocument(User user, String documentId) throws InvalidDocumentIdException.DocumentDoesNotExistException {
this.safelyExecute(() -> {
this.chooseOption("5");
this.userMarshallHandler.marshall(user);
// "Provide documentId"
logger.fine(this.in.readLine());
this.out.println(documentId);
expectSuccess(this.in)
.andHandleError(InvalidDocumentIdException.DocumentDoesNotExistException.class);
});
}
    @Override
    public Collection<String> getDocumentIds() {
        return this.safelyExecute(() -> {
            // Protocol option 6: list all document ids.
            this.chooseOption("6");
            // Server first sends the prompt line ("Provide size")...
            logger.fine(this.in.readLine());
            // ...then the count, followed by one id per line.
            int size = Integer.parseInt(this.in.readLine());
            logger.fine("Server responded with size " + size);
            Collection<String> documentIds = new HashSet<>();
            for (int i = 0; i < size; i++) {
                documentIds.add(this.in.readLine());
            }
            // Consume the trailing success marker before returning.
            expectSuccess(this.in);
            return documentIds;
        });
    }
}
|
#!/bin/bash -e
# Rank the 100 most frequently referenced headers across the build by scanning
# the compiler-generated .d dependency files under build/x86_64/obj/.
# The sed calls strip trailing ':' from make targets and collapse the
# duplicated mldb/mldb/ path prefix before counting.
find build/x86_64/obj/ -name "*.d" | xargs cat | grep '\.h:' | sed 's!:$!!' | sed 's!mldb/mldb/!mldb/!' | sort | uniq -c | sort -nr -k1 | head -n 100
|
# Import necessary libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

# Load the data
data = pd.read_csv("books_data.csv")

# Define feature and target variables
X = data['title']
y = data['category']

# Transform words into numerical features
# Vectorize feature and target data
from sklearn.feature_extraction.text import CountVectorizer
vectorizer = CountVectorizer()
# NOTE(review): .toarray() densifies the matrix; MultinomialNB accepts sparse
# input directly, so this may waste memory on large corpora.
X = vectorizer.fit_transform(X).toarray()

# Build the ML model
from sklearn.naive_bayes import MultinomialNB
clf = MultinomialNB()

# Fit the data on the classifier
# NOTE(review): the model is trained on the full data set with no
# train/test split, so no generalization estimate is available.
clf.fit(X, y)

# Make a prediction
pride_and_prejudice = vectorizer.transform(["Pride and Prejudice"]).toarray()
category = clf.predict(pride_and_prejudice)[0]
print("The book 'Pride and Prejudice' is classified as: ", category)
import cv2
import imageio
import numpy as np

# Initialize the video capture object (device 0 = default camera).
cap = cv2.VideoCapture(0)

# Frames captured so far (RGB order, as imageio expects).
frames = []
# Total frames read from the camera; used to keep every other frame.
frame_count = 0

try:
    while True:
        # Capture frame-by-frame
        ret, frame = cap.read()
        # BUGFIX: stop when the camera yields no frame; the original passed
        # None on to imshow/cvtColor after a failed read.
        if not ret:
            break
        # Display the frame
        cv2.imshow('frame', frame)
        # Convert the frame to RGB for imageio
        frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        # BUGFIX: the original tested `len(frames) % 2 == 0`, which becomes
        # (and stays) odd after the very first append, so only ONE frame was
        # ever recorded. Count *captured* frames instead to keep every other
        # frame as intended.
        if frame_count % 2 == 0:
            frames.append(frame_rgb)
        frame_count += 1
        # Check for 'q' key press to exit the loop
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    # Release the video capture object
    cap.release()
    cv2.destroyAllWindows()

# Save the captured frames as a GIF
imageio.mimsave('Lego_camera_view.gif', frames, fps=20)
<form>
<label for="name">Name:</label>
<input type="text" name="name" id="name" />
<label for="address">Address:</label>
<input type="text" name="address" id="address" />
<label for="phone">Phone Number:</label>
<input type="text" name="phone" id="phone" />
<input type="submit" />
</form> |
from PyQt5 import QtCore
import pickle
COLUMN_COUNT = 5
COLUMN_COUNT2 = 3
class TableModel(QtCore.QAbstractTableModel): # Model for data
def __init__(self, temp_headers):
QtCore.QAbstractTableModel.__init__(self)
self.__data = []
self.__headers = temp_headers
def save(self):
data = self.__data
pickle_out = open("Data/data.pickle", "wb")
pickle.dump(data, pickle_out)
pickle_out.close()
def load(self, data):
self.__data = data
def rowCount(self, parent):
return len(self.__data)
def columnCount(self, parent):
return COLUMN_COUNT
def data(self, index, role):
if role == QtCore.Qt.DisplayRole:
row = index.row()
column = index.column()
value = self.__data[row][column]
return value
def headerData(self, section, orientation, role):
if role == QtCore.Qt.DisplayRole:
if orientation == QtCore.Qt.Horizontal:
return self.__headers[section]
def find_place(self, date):
i = 0
for row in self.__data:
diff = self.__data[i][0].toJulianDay() - date.toJulianDay()
if diff > 0:
return i
i += 1
return i
    def insertRow(self, new_row, parent=QtCore.QModelIndex()):
        """Insert ``new_row`` at its date-sorted position.

        new_row layout: [date, ?, ?, amount-string, ...]; a running-balance
        cell (0) is appended before insertion and recomputed afterwards.
        """
        # NOTE(review): beginInsertRows() is told the row goes at the end of
        # the list, but find_place() may insert it elsewhere — confirm views
        # stay consistent.
        self.beginInsertRows(parent, len(self.__data), len(self.__data))
        # Normalize the amount: decimal comma -> dot, empty -> 0, then float.
        if new_row[3].find(",") != -1:
            new_row[3] = new_row[3].replace(',', '.')
        if new_row[3] == "":
            new_row[3] = 0
        new_row[3] = float(new_row[3])
        # Keep rows ordered by date (column 0).
        row = self.find_place(new_row[0])
        new_row.append(0)
        self.__data.insert(row, new_row)
        self.updateCredit(row)
        self.endInsertRows()
        return True
def removeRows(self, selected, parent=QtCore.QModelIndex()):
if selected:
for i in selected:
row = selected[0].row()
self.beginRemoveRows(parent, row, row)
self.__data.pop(row)
self.endRemoveRows()
self.updateCredit(row)
return True
else:
return False
def changeCell(self, column, index, new_data):
if new_data != "":
row = index.row()
if column == 3:
if new_data.find(",") != -1:
new_data = new_data.replace(',', '.')
new_data = float(new_data)
self.__data[row][column] = new_data
if column == 0:
old_row = row
row = self.find_place(new_data)
self.__data.insert(row, self.__data[old_row])
self.__data.pop(old_row+1)
self.updateCredit(row)
return True
else:
return False
def updateCredit(self, row):
for i in range(row, len(self.__data)):
if self.__data[i][3] == "":
self.__data[i][3] = 0
if i == 0:
self.__data[i][4] = self.__data[i][3]
else:
self.__data[i][4] = self.__data[i-1][4] + self.__data[i][3]
self.dataChanged.emit(self.index(row, 0), self.index(len(self.__data), 4))
"""def today(self):
index = self.__data.index(QtCore.QDate.currentDate())
print(index)"""
class TableModel2(QtCore.QAbstractTableModel):  # Model for Kategorien and Budget
    """Editable 3-column category/budget model."""

    def __init__(self):
        QtCore.QAbstractTableModel.__init__(self)
        self.__data = []  # row lists: [category name, <col1>, <col2>]

    def save(self):
        # Context manager guarantees the handle is closed even on error
        # (the original leaked the handle if pickling raised).
        with open("Data/category.pickle", "wb") as pickle_out:
            pickle.dump(self.__data, pickle_out)

    def load(self, data):
        self.__data = data

    def rowCount(self, parent):
        return len(self.__data)

    def columnCount(self, parent):
        return COLUMN_COUNT2

    def data(self, index, role):
        # Same payload for display and for the editor widget.
        if role in (QtCore.Qt.EditRole, QtCore.Qt.DisplayRole):
            return self.__data[index.row()][index.column()]

    def flags(self, index):
        return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

    def setData(self, index, value, role=QtCore.Qt.EditRole):
        # NOTE(review): `if value` also rejects 0/"" edits -- presumably
        # intended to disallow clearing a cell; confirm.
        if role == QtCore.Qt.EditRole and value:
            self.__data[index.row()][index.column()] = value
            self.dataChanged.emit(index, index)
            return True
        return False

    def insertRow(self, parent=QtCore.QModelIndex()):
        self.beginInsertRows(parent, len(self.__data), len(self.__data))
        self.__data.append(["Neue Kategorie", 0, 0])
        self.endInsertRows()
        return True

    def removeRow(self, x, parent=QtCore.QModelIndex()):
        """Remove the rows in selection `x` (list of QModelIndex)."""
        if not x:
            return False
        # Remove in descending order so already-removed rows do not shift
        # the remaining indices.  (The original popped x[0].row()
        # repeatedly, which only works for a contiguous ascending
        # selection.)
        rows = sorted({index.row() for index in x}, reverse=True)
        for row in rows:
            self.beginRemoveRows(parent, row, row)
            self.__data.pop(row)
            self.endRemoveRows()
        return True

    # TODO: einnahme, ausgabe
|
package com.ufrn.embarcados.reaqua.repository;
import com.ufrn.embarcados.reaqua.model.WaterTank;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for {@code WaterTank} entities; all CRUD and
 * paging operations are inherited from {@link JpaRepository}.
 */
public interface WaterTankRepository extends JpaRepository<WaterTank, Long> {
}
|
// Resolve the active mall id when the template is created and cache it in
// the session.  Priority: URL query (?mall=0001) > the logged-in user's
// stored location > the previous session value.
Template.preference.created = function () {
    this.autorun(function () {
        var mallParams = Session.get("mallParams");
        var queryMall = Router.current().params.query.mall; // for /?mall=0001
        if (queryMall) {
            mallParams = queryMall;
            if (Meteor.userId()) {
                // Persist the explicitly requested mall as the user's location.
                Meteor.call("userlocation_set", queryMall, Meteor.userId());
            }
        } else if (Meteor.userId()) {
            // Query once instead of the original's duplicate findOne().
            var userData = UserDataExts.findOne({userID: Meteor.userId()});
            if (userData) {
                mallParams = userData.location;
            }
        }
        Session.set("mallParams", mallParams);
        //this.subscription = Meteor.subscribe('Preferences',Session.get("mallParams"));
    }.bind(this));
};
// Intentionally empty: the subscription-readiness spinner below is
// disabled along with the subscription in created() above.
Template.preference.rendered = function () {
    // this.autorun(function () {
    //     if (!this.subscription.ready()) {
    //         IonLoading.show();
    //     } else {
    //         IonLoading.hide();
    //     }
    // }.bind(this));
};
// Template helpers for the preference list; all reads are scoped to the
// mall cached in Session("mallParams") by created() above.
Template.preference.helpers({
    // Preferences for the current mall, newest first.
    preferences: function () {
        //console.log(Preferences.find().fetch());
        return Preferences.find({mallindex:Session.get("mallParams")},{sort:{createdAt:-1}});
    },
    preferencecount: function () {
        //console.log(Preferences.find().fetch());
        return Preferences.find({mallindex:Session.get("mallParams")}).count();
    },
    /*
    preferencepics: function (taskId) {
        //console.log(Preferences.find().fetch());
        //console.log("taskId="+taskId);
        //console.log("taskId="+taskId+" name="+Preferences.findOne({_id:taskId}).name);
        //console.log(Preferences.findOne({_id:taskId}).fileIds[0])}).
        //console.log(Preferences.findOne({_id:taskId}).fileIds[0])})
        //if(Preferences.findOne({_id:taskId}).fileIds)
        //{
        //	console.log("fileIds="+Preferences.findOne({_id:taskId}).fileIds);
        //}
        return Preferences.findOne({_id:taskId}).fileIds;
    },*/
    // First attached file id of a preference document.
    // NOTE(review): throws if the document is missing or has no fileIds;
    // confirm the caller only passes ids of documents with attachments.
    preferencepic1st: function (taskId) {
        return Preferences.findOne({_id:taskId}).fileIds[0];
    },
    // Cursor over the upload document(s) for one file id.
    getpreferencepic: function (taskId) {
        //console.log("Each fileId="+taskId);
        //console.log("Each fileId="+taskId+","+Uploads.findOne({_id: taskId}).url);
        return Uploads.find({_id: taskId});
    },
});
//Template.shares.helpers({
// shares:function(){
// return Shares.find({},{sort:{createdAt:-1}});
// }
//});
|
/*
window.onload = function(){
$('#fab').draggable(function(){
});
window.document.addEventListener("touchmove", function(event){
event.preventDefault();
var tapX = event.touches[0].clientX;
var tapY = event.touches[0].clientY;
localStorage.setItem('tapX', tapX);
localStorage.setItem('tapY', tapY);
$('.fixed-action-btn').css({
top: tapY - 50,
left: tapX - 30
});
$('.fixed-action-btn ul').css({
left: tapX -310,
bottom: -tapY + 600
});
}, true);
}
*/
|
package cyclops.async.reactive.futurestream.pipeline.stream;
import java.util.stream.Stream;
/**
 * Minimal abstraction over a source of elements exposed as a
 * {@link java.util.stream.Stream}.
 *
 * @param <U> element type produced by the wrapped stream
 */
public interface StreamWrapper<U> {
    /** @return a {@code Stream} over the wrapped elements */
    Stream<U> stream(); // `public` is implicit on interface members
}
|
# Pulumi reads the stack passphrase from this variable; intentionally empty
# because these stacks are created without one.
export PULUMI_CONFIG_PASSPHRASE=
# Fail fast (instead of destroying with an empty stack name) when the
# stack argument is missing, and quote it so names survive word-splitting.
stack="${1:?usage: $0 <stack-name>}"
pulumi destroy --stack "$stack" --non-interactive -y
|
const number1 = 5;
const number2 = 10;

// Log the four basic arithmetic results for a pair of numbers, one line
// per operation, in a fixed order.
const calculation = (left, right) => {
    const operations = [
        ['Addition', left + right],
        ['Subtraction', left - right],
        ['Multiplication', left * right],
        ['Division', left / right],
    ];
    for (const [label, result] of operations) {
        console.log(`${label}: ${result}`);
    }
};

calculation(number1, number2);
import urllib.request
import urllib.error
import json

# Fetch and decode a JSON document, reporting (not raising) failures.
try:
    # urlopen() returns a context manager; `with` guarantees the response
    # is closed even if the read or the JSON decode fails.
    with urllib.request.urlopen('http://example.com/api') as response:
        data = json.loads(response.read().decode('utf-8'))
except urllib.error.HTTPError as e:
    # HTTPError first: it subclasses URLError and would otherwise be
    # swallowed.  urllib.error is the documented home of both classes
    # (the original's urllib.request.HTTPError relies on a re-export).
    print("Error: ", e.code)
except urllib.error.URLError as e:
    print("Error: ", e.reason)
except Exception:
    # Keep the original catch-all behaviour but avoid a bare `except`,
    # which would also trap KeyboardInterrupt/SystemExit.
    print("An unknown error occurred")
public String getDeviceType(int w) {
if (w <= 420) {
return "mobile";
} else if (w > 420 && w <= 768) {
return "tablet";
} else if (w > 768 && w < 1024) {
return "desktop";
} else {
return "wide";
}
} |
<reponame>mschnieder/media_manager_plus
-- Cleanup migration for media_manager_plus: drop the breakpoints table and
-- the grouping columns from the type table.  %TABLE_PREFIX% is substituted
-- by the installer at run time.
DROP TABLE IF EXISTS `%TABLE_PREFIX%media_manager_plus_breakpoints`;
ALTER TABLE `%TABLE_PREFIX%media_manager_type` DROP COLUMN `group`;
ALTER TABLE `%TABLE_PREFIX%media_manager_type` DROP COLUMN `subgroup`;
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_fast_forward_twotone = void 0;
var ic_fast_forward_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "polygon",
"attribs": {
"opacity": ".3",
"points": "15,9.86 15,14.14 18.03,12"
},
"children": [{
"name": "polygon",
"attribs": {
"opacity": ".3",
"points": "15,9.86 15,14.14 18.03,12"
},
"children": []
}]
}, {
"name": "polygon",
"attribs": {
"opacity": ".3",
"points": "6,9.86 6,14.14 9.03,12"
},
"children": [{
"name": "polygon",
"attribs": {
"opacity": ".3",
"points": "6,9.86 6,14.14 9.03,12"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M4,18l8.5-6L4,6V18z M6,9.86L9.03,12L6,14.14V9.86z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M4,18l8.5-6L4,6V18z M6,9.86L9.03,12L6,14.14V9.86z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M21.5,12L13,6v12L21.5,12z M15,9.86L18.03,12L15,14.14V9.86z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M21.5,12L13,6v12L21.5,12z M15,9.86L18.03,12L15,14.14V9.86z"
},
"children": []
}]
}]
}]
}]
}]
};
exports.ic_fast_forward_twotone = ic_fast_forward_twotone; |
#include "z3D/z3D.h"
#include "string.h"
#include "custom_models.h"
#include "objects.h"
#include "settings.h"
// Write a single byte at offset_ relative to the local BASE_ pointer; each
// patch function below sets BASE_ to the start of the binary it edits.
#define EDIT_BYTE(offset_, val_) (BASE_[offset_] = val_)

// Per-dungeon small-key color data (7 bytes each), copied into the key CMB
// at offset 0x12C by CustomModel_ApplyColorEditsToSmallKey().
u8 SmallKeyData[][7] = {
    { 0x00, 0x80, 0x00, 0x00, 0x00, 0xCC, 0x00 }, //Forest
    { 0x54, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00 }, //Fire
    { 0x00, 0x00, 0xDA, 0x00, 0x00, 0x00, 0xFF }, //Water
    { 0x25, 0x00, 0x40, 0x00, 0x64, 0x00, 0xAD }, //Shadow
    { 0x80, 0x00, 0x82, 0x00, 0xAD, 0x00, 0xB0 }, //BotW
    { 0x80, 0x55, 0x00, 0x00, 0xFF, 0xAA, 0x00 }, //Spirit
    { 0x44, 0x1E, 0x00, 0x00, 0x86, 0x3B, 0x00 }, //Fortress
    { 0xC4, 0x57, 0x00, 0x00, 0xFF, 0xD1, 0xAD }, //GTG
    { 0x00, 0x00, 0x00, 0x00, 0x1F, 0x1F, 0x1F }, //Ganon
};
// Patch the adult Link CMB in place so the tunic can be recolored: rewires
// two texture combiners and switches the tunic texture to ETC1a4.
// NOTE(review): all offsets are raw CMB file offsets determined by format
// inspection; verify against the model before changing any of them.
void CustomModel_EditLinkToCustomTunic(void* linkCMB) {
    char* BASE_ = (char*)linkCMB;
    // Edit combinerIndices
    EDIT_BYTE(0x6C4, 0x04);// Update combinerCount
    EDIT_BYTE(0x6CC, 0x0B); EDIT_BYTE(0x6CD, 0x00);// Add new combiner index (Replacing one of the combiners used by unused deku stick)
    EDIT_BYTE(0x6CE, 0x11); EDIT_BYTE(0x6CF, 0x00);
    //TextureCombiner0
    EDIT_BYTE(0x3588, 0x04); EDIT_BYTE(0x3589, 0x01);// CombinerMode to "Add"
    EDIT_BYTE(0x3594, 0x76);// SourceColor0 to "ConstantColor"
    EDIT_BYTE(0x359C, 0x03);// Color1Operand to OneMinusAlpha
    //TextureCombiner1
    EDIT_BYTE(0x35B0, 0x00); EDIT_BYTE(0x35B1, 0x21);// CombinerMode to "Modulate"
    EDIT_BYTE(0x35BE, 0xC0); EDIT_BYTE(0x35BF, 0x84);// SourceColor1 to "Texture0"
    EDIT_BYTE(0x35C4, 0x00);// Color1Operand to Color
    EDIT_BYTE(0x36FC, 0x78);// SourceColor0 to "Previous" (aka return the output of "TextureCombiner0")
    EDIT_BYTE(0x36FE, 0x77); EDIT_BYTE(0x36FF, 0x85);// SourceColor1 to "PrimaryColor"
    //Edit Texture Entry
    EDIT_BYTE(0x44E1, 0x40);// Update texture data length to "16384" bytes
    EDIT_BYTE(0x44EC, 0x5B);// Set texture to ETC1a4
}
// Child-Link counterpart of CustomModel_EditLinkToCustomTunic(): same kind
// of combiner rewiring and ETC1a4 texture switch, at the child model's
// (different) CMB offsets.
void CustomModel_EditChildLinkToCustomTunic(void* linkCMB) {
    char* BASE_ = (char*)linkCMB;
    // Edit combinerIndices
    EDIT_BYTE(0x6C4, 0x03);// Update combinerCount
    EDIT_BYTE(0x6CC, 0x0D); EDIT_BYTE(0x6CD, 0x00);
    //TextureCombiner0
    EDIT_BYTE(0x2974, 0x02); EDIT_BYTE(0x2975, 0x64);// CombinerMode to "AddMult"
    EDIT_BYTE(0x2978, 0x01);// ColorScale to "One"
    EDIT_BYTE(0x2980, 0x76);// SourceColor0 to "ConstantColor"
    EDIT_BYTE(0x2984, 0xC0); EDIT_BYTE(0x2985, 0x84);// SourceColor2 to "Texture0"
    EDIT_BYTE(0x2988, 0x03);// Color1Operand to OneMinusAlpha
    //TextureCombiner1
    EDIT_BYTE(0x299C, 0x00); EDIT_BYTE(0x299D, 0x21);// CombinerMode to "Modulate"
    EDIT_BYTE(0x29A0, 0x04);// ColorScale to "Four"
    EDIT_BYTE(0x29AA, 0x77);// SourceColor1 to "PrimaryColor"
    EDIT_BYTE(0x29B0, 0x00);// Color1Operand to Color
    //Edit Texture Entry
    EDIT_BYTE(0x3441, 0x40);// Update texture data length to "16384" bytes
    EDIT_BYTE(0x344C, 0x5B);// Set texture to ETC1a4
}
// Patch the heart-container CMB so it renders as the Double Defense
// variant.  NOTE(review): field meanings for these raw offsets are not
// documented here -- derived from CMB format inspection; confirm before
// editing.
void CustomModel_EditHeartContainerToDoubleDefense(void* heartContainerCMB) {
    char* BASE_ = (char*)heartContainerCMB;
    EDIT_BYTE(0xDB, 0x01);
    EDIT_BYTE(0xE8, 0x01);
    EDIT_BYTE(0x17C, 0x19); EDIT_BYTE(0x17D, 0x19); EDIT_BYTE(0x17E, 0x19);
    EDIT_BYTE(0x180, 0x00); EDIT_BYTE(0x181, 0x00); EDIT_BYTE(0x182, 0x00); EDIT_BYTE(0x183, 0xB2);
    EDIT_BYTE(0x1FC, 0x01);
    EDIT_BYTE(0x20D, 0x00);
    EDIT_BYTE(0x210, 0x01);
    EDIT_BYTE(0x235, 0x01);
    EDIT_BYTE(0x244, 0x02);
    EDIT_BYTE(0x2DC, 0xFF); EDIT_BYTE(0x2DD, 0xFF);
    EDIT_BYTE(0x358, 0x00);
}
// Copy the 7 palette bytes for the requested dungeon key type into the
// small-key CMB's color block at offset 0x12C.
void CustomModel_ApplyColorEditsToSmallKey(void* smallKeyCMB, s32 keyType) {
    char* BASE_ = (char*)smallKeyCMB;
    const u8* palette = SmallKeyData[keyType];
    s32 offset = 0;
    while (offset < 7) {
        EDIT_BYTE(0x12C + offset, palette[offset]);
        offset++;
    }
}
// Patch the title-screen archive (ZAR): convert the Nintendo-copyright and
// logo CMBs for the custom texture, and retint the fire animations.
// NOTE(review): offsets are raw positions inside the ZAR; derived from
// format inspection.
void CustomModel_EditTitleScreenLogo(void* titleScreenZAR) {
    char* BASE_ = (char*)titleScreenZAR;
    // copy_nintendo.cmb:
    EDIT_BYTE(0x4F3, 0x40);
    EDIT_BYTE(0x5905, 0x00); EDIT_BYTE(0x5906, 0x01);//Change texture dataLength
    EDIT_BYTE(0x590A, 0x01);// IsETC1 = true
    EDIT_BYTE(0x590D, 0x02);// Width = 512
    EDIT_BYTE(0x590E, 0x80);// Height = 128
    EDIT_BYTE(0x5910, 0x5B);// ETC1a4
    //Edit positionOffset of each shape
    EDIT_BYTE(0x597A, 0x80); EDIT_BYTE(0x597B, 0x3F);
    EDIT_BYTE(0x597C, 0x33); EDIT_BYTE(0x597D, 0x33); EDIT_BYTE(0x597E, 0x33); EDIT_BYTE(0x597F, 0x40);
    //Edit vertices/UVs
    EDIT_BYTE(0x5AFE, 0xA0); EDIT_BYTE(0x5B02, 0xA0); EDIT_BYTE(0x5B0A, 0xA0); EDIT_BYTE(0x5B0E, 0xA0);
    EDIT_BYTE(0x5B16, 0xA0); EDIT_BYTE(0x5B1A, 0xA0); EDIT_BYTE(0x5B22, 0xA0); EDIT_BYTE(0x5B26, 0xA0);
    // title_logo_us.cmb: Edit positionOffset of each shape
    EDIT_BYTE(0x36BF3, 0x40); EDIT_BYTE(0x36D33, 0x40); EDIT_BYTE(0x36E73, 0x40); EDIT_BYTE(0x36FB3, 0x40);
    EDIT_BYTE(0x370F3, 0x40); EDIT_BYTE(0x37233, 0x40); EDIT_BYTE(0x37373, 0x40); EDIT_BYTE(0x374B3, 0x40);
    EDIT_BYTE(0x375F3, 0x40); EDIT_BYTE(0x37733, 0x40); EDIT_BYTE(0x37873, 0x40); EDIT_BYTE(0x379B3, 0x40);
    EDIT_BYTE(0x37AF3, 0x40); EDIT_BYTE(0x37C33, 0x40); EDIT_BYTE(0x37D73, 0x40); EDIT_BYTE(0x37EB3, 0x40);
    EDIT_BYTE(0x37FF3, 0x40); EDIT_BYTE(0x38133, 0x40); EDIT_BYTE(0x38273, 0x40); EDIT_BYTE(0x383B3, 0x40);
    EDIT_BYTE(0x384F3, 0x40); EDIT_BYTE(0x38633, 0x40);
    // g_title_fire.cmab
    EDIT_BYTE(0x5E570, 0x01);// Change keyframe count to 1 so we only have to change one keyframe
    EDIT_BYTE(0x5E580, 0x0A); EDIT_BYTE(0x5E581, 0xD7); EDIT_BYTE(0x5E582, 0x23); EDIT_BYTE(0x5E583, 0x3D);// Red to 0.04
    EDIT_BYTE(0x5E660, 0x01);
    EDIT_BYTE(0x5E670, 0x91); EDIT_BYTE(0x5E671, 0xED); EDIT_BYTE(0x5E672, 0x5C); EDIT_BYTE(0x5E673, 0x3F);// Green 0.863
    // g_title_fire_ura.cmab
    EDIT_BYTE(0x5EA80, 0x01);
    // BUGFIX: the three trailing bytes previously wrote 0x5E581-0x5E583
    // (the g_title_fire block above) again instead of continuing the
    // 0x5EA90 float -- a copy-paste slip; write the ura keyframe instead.
    EDIT_BYTE(0x5EA90, 0x0A); EDIT_BYTE(0x5EA91, 0xD7); EDIT_BYTE(0x5EA92, 0x23); EDIT_BYTE(0x5EA93, 0x3D);
    EDIT_BYTE(0x5EB70, 0x01);
    EDIT_BYTE(0x5EB80, 0x91); EDIT_BYTE(0x5EB81, 0xED); EDIT_BYTE(0x5EB82, 0x5C); EDIT_BYTE(0x5EB83, 0x3F);
}
// The same offsets work for both fairy ocarina and ocarina of time,
// so we will just reuse this function for both
// Switch the ocarina texture to the RGBA565 format (0x5A; the ETC1a4
// edits above use 0x5B, so 0x5A is presumably the RGBA565 format id --
// confirm against the CMB format table).
void CustomModel_SetOcarinaToRGBA565(void* ocarinaCMB) {
    char* BASE_ = (char*)ocarinaCMB;
    EDIT_BYTE(0x3F2, 0x01); EDIT_BYTE(0x3F8, 0x5A);
}
// Switch the boss-key texture entry to an RGBA565-style format.
// NOTE(review): raw CMB offsets, undocumented here; confirm before editing.
void CustomModel_SetBossKeyToRGBA565(void* bossKeyCMB) {
    char* BASE_ = (char*)bossKeyCMB;
    EDIT_BYTE(0x43D, 0x10);
    EDIT_BYTE(0x442, 0x01); EDIT_BYTE(0x448, 0x5B);
    EDIT_BYTE(0x44A, 0x00); EDIT_BYTE(0x44B, 0x00);
}
// Per-frame hook: lazily spawn the custom general-assets object into the
// current scene's object context if it is not already resident.
void CustomModel_Update(void) {
    // Make sure custom_assets is loaded
    if (ExtendedObject_GetIndex(&gGlobalContext->objectCtx, OBJECT_CUSTOM_GENERAL_ASSETS) < 0) {
        ExtendedObject_Spawn(&gGlobalContext->objectCtx, OBJECT_CUSTOM_GENERAL_ASSETS);
    }
}
|
import test from 'ava';
import { Substitute, Arg } from '../../src/index';
// Test double: value object whose only instances come from the static
// factory (the constructor is private).
class Key {
    private constructor(private _value: string) { }
    static create() {
        return new this('123'); // fixed test value
    }
    get value(): string {
        return this._value;
    }
}
// Test double: mutable payload holding a server timestamp and numeric data;
// create() seeds it with "now" and [1].
class IData {
    private constructor(private _serverCheck: Date, private _data: number[]) { }
    static create() {
        return new this(new Date(), [1]);
    }
    set data(newData: number[]) {
        this._data = newData;
    }
    get serverCheck(): Date {
        return this._serverCheck;
    }
    get data(): number[] {
        return this._data;
    }
}
// Persistence boundary substituted (mocked) in the tests below.
abstract class IFetch {
    abstract getUpdates(arg: Key): Promise<IData>
    abstract storeUpdates(arg: IData): Promise<void>
}
// System under test: fetch updates, mutate the payload, store it back.
class Service {
    constructor(private _database: IFetch) { }
    public async handle(arg?: Key) {
        const updateData = await this.getData(arg);
        updateData.data = [100]
        await this._database.storeUpdates(updateData);
    }
    private getData(arg?: Key) {
        // NOTE(review): `arg` may be undefined here although getUpdates
        // declares a required Key -- relies on the substitute tolerating it.
        return this._database.getUpdates(arg);
    }
}
// Regression test for issue 36: a mocked promise must resolve to an object
// whose getters (serverCheck, data) still behave like the real class.
test('issue 36 - promises returning object with properties', async t => {
    const emptyFetch = Substitute.for<IFetch>();
    emptyFetch.getUpdates(Key.create()).returns(Promise.resolve<IData>(IData.create()));
    const result = await emptyFetch.getUpdates(Key.create());
    t.true(result.serverCheck instanceof Date, 'given date is instanceof Date');
    t.deepEqual(result.data, [1], 'arrays are deep equal')
});

// Class-instance arguments should match the configured call, and received()
// should accept an Arg.is predicate over the stored instance.
test('using objects or classes as arguments should be able to match mock', async t => {
    const db = Substitute.for<IFetch>();
    const data = IData.create();
    db.getUpdates(Key.create()).returns(Promise.resolve(data));
    const service = new Service(db);
    await service.handle(Key.create());
    db.received(1).storeUpdates(Arg.is((arg: IData) =>
        arg.serverCheck instanceof Date &&
        arg instanceof IData &&
        arg.data[0] === 100
    ));
    t.pass();
});
#!/bin/bash
# Environment definitions for a TripleO/RHOSP lab deployment.  Every value
# is exported so that child provisioning scripts inherit it.
export overcloud_virt_type="kvm"
export domain="lab1.local"
export undercloud_instance="undercloud"
# Provisioning network: inspection range, DHCP pool, gateway and CIDR.
export prov_inspection_iprange="192.168.24.51,192.168.24.91"
export prov_dhcp_start="192.168.24.100"
export prov_dhcp_end="192.168.24.200"
export prov_ip="192.168.24.1"
export prov_subnet_len="24"
export prov_cidr="192.168.24.0/${prov_subnet_len}"
export prov_ip_cidr="${prov_ip}/${prov_subnet_len}"
export fixed_vip="192.168.24.250"
# Interfaces for providing tests run (need only if network isolation enabled)
export internal_vlan="vlan710"
export internal_interface="eth1"
export internal_ip_addr="10.1.0.5"
export internal_net_mask="255.255.255.0"
export external_vlan="vlan720"
export external_interface="eth1"
export external_ip_addr="10.2.0.5"
export external_net_mask="255.255.255.0"
export tenant_ip_net="10.0.0.0/24"
# TODO: rework after AGENT_NODES, CONTROLLER_NODES be used as an input for rhosp
# Node counts/ids per overcloud role (comma-separated instance indices).
export overcloud_cont_instance="1,2,3"
export overcloud_ctrlcont_instance="1,2,3"
export overcloud_compute_instance="1"
export overcloud_dpdk_instance="1"
export overcloud_sriov_instance="1"
export overcloud_ceph_instance="1,2,3"
# to allow nova to use hp as well (2 are used by vrouter)
export vrouter_huge_pages_1g='32'
#SRIOV parameters
export sriov_physical_interface="ens2f3"
export sriov_physical_network="sriov1"
export sriov_vf_number="4"
# IPA params
export ipa_instance="ipa"
#export ipa_mgmt_ip="$ipa_mgmt_ip" - defined outside
export ipa_prov_ip="192.168.24.5"
|
#!/bin/bash
#
# Helper script for docker-compose app service. Ensures the db service is
# actually available before executing the runserver command.
#
# Poll the Postgres port on host "db" via bash's /dev/tcp virtual device:
# the redirection fails (keeping the loop running) until the port accepts
# a connection.  Connection-refused noise is discarded via /dev/null.
while !</dev/tcp/db/5432; do
  echo 'Waiting on db service; sleeping for 1 second.'
  sleep 1
done 2>/dev/null
echo 'The db service is available.'
# exec replaces this shell with the server process so container signals
# (SIGTERM on shutdown) reach Django directly instead of the wrapper.
exec ./manage.py runserver 0.0.0.0:8000
|
<reponame>cristidraghici/react-form-errors
'use strict';
exports.__esModule = true;
exports.default = maxLength;
var _lodash = require('lodash');
// Validator factory: returns a function that yields null when `data` is
// within the limit and an error message otherwise.  When `cannotBeEqual`
// is truthy, a size exactly equal to `length` is also rejected.
function maxLength(length, cannotBeEqual) {
  return function (data) {
    var size = (0, _lodash.size)(data);
    var limit = cannotBeEqual ? length - 1 : length;
    if (size <= limit) {
      return null;
    }
    return 'The maximum length is: ' + String(length) + ' characters.';
  };
}
import React, {Component} from 'react'
import Card from 'material-ui/Card'
import Button from 'material-ui/Button'
import TextField from 'material-ui/TextField'
import Icon from 'material-ui/Icon'
import PropTypes from 'prop-types'
import {withStyles} from 'material-ui/styles'
// withStyles theme hook: shared layout metrics for the form card and its
// text/number inputs (numbers get a narrower field).
const styles = theme => ({
    card: {
        marginRight:'12px',
        marginLeft: '12px',
        padding: '10px'
    },
    textField: {
        marginLeft: theme.spacing.unit,
        marginRight: theme.spacing.unit,
        width: 300
    },
    numberField: {
        marginLeft: theme.spacing.unit,
        marginRight: theme.spacing.unit,
        width: 70
    }
})
// Controlled form for editing one VR object (mesh URLs, transform, scale,
// color).  Local state mirrors the parent's object; every keystroke is
// forwarded upward via props.handleUpdate.
class VRObjectForm extends Component {
    // Defaults used when no existing vrObject is supplied.
    state = {
        objUrl: '',
        mtlUrl: '',
        translateX: 0,
        translateY: 0,
        translateZ: 0,
        rotateX: 0,
        rotateY: 0,
        rotateZ: 0,
        scale: 1,
        color:'white'
    }
    // Seed local state from props when editing an existing object; the
    // guard tolerates an absent or empty vrObject (i.e. "create" mode).
    componentDidMount = () => {
        if(this.props.vrObject && Object.keys(this.props.vrObject).length != 0){
            const vrObject = this.props.vrObject
            this.setState({
                objUrl: vrObject.objUrl,
                mtlUrl: vrObject.mtlUrl,
                translateX: Number(vrObject.translateX),
                translateY: Number(vrObject.translateY),
                translateZ: Number(vrObject.translateZ),
                rotateX: Number(vrObject.rotateX),
                rotateY: Number(vrObject.rotateY),
                rotateZ: Number(vrObject.rotateZ),
                scale: Number(vrObject.scale),
                color:vrObject.color
            })
        }
    }
    // Curried change handler: updates local state and notifies the parent
    // with (index, type, fieldName, newValue).
    handleChange = name => event => {
        this.setState({[name]: event.target.value})
        this.props.handleUpdate(this.props.index, this.props.type, name, event.target.value)
    }
    render() {
        const {classes} = this.props
        return (
            <Card className={classes.card}>
                <TextField
                    id="obj"
                    label=".obj url"
                    value={this.state.objUrl}
                    onChange={this.handleChange('objUrl')}
                    className={classes.textField}
                    margin="normal"
                /><br/>
                <TextField
                    id="mtl"
                    label=".mtl url"
                    value={this.state.mtlUrl}
                    onChange={this.handleChange('mtlUrl')}
                    className={classes.textField}
                    margin="normal"
                /><br/>
                <TextField
                    value={this.state.translateX}
                    label="TranslateX"
                    onChange={this.handleChange('translateX')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                />
                <TextField
                    value={this.state.translateY}
                    label="TranslateY"
                    onChange={this.handleChange( 'translateY')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                />
                <TextField
                    value={this.state.translateZ}
                    label="TranslateZ"
                    onChange={this.handleChange('translateZ')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                /><br/>
                <TextField
                    value={this.state.rotateX}
                    label="RotateX"
                    onChange={this.handleChange('rotateX')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                />
                <TextField
                    value={this.state.rotateY}
                    label="RotateY"
                    onChange={this.handleChange('rotateY')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                />
                <TextField
                    value={this.state.rotateZ}
                    label="RotateZ"
                    onChange={this.handleChange('rotateZ')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                /><br/>
                <TextField
                    value={this.state.scale}
                    label="Scale"
                    onChange={this.handleChange('scale')}
                    type="number"
                    className={classes.numberField}
                    margin="normal"
                />
                <TextField
                    value={this.state.color}
                    label="Color"
                    onChange={this.handleChange('color')}
                    className={classes.numberField}
                    margin="normal"
                />
                {/* NOTE(review): removeObject(...) is invoked during render;
                    this only works if removeObject is curried and returns
                    the actual click handler -- confirm in the parent. */}
                <Button onClick={this.props.removeObject(this.props.type, this.props.index)}>
                    <Icon style={{marginRight: '5px'}}>cancel</Icon> Delete
                </Button><br/>
            </Card>)
    }
}
VRObjectForm.propTypes = {
    classes: PropTypes.object.isRequired,
    index: PropTypes.number.isRequired,
    // `type` is read in handleChange and the delete button but was never
    // declared; declare it so misuse warns in development.
    // (assumed string -- confirm against the parent component)
    type: PropTypes.string.isRequired,
    // componentDidMount explicitly guards against a missing/empty
    // vrObject, so it must not be marked isRequired.
    vrObject: PropTypes.object,
    handleUpdate: PropTypes.func.isRequired,
    removeObject: PropTypes.func.isRequired
}
|
#include <stdio.h>

/* Print the square of every multiple of 5 in [0, 20]. */
int main()
{
    const int limit = 20;
    /* Step straight through the multiples of 5 instead of testing
       i % 5 for every integer -- the printed output is identical. */
    for (int value = 0; value <= limit; value += 5) {
        printf("Square of %d is %d\n", value, value * value);
    }
    return 0;
}
##################################################
# Import Own Assets
##################################################
from hyperparameter_hunter.metrics import ScoringMixIn, Metric, format_metrics, wrap_xgboost_metric
from hyperparameter_hunter.metrics import get_formatted_target_metric, get_clean_prediction
##################################################
# Import Miscellaneous Assets
##################################################
from collections import OrderedDict
import numpy as np
import pandas as pd
import pytest
import sys
##################################################
# Import Learning Assets
##################################################
from sklearn.metrics import roc_auc_score, hamming_loss, r2_score, f1_score
try:
from xgboost import DMatrix
except ImportError:
pass
##################################################
# Dummy Objects for Testing
##################################################
class EmptyClass(object):
    # Deliberately featureless object, used as an invalid argument in the
    # error scenarios below.
    pass


def my_r2_score(foo, bar):
    # Wrapper so tests can exercise a user-defined (non-sklearn) metric
    # callable whose direction must be inferred.
    return r2_score(foo, bar)


# Shared scenario building blocks: a baseline metrics dict and the "use all
# metrics" sentinel for each data split.
_metrics = dict(roc_auc_score=roc_auc_score)
_in_fold, _oof, _holdout = "all", "all", "all"
empty_class, empty_func = EmptyClass(), lambda _: _
def args_ids_for(scenarios):
    """Build ``pytest.mark.parametrize`` kwargs, labeling scenarios by index."""
    labels = [str(position) for position in range(len(scenarios))]
    return dict(argvalues=scenarios, ids=labels)
def keyed_args_ids_for(scenarios):
    """Like :func:`args_ids_for`, but for scenarios grouped in a dict.

    Ids are rendered as ``"<group>[<index-within-group>]"``.
    """
    arg_values, scenario_ids = [], []
    for group_key, scenario_group in scenarios.items():
        for position, scenario in enumerate(scenario_group):
            arg_values.append(scenario)
            scenario_ids.append(f"{group_key}[{position}]")
    return dict(argvalues=arg_values, ids=scenario_ids)
##################################################
# Metric Scenarios
##################################################
@pytest.fixture(scope="session")
def metric_init_params_lookup():
"""Lookup dictionary for `Metric` initialization parameters used in test scenarios. Keys
correspond to those in `metric_init_final_attributes_lookup`"""
return dict(
m_0=("roc_auc_score",),
m_1=("roc_auc_score", roc_auc_score),
m_2=("my_f1_score", "f1_score"),
m_3=("hamming_loss", hamming_loss),
m_4=("r2_score", r2_score, "min"),
m_5=("my_r2_score", my_r2_score),
)
@pytest.fixture(scope="session")
def metric_init_final_attributes_lookup():
"""Lookup dictionary for the expected values of `Metric` attributes after an instance has been
initialized with the value of the corresponding key in `metric_init_params_lookup`. The `Metric`
attributes whose values are verified are as follows: `name`, `metric_function`, `direction`"""
return dict(
m_0=("roc_auc_score", roc_auc_score, "max"),
m_1=("roc_auc_score", roc_auc_score, "max"),
m_2=("my_f1_score", f1_score, "max"),
m_3=("hamming_loss", hamming_loss, "min"),
m_4=("r2_score", r2_score, "min"),
m_5=("my_r2_score", my_r2_score, "max"),
)
@pytest.fixture(scope="function", params=["m_0", "m_1", "m_2", "m_3", "m_4", "m_5"])
def metric_instance(metric_init_params_lookup, metric_init_final_attributes_lookup, request):
"""Instance of `metrics.Metric` initialized with the corresponding values in
`metric_init_params_lookup`"""
metric = Metric(*metric_init_params_lookup[request.param])
#################### Ensure Attributes Properly Initialized ####################
(_name, _metric_function, _direction) = metric_init_final_attributes_lookup[request.param]
assert metric.metric_function == _metric_function
assert metric.direction == _direction
assert str(metric) == "Metric({}, {}, {})".format(_name, _metric_function.__name__, _direction)
return metric
def test_metric_initialization_helpers(metric_instance):
assert hasattr(metric_instance, "name")
assert hasattr(metric_instance, "metric_function")
assert hasattr(metric_instance, "direction")
# TODO: Add test to verify `Metric.__call__` calls `Metric.metric_function` with expected inputs
@pytest.mark.parametrize("direction", ["foo", "MAX", "bar"])
def test_metric_initialization_invalid_direction(direction):
with pytest.raises(ValueError, match="`direction` must be 'infer', 'max', or 'min', not .*"):
Metric("some_metric", roc_auc_score, direction)
##################################################
# ScoringMixIn Initialization Scenarios
##################################################
scoring_mix_in_init_params = ["metrics", "in_fold", "oof", "holdout"]
scenarios_valid_metrics = [
[_metrics],
[{"1": roc_auc_score}],
[dict(my_roc_auc=roc_auc_score, roc_auc_score=None)],
[dict(foo=roc_auc_score, roc_auc_score=None)],
[dict(foo=roc_auc_score, roc_auc_score=None, foo_2="roc_auc_score")],
[["roc_auc_score"]],
[["f1_score", "accuracy_score", "roc_auc_score"]],
]
scenarios_valid_metrics_lists = [
(_metrics, _in_fold, None, None),
(_metrics, None, None, None),
(_metrics, ["roc_auc_score"], _oof, _holdout),
(
["f1_score", "accuracy_score", "roc_auc_score"],
["f1_score"],
["accuracy_score"],
["roc_auc_score"],
),
(["f1_score", "accuracy_score", "roc_auc_score"], ["f1_score"], _oof, _holdout),
]
scenarios_type_error = dict(
metrics=[
("foo", _in_fold, _oof, _holdout),
(1, _in_fold, _oof, _holdout),
(None, _in_fold, _oof, _holdout),
# (['f1_score', 'accuracy_score', 'roc_auc_score'], _in_fold, _oof, _holdout), # This correctly fails
(empty_class, _in_fold, _oof, _holdout),
(empty_func, _in_fold, _oof, _holdout),
(tuple(), _in_fold, _oof, _holdout),
(list(), _in_fold, _oof, _holdout),
(dict(), _in_fold, _oof, _holdout),
],
metrics_key=[
({1: roc_auc_score}, _in_fold, _oof, _holdout),
({empty_class: roc_auc_score}, _in_fold, _oof, _holdout),
({empty_func: roc_auc_score}, _in_fold, _oof, _holdout),
({tuple(): roc_auc_score}, _in_fold, _oof, _holdout),
],
metrics_value=[
({"roc_auc_score": 1}, _in_fold, _oof, _holdout),
({"roc_auc_score": 1.2}, _in_fold, _oof, _holdout),
({"roc_auc_score": ["a", "b"]}, _in_fold, _oof, _holdout),
({"roc_auc_score": dict(a=1, b=2)}, _in_fold, _oof, _holdout),
],
metrics_lists=[
(_metrics, "foo", _oof, _holdout),
(_metrics, _in_fold, "foo", _holdout),
(_metrics, _in_fold, _oof, "foo"),
(_metrics, empty_class, _oof, _holdout),
(_metrics, empty_func, _oof, _holdout),
(_metrics, ("a", "b"), _oof, _holdout),
(_metrics, 1, _oof, _holdout),
(_metrics, 1.2, _oof, _holdout),
(_metrics, 1.2, "foo", empty_func),
],
metrics_lists_values=[
(_metrics, [1], _oof, _holdout),
(_metrics, _in_fold, [1.2], _holdout),
(_metrics, _in_fold, _oof, [empty_func]),
(_metrics, [empty_class], _oof, _holdout),
(_metrics, [tuple()], _oof, _holdout),
(_metrics, [["roc_auc"]], _oof, _holdout),
(_metrics, [dict(a=1, b=2)], 1, 1),
(_metrics, [None], _oof, _holdout),
],
)
scenarios_attribute_error = [
(dict(roc_auc="foo"), _in_fold, _oof, _holdout),
(dict(foo=None), _in_fold, _oof, _holdout),
(["foo"], _in_fold, _oof, _holdout),
(["roc_auc", "foo"], _in_fold, _oof, _holdout),
({"roc_auc_score": ("a", "b")}, _in_fold, _oof, _holdout),
]
scenarios_key_error = [
(_metrics, ["foo"], _oof, _holdout),
(_metrics, _in_fold, ["foo"], _holdout),
(_metrics, _in_fold, _oof, ["foo"]),
(_metrics, ["roc_auc", "foo"], _oof, _holdout),
]
@pytest.mark.parametrize(["metrics"], **args_ids_for(scenarios_valid_metrics))
def test_valid_scoring_mix_in_initialization_metrics(metrics):
ScoringMixIn(metrics=metrics, in_fold=_in_fold, oof=_oof, holdout=_holdout)
@pytest.mark.parametrize(scoring_mix_in_init_params, **args_ids_for(scenarios_valid_metrics_lists))
def test_valid_scoring_mix_in_initialization_metrics_lists(metrics, in_fold, oof, holdout):
ScoringMixIn(metrics=metrics, in_fold=in_fold, oof=oof, holdout=holdout)
@pytest.mark.parametrize(scoring_mix_in_init_params, **keyed_args_ids_for(scenarios_type_error))
def test_type_error_scoring_mix_in_initialization(metrics, in_fold, oof, holdout):
with pytest.raises(TypeError):
ScoringMixIn(metrics=metrics, in_fold=in_fold, oof=oof, holdout=holdout)
@pytest.mark.parametrize(scoring_mix_in_init_params, **args_ids_for(scenarios_attribute_error))
def test_attribute_error_scoring_mix_in_initialization(metrics, in_fold, oof, holdout):
with pytest.raises(AttributeError):
ScoringMixIn(metrics=metrics, in_fold=in_fold, oof=oof, holdout=holdout)
@pytest.mark.parametrize(scoring_mix_in_init_params, **args_ids_for(scenarios_key_error))
def test_key_error_scoring_mix_in_initialization(metrics, in_fold, oof, holdout):
with pytest.raises(KeyError):
ScoringMixIn(metrics=metrics, in_fold=in_fold, oof=oof, holdout=holdout)
##################################################
# `ScoringMixIn.evaluate` Scenarios
##################################################
data_types = ["in_fold", "oof", "holdout"]
#################### `ScoringMixIn` Instance Helpers ####################
def _get_mixin_data_types(mixin):
return [getattr(mixin, f"_ScoringMixIn__{_}") for _ in data_types]
def _call_evaluate(mixin):
for data_type in data_types:
mixin.evaluate(data_type, np.array([1, 0, 1, 0]), np.array([0.9, 0.3, 0.7, 0.8]))
return mixin
#################### `ScoringMixIn` Fixtures ####################
@pytest.fixture(scope="function")
def scoring_mix_in_fixture_0():
    # Default configuration: both metrics applied to all three splits.
    mixin = ScoringMixIn(metrics=dict(roc_auc=roc_auc_score, f1=f1_score), do_score=True)
    assert _get_mixin_data_types(mixin) == [["roc_auc", "f1"], ["roc_auc", "f1"], ["roc_auc", "f1"]]
    return mixin
@pytest.fixture(scope="function")
def scoring_mix_in_fixture_1():
    # `oof` restricted to ["f1"]; `holdout=None` disables holdout scoring.
    mixin = ScoringMixIn(metrics=dict(roc_auc=roc_auc_score, f1=f1_score), oof=["f1"], holdout=None)
    assert _get_mixin_data_types(mixin) == [["roc_auc", "f1"], ["f1"], []]
    return mixin
@pytest.fixture(scope="function")
def scoring_mix_in_fixture_2():
    # `do_score=False`: metric names are recorded, but evaluation is a no-op.
    mixin = ScoringMixIn(metrics=["f1_score"], do_score=False)
    assert _get_mixin_data_types(mixin) == [["f1_score"], ["f1_score"], ["f1_score"]]
    return mixin
@pytest.fixture(params=[f"scoring_mix_in_fixture_{_}" for _ in [0, 1, 2]])
def scoring_mix_in_fixture(request):
    # Meta-fixture yielding each of the three concrete fixtures above in turn.
    return request.getfixturevalue(request.param)
#################### `ScoringMixIn` Tests ####################
def test_initial_results(scoring_mix_in_fixture):
    # Before any evaluate() call, every split's results slot is None.
    assert all(_ is None for _ in scoring_mix_in_fixture.last_evaluation_results.values())
def test_evaluate_mix_in_0(scoring_mix_in_fixture_0):
    # Both metrics are computed for every split.
    scoring_mix_in_fixture_0 = _call_evaluate(scoring_mix_in_fixture_0)
    assert scoring_mix_in_fixture_0.last_evaluation_results == {
        _: OrderedDict([("roc_auc", 0.75), ("f1", 0.8)]) for _ in data_types
    }
def test_evaluate_mix_in_1(scoring_mix_in_fixture_1):
    # oof only computes "f1"; holdout was disabled, so its results are empty.
    scoring_mix_in_fixture_1 = _call_evaluate(scoring_mix_in_fixture_1)
    assert scoring_mix_in_fixture_1.last_evaluation_results == dict(
        in_fold=OrderedDict([("roc_auc", 0.75), ("f1", 0.8)]),
        oof=OrderedDict([("f1", 0.8)]),
        holdout=OrderedDict(),
    )
def test_evaluate_mix_in_2(scoring_mix_in_fixture_2):
    # do_score=False: evaluate() leaves all results as None.
    scoring_mix_in_fixture_2 = _call_evaluate(scoring_mix_in_fixture_2)
    assert scoring_mix_in_fixture_2.last_evaluation_results == {_: None for _ in data_types}
##################################################
# get_formatted_target_metric Scenarios
##################################################
@pytest.mark.parametrize(
    "target_metric",
    # Non-string/non-tuple values (containers, callables, classes, instances,
    # numbers) are rejected with TypeError.
    argvalues=[[], {}, lambda: True, type("Foo", tuple(), {}), type("Foo", tuple(), {})(), 1, 3.14],
)
def test_get_formatted_target_metric_type_error(target_metric):
    with pytest.raises(TypeError):
        get_formatted_target_metric(target_metric, format_metrics(["roc_auc_score"]))
@pytest.mark.parametrize(
    "target_metric",
    # Well-typed tuples whose contents are invalid: too many elements, an
    # unknown split name, or an unknown metric name.
    argvalues=[("oof", "roc_auc_score", "foo"), ("foo", "roc_auc_score"), ("holdout", "foo")],
)
def test_get_formatted_target_metric_value_error(target_metric):
    with pytest.raises(ValueError):
        get_formatted_target_metric(target_metric, format_metrics(["roc_auc_score"]))
##################################################
# get_clean_prediction Scenarios
##################################################
@pytest.mark.parametrize(
    ["target", "prediction", "expected"],
    argvalues=[
        # Already-clean integer labels pass through unchanged.
        ([1, 0, 1, 0], [1, 0, 1, 0], [1, 0, 1, 0]),
        # Regression-style continuous targets are not thresholded.
        ([[3.1, 2.2], [4.1, 0.9]], [[3.2, 2.3], [3.9, 0.8]], [[3.2, 2.3], [3.9, 0.8]]),
        # Probabilities against binary labels are rounded to 0/1.
        ([1, 0, 1, 0], [0.9, 0.1, 0.8, 0.2], [1.0, 0.0, 1.0, 0.0]),
        # Out-of-range scores are clipped/thresholded to 0/1 as well.
        ([1, 0, 1, 0], [2.3, -1.2, 1.9, 0.01], [1.0, 0.0, 1.0, 0.0]),
        # Multi-column one-hot targets: each column cleaned independently.
        (
            [[0, 0, 1], [0, 1, 0], [1, 0, 0]],
            [[-0.1, 0.2, 0.7], [0.2, 1.9, 0.1], [0.7, -0.1, -0.2]],
            [[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]],
        ),
        (
            [[0, 0, 1], [0, 1, 0], [1, 0, 0]],
            [[-0.1, 0.2, 0.7], [2.3, 1.9, 0.1], [0.7, -0.1, -0.2]],
            [[0.0, 0.0, 1.0], [1.0, 1.0, 0.0], [1.0, 0.0, 0.0]],
        ),
    ],
)
def test_get_clean_prediction(target, prediction, expected):
    # Compare via DataFrame.equals to get element-wise, dtype-aware equality.
    assert pd.DataFrame(
        get_clean_prediction(pd.DataFrame(target), pd.DataFrame(prediction))
    ).equals(pd.DataFrame(expected))
##################################################
# `wrap_xgboost_metric` Scenarios
##################################################
@pytest.mark.skipif("xgboost" not in sys.modules, reason="Requires `XGBoost` library")
def test_wrap_xgboost_metric():
    # The wrapper adapts a sklearn metric to XGBoost's (preds, DMatrix)
    # eval_metric convention and returns a ("name", value) pair.
    eval_metric = wrap_xgboost_metric(roc_auc_score, "roc_auc")
    d_matrix = DMatrix(np.array([[7], [14], [21], [28]]), label=[1, 0, 1, 0])
    assert eval_metric([0.9, 0.3, 0.7, 0.8], d_matrix) == ("roc_auc", 0.75)
|
#!/bin/bash
# Follow (tail -f style) the logs of the godotengine.org MariaDB container.
sudo docker logs -f godotengine-org--mariadb
package com.leetcode;
import java.util.HashMap;
import java.util.Map;
/**
 * LeetCode #13 — Roman to Integer.
 *
 * Scans the numeral left to right, looking at two symbols at a time: a symbol
 * strictly smaller than its right neighbour forms a subtractive pair (IV, CM,
 * ...) and both symbols are consumed together; otherwise the symbol's value is
 * simply added.
 */
public class Solution_13 {
    static Map<Character, Integer> map = new HashMap<>();
    static {
        map.put('I', 1);
        map.put('V', 5);
        map.put('X', 10);
        map.put('L', 50);
        map.put('C', 100);
        map.put('D', 500);
        map.put('M', 1000);
    }

    /**
     * Converts a Roman numeral to its integer value.
     *
     * @param s the numeral; null or blank input yields 0
     * @return the decoded integer
     */
    public int romanToInt(String s) {
        if (s == null || s.trim().equals("")) return 0;
        int total = 0;
        int i = 0;
        while (i < s.length()) {
            if (i == s.length() - 1) {
                // Final symbol has no right neighbour: always additive.
                total += map.get(s.charAt(i));
                break;
            }
            char current = s.charAt(i);
            char following = s.charAt(i + 1);
            if (lagerOrEquals(current, following)) {
                total += map.get(current);
                i++;
            } else {
                // Subtractive pair: consume both symbols at once.
                total += map.get(following) - map.get(current);
                i += 2;
            }
        }
        return total;
    }

    /**
     * Returns whether symbol {@code a}'s value is >= symbol {@code b}'s.
     * NOTE(review): the name has a typo ("lager" for "larger") but is part of
     * the public interface, so it is kept.
     */
    public boolean lagerOrEquals(Character a, Character b) {
        return map.get(a).compareTo(map.get(b)) >= 0;
    }
}
|
<gh_stars>0
# Helper module to develop git-annex backends
#
# https://git-annex.branchable.com/design/external_backend_protocol/
#
# Derived from AnnexRemote Copyright (C) 2017 <NAME> (GPL-3)
"""Interface and essential utilities to implement external git-annex backends
"""
import logging
from abc import (
ABCMeta,
abstractmethod,
)
import sys
import traceback
class Backend(metaclass=ABCMeta):
    """Abstract base class for external git-annex backends.

    It implements the communication with git-annex via the external backend
    protocol. More information on the protocol is available at
    https://git-annex.branchable.com/design/external_backend_protocol/

    External backends can be built by implementing the abstract methods defined
    in this class.

    Attributes
    ----------
    annex : Master
        The Master object to which this backend is linked. Master acts as an
        abstraction layer for git-annex.
    """

    def __init__(self, annex):
        self.annex = annex

    @abstractmethod
    def can_verify(self):
        """Returns whether the backend can verify the content of files match a
        key it generated. The verification does not need to be
        cryptographically secure, but should catch data corruption.

        Returns
        -------
        bool
        """

    @abstractmethod
    def is_stable(self):
        """Returns whether a key it has generated will always have the same
        content. The answer to this is almost always yes; URL keys are an
        example of a type of key that may have different content at different
        times.

        Returns
        -------
        bool
        """

    @abstractmethod
    def is_cryptographically_secure(self):
        """Returns whether keys it generates are verified using a
        cryptographically secure hash.

        Note that sha1 is not a cryptographically secure hash any longer.
        A program can change its answer to this question as the state of the
        art advances, and should aim to stay ahead of the state of the art by
        a reasonable amount of time.

        Returns
        -------
        bool
        """

    @abstractmethod
    def gen_key(self, local_file):
        """Examine the content of `local_file` and from it generate a key.

        While it is doing this, it can send any number of PROGRESS messages
        indicating the position in the file that it's gotten to.

        Parameters
        ----------
        local_file: str
            Path for which to generate a key.
            Note that in some cases, local_file may contain whitespace.

        Returns
        -------
        str
            The generated key.

        Raises
        ------
        BackendError
            If the file could not be received from the backend.
        """

    @abstractmethod
    def verify_content(self, key, content_file):
        """Examine a file and verify it has the content expected given a key.

        While it is doing this, it can send any number of PROGRESS messages
        indicating the position in the file that it's gotten to.

        If `can_verify() == False`, git-annex will not ask to do this.

        Returns
        -------
        bool
        """

    def error(self, error_msg):
        """Handle an ERROR message received from git-annex.

        git-annex can send ERROR at any time if things get too messed up to
        continue. This default implementation replies with our own error and
        terminates the process.

        Parameters
        ----------
        error_msg : str
            The error message received from git-annex
        """
        self.annex.error("Error received. Exiting.")
        raise SystemExit
# Exceptions
class AnnexError(Exception):
    """Common base class for all annexbackend exceptions."""

class ProtocolError(AnnexError):
    """Base class for protocol errors."""

class UnsupportedRequest(ProtocolError):
    """Must be raised when an optional request is not supported by the
    backend."""

class UnexpectedMessage(ProtocolError):
    """Raised when git-annex sends a message which is not expected at the
    moment."""

class BackendError(AnnexError):
    """Must be raised by the backend when a request did not succeed."""

class NotLinkedError(AnnexError):
    """Raised when a Master instance is accessed without being linked to a
    Backend instance."""
class Protocol(object):
    """
    Helper class handling the receiving part of the protocol (git-annex to
    backend). It parses the requests coming from git-annex and calls the
    respective method of the backend object.
    """

    def __init__(self, backend):
        self.backend = backend
        # Protocol version announced in reply to GETVERSION.
        self.version = "VERSION 1"

    def command(self, line):
        """Dispatch one raw protocol line to the matching do_* handler and
        return its reply (or None)."""
        line = line.strip()
        if not line:
            raise ProtocolError("Got empty line")
        # Request name, then an optional single payload argument.
        parts = line.split(" ", 1)
        method = self.lookupMethod(parts[0])
        if method is None:
            raise UnsupportedRequest(f'Unknown request {line!r}')
        try:
            if len(parts) == 1:
                reply = method()
            else:
                reply = method(parts[1])
        except TypeError as e:
            # Wrong argument count for the request surfaces as TypeError.
            # NOTE(review): a TypeError raised *inside* a handler body is also
            # converted to SyntaxError here — confirm this is intended.
            raise SyntaxError(e)
        else:
            return reply

    def lookupMethod(self, command):
        # Requests map to methods named do_<REQUEST>, matched
        # case-insensitively; None means "unsupported".
        return getattr(self, 'do_' + command.upper(), None)

    def do_GETVERSION(self):
        return self.version

    def do_CANVERIFY(self):
        return 'CANVERIFY-YES' if self.backend.can_verify() else 'CANVERIFY-NO'

    def do_ISSTABLE(self):
        return 'ISSTABLE-YES' if self.backend.is_stable() else 'ISSTABLE-NO'

    def do_ISCRYPTOGRAPHICALLYSECURE(self):
        return 'ISCRYPTOGRAPHICALLYSECURE-YES' \
            if self.backend.is_cryptographically_secure() \
            else 'ISCRYPTOGRAPHICALLYSECURE-NO'

    def do_GENKEY(self, *arg):
        # Payload is the local file path (may contain whitespace).
        try:
            key = self.backend.gen_key(arg[0])
            return f'GENKEY-SUCCESS {key}'
        except BackendError as e:
            return f'GENKEY-FAILURE {str(e)}'

    def do_VERIFYKEYCONTENT(self, *arg):
        # Payload is "<key> <file>"; only the first space separates them, so
        # the file path may contain further whitespace.
        try:
            success = self.backend.verify_content(*arg[0].split(" ", 1))
        except BackendError:
            success = False
        return 'VERIFYKEYCONTENT-SUCCESS' if success \
            else 'VERIFYKEYCONTENT-FAILURE'

    def do_ERROR(self, message):
        self.backend.error(message)
class Master(object):
    """
    Communication master bridging git-annex and a Backend implementation.

    It reads protocol requests from `input`, dispatches them through a
    Protocol instance to the linked backend, and writes replies and backend
    messages to `output`.

    Attributes
    ----------
    input : io.TextIOBase
        Where to listen for git-annex request messages.
        Default: sys.stdin
    output : io.TextIOBase
        Where to send replies and backend messages
        Default: sys.stdout
    backend : Backend
        A class implementing the Backend interface to which this master
        is linked.
    """

    def __init__(self, output=sys.stdout):
        """
        Initialize the Master with an output.

        Parameters
        ----------
        output : io.TextIOBase
            Where to send replies and backend messages
            Default: sys.stdout
        """
        self.output = output

    def LinkBackend(self, backend):
        """
        Link the Master to a backend. This must be done before calling Listen()

        Parameters
        ----------
        backend : Backend
            A class implementing Backend interface to which this master
            will be linked.
        """
        self.backend = backend
        self.protocol = Protocol(backend)

    def Listen(self, input=sys.stdin):
        """
        Listen on `input` for messages from git annex.

        Runs until EOF on `input` or until an unexpected error terminates the
        process.

        Parameters
        ----------
        input : io.TextIOBase
            Where to listen for git-annex request messages.
            Default: sys.stdin

        Raises
        ----------
        NotLinkedError
            If there is no backend linked to this master.
        """
        if not (hasattr(self, 'backend') and hasattr(self, 'protocol')):
            raise NotLinkedError("Please execute LinkBackend(backend) first.")
        self.input = input
        while True:
            # due to a bug in python 2 we can't use an iterator here: https://bugs.python.org/issue1633941
            line = self.input.readline()
            if not line:
                # EOF: git-annex closed the connection.
                break
            line = line.rstrip()
            try:
                reply = self.protocol.command(line)
                if reply:
                    self._send(reply)
            except UnsupportedRequest as e:
                self.debug(str(e))
                self._send("UNSUPPORTED-REQUEST")
            except Exception as e:
                # Anything unexpected: report the traceback via DEBUG, tell
                # git-annex we errored, and stop.
                for line in traceback.format_exc().splitlines():
                    self.debug(line)
                self.error(e)
                raise SystemExit

    def debug(self, *args):
        """
        Tells git-annex to display the message if --debug is enabled.

        Parameters
        ----------
        message : str
            The message to be displayed to the user
        """
        self._send("DEBUG", *args)

    def error(self, *args):
        """
        Generic error. Can be sent at any time if things get too messed up to continue.

        When possible, raise a BackendError inside the respective functions.
        The backend program should exit after sending this, as git-annex will
        not talk to it any further.

        Parameters
        ----------
        error_msg : str
            The error message to be sent to git-annex
        """
        self._send("ERROR", *args)

    def progress(self, progress):
        """
        Indicates the current progress of the transfer (in bytes). May be repeated
        any number of times during the transfer process, but it's wasteful to update
        the progress until at least another 1% of the file has been sent.

        This is highly recommended for ``*_store()``. (It is optional but good for
        ``*_retrieve()``.)

        Parameters
        ----------
        progress : int
            The current progress of the transfer in bytes.
        """
        self._send("PROGRESS {progress}".format(progress=int(progress)))

    def _send(self, *args, **kwargs):
        # Emit one protocol line and flush so git-annex sees it immediately.
        print(*args, file=self.output, **kwargs)
        self.output.flush()
|
/**
* Copyright (c) 2016-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
#pragma once
namespace rseq {
namespace internal {

// Assumed cache line size in bytes (typical for x86-64).
constexpr int kCachelineSize = 64;

template <typename T, bool alreadyCachelineAligned>
struct CachelinePaddedImpl;

// sizeof(T) is already a multiple of the cache line size: no padding needed.
template <typename T>
struct CachelinePaddedImpl<T, true> {
  T item;
};

// Pad the tail so the struct's size rounds up to a cache-line multiple.
// NOTE(review): this pads the *size* only; it does not align the object's
// start address — confirm callers provide cache-line-aligned storage.
template <typename T>
struct CachelinePaddedImpl<T, false> {
  T item;
  char padding[kCachelineSize - sizeof(T) % kCachelineSize];
};

// Wrapper that holds a T padded out to a whole number of cache lines, e.g.
// to avoid false sharing between adjacent array elements.
template <typename T>
struct CachelinePadded {
  // Casting from the return value of get() back to a CachelinePadded<T> is
  // guaranteed to work if T is standard-layout.
  T* get() {
    return &paddedItem.item;
  }
  // Note: can't be private; this struct must remain standard-layout to get the
  // guarantee that we can cast back and forth between the item and this struct
  // (in particular, we need this for Code objects).
  CachelinePaddedImpl<T, sizeof(T) % kCachelineSize == 0> paddedItem;
};

} // namespace internal
} // namespace rseq
|
from flwr.common import Weights
from typing import List, Tuple, Optional
import numpy as np
from .weighted_aggregate import Weighted_Aggregate
def FedAdagrad_Aggregate(
    current_weights: Weights,
    results: List[Tuple[Weights, int]],
    eta=1.0,
    tau=1e-2,
    beta_1=None,
    beta_2=None,
) -> Weights:
    '''
    Server-side FedAdagrad aggregation step.

    current_weights(model weights): The current newest global model weights.
    results: [(model weights, dataset size)].
    eta(float): Server-side learning rate.
    tau(float): Controls the algorithm's degree of adaptability.
    beta_1, beta_2: Unused here; accepted only so all Fed*_Aggregate
        functions share the same signature.

    NOTE(review): v_t is a local reset to zero on every call, so the Adagrad
    accumulator does not persist across rounds — confirm whether state is
    meant to be kept by the caller.
    '''
    v_t: Optional[Weights] = None
    # fedavg algo.
    fedavg_weights = Weighted_Aggregate(current_weights=current_weights,
                                        results=results,
                                        learning_rate=1.0)
    if fedavg_weights is None: return None
    # Get difference of delta weights (pseudo-gradient of this round).
    aggregated_updates = [
        subset_weights - current_weights[idx]
        for idx, subset_weights in enumerate(fedavg_weights)
    ]
    # Adagrad
    delta_t = aggregated_updates
    if not v_t:
        v_t = [np.zeros_like(subset_weights) for subset_weights in delta_t]
    # Accumulate squared updates (per-parameter second moment).
    v_t = [
        v_t[idx] + np.multiply(subset_weights, subset_weights)
        for idx, subset_weights in enumerate(delta_t)
    ]
    # Adaptive step: w' = w + eta * delta / (sqrt(v) + tau)
    weights_prime = [
        current_weights[idx]
        + eta * delta_t[idx] / (np.sqrt(v_t[idx]) + tau)
        for idx in range(len(delta_t))
    ]
    return weights_prime
def FedAdam_Aggregate(
    current_weights: Weights,
    results: List[Tuple[Weights, int]],
    eta=1.0,
    tau=1e-1,
    beta_1=0.9,
    beta_2=0.99,
) -> Weights:
    '''
    Server-side FedAdam aggregation step.

    current_weights(model weights): The current newest global model weights.
    results: [(model weights, dataset size)].
    eta(float): Server-side learning rate.
    tau(float): Controls the algorithm's degree of adaptability.
    beta_1 (float): Momentum parameter. Defaults to 0.9.
    beta_2 (float): Second moment parameter. Defaults to 0.99.

    NOTE(review): m_t and v_t are locals reset to zero on every call, so the
    Adam moments do not persist across rounds — confirm whether state is
    meant to be kept by the caller.
    '''
    delta_t: Optional[Weights] = None
    m_t: Optional[Weights] = None
    v_t: Optional[Weights] = None
    # fedavg algo.
    fedavg_weights = Weighted_Aggregate(current_weights=current_weights,
                                        results=results,
                                        learning_rate=1.0)
    if fedavg_weights is None: return None
    # Get difference of delta weights (pseudo-gradient of this round).
    aggregated_updates = [
        subset_weights - current_weights[idx]
        for idx, subset_weights in enumerate(fedavg_weights)
    ]
    # Adam
    delta_t = aggregated_updates
    if not m_t:
        m_t = [np.zeros_like(x) for x in delta_t]
    # First moment (momentum) update.
    m_t = [
        beta_1 * x + (1.0 - beta_1) * y
        for x, y in zip(m_t, delta_t)
    ]
    if not v_t:
        v_t = [np.zeros_like(x) for x in delta_t]
    # Second moment (uncentered variance) update.
    v_t = [
        beta_2 * x + (1.0 - beta_2) * np.multiply(y, y)
        for x, y in zip(v_t, delta_t)
    ]
    # Adaptive step: w' = w + eta * m / (sqrt(v) + tau)
    weights_prime = [
        x + eta * y / (np.sqrt(z) + tau)
        for x, y, z in zip(current_weights, m_t, v_t)
    ]
    return weights_prime
def FedYogi_Aggregate(
    current_weights: Weights,
    results: List[Tuple[Weights, int]],
    eta=1.0,
    tau=1e-1,
    beta_1=0.9,
    beta_2=0.99,
) -> Weights:
    '''
    Server-side FedYogi aggregation step.

    current_weights(model weights): The current newest global model weights.
    results: [(model weights, dataset size)].
    eta(float): Server-side learning rate.
    tau(float): Controls the algorithm's degree of adaptability.
    beta_1 (float): Momentum parameter. Defaults to 0.9.
    beta_2 (float): Second moment parameter. Defaults to 0.99.

    NOTE(review): m_t and v_t are locals reset to zero on every call, so the
    Yogi moments do not persist across rounds — confirm whether state is
    meant to be kept by the caller.
    '''
    delta_t: Optional[Weights] = None
    m_t: Optional[Weights] = None
    v_t: Optional[Weights] = None
    # fedavg algo.
    fedavg_weights = Weighted_Aggregate(current_weights=current_weights,
                                        results=results,
                                        learning_rate=1.0)
    if fedavg_weights is None: return None
    # Get difference of delta weights (pseudo-gradient of this round).
    aggregated_updates = [
        subset_weights - current_weights[idx]
        for idx, subset_weights in enumerate(fedavg_weights)
    ]
    # Yogi
    delta_t = aggregated_updates
    if not m_t:
        m_t = [np.zeros_like(x) for x in delta_t]
    # First moment (momentum) update.
    m_t = [
        beta_1 * x + (1.0 - beta_1) * y
        for x, y in zip(m_t, delta_t)
    ]
    if not v_t:
        v_t = [np.zeros_like(x) for x in delta_t]
    # Yogi second-moment update: sign-controlled, additive variant of Adam's.
    v_t = [
        x - (1.0 - beta_2) * np.multiply(y, y) * np.sign(x - np.multiply(y, y))
        for x, y in zip(v_t, delta_t)
    ]
    # Adaptive step: w' = w + eta * m / (sqrt(v) + tau)
    weights_prime = [
        x + eta * y / (np.sqrt(z) + tau)
        for x, y, z in zip(current_weights, m_t, v_t)
    ]
    return weights_prime
#!/bin/bash
# Run the Go solution for each Advent-of-Code day (day_01 .. day_25) that has
# a main.go. Days without one are skipped silently.
for i in $(seq -f "%02g" 1 25)
do
  GOFILE="./day_$i/main.go"
  # The original checked the same file twice (outer and inner `test -f`);
  # a single check is sufficient.
  if test -f "$GOFILE"
  then
    echo "#### Day $i ####"
    echo "Go: "
    go run "$GOFILE"
    printf "\n"
  fi
done
|
#!/bin/sh
# Install PHP dependencies using the official Composer image.
# Docker bind mounts require an absolute host path — a bare "." is rejected —
# so resolve the current directory explicitly. `-it` already implies a TTY,
# making the original's extra `--tty` redundant.
docker run --rm -it --volume "$(pwd):/app" composer install
<reponame>neno--/ks
package com.github.nenomm.ks.ktable;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.context.annotation.Profile;
// /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic kTable-topic-D --property "parse.key=true" --property "key.separator=:"
// /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic kTable-topic-D --property print.key=true --property print.value=true
// /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic testAggregateOutput --property print.key=true --property print.value=true
// /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic testAggregateStringOutput --property print.key=true --property print.value=true
// /opt/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --list | grep -i ktable-test-app-id-D
@Profile("KTable")
@EnableBinding(KTableCustomInput.class)
public class KTableStreamsTester {
    private static final Logger logger = LoggerFactory.getLogger(KTableStreamsTester.class);

    /**
     * Consumes the bound KTable changelog, logs every record, then groups the
     * table by key, counts entries per key, and publishes the counts (as
     * strings) to the "testAggregateStringOutput" topic.
     */
    @StreamListener
    public void doSomeKTableInput(@Input(KTableCustomInput.INPUT) KTable<String, String> input) {
        logger.info("Inside ktable listener");
        // this will print something on commit interval
        input.toStream().foreach((key, value) -> {
            logger.info("KTABLE KEY: {}, VALUE: {}", key, value);
        });
        //this will consume all messages, so nothing will go to grouping...
        //input.toStream().to("kTableOutput");
        // Identity regrouping so the table can be re-aggregated with String serdes.
        KGroupedTable<String, String> shareVolume1 = input.groupBy((k, v) -> new KeyValue<>(k, v), Serialized.with(Serdes.String(), Serdes.String()));
        KTable<String, Long> result = shareVolume1.count();
        // so you can display it in console:
        KStream<String, String> testAggregateStringOutput = result.toStream().mapValues((readOnlyKey, value) -> Long.toString(value));
        //result.toStream().to("testAggregateOutput", Produced.with(Serdes.String(), Serdes.Long()));
        testAggregateStringOutput.to("testAggregateStringOutput");
    }
}
|
<filename>src/main/java/net/henbit/raytracing/life/hittable/Hittable.java
package net.henbit.raytracing.life.hittable;
import net.henbit.raytracing.life.AABB;
import net.henbit.raytracing.life.HitRecord;
import net.henbit.raytracing.life.Ray;
import net.henbit.raytracing.life.Vector3;
/**
 * Base type for ray-traceable geometry.
 */
public abstract class Hittable
{
    /**
     * Tests whether {@code ray} hits this object within the parameter range
     * [tMin, tMax]. NOTE(review): implementations presumably populate
     * {@code hitRecord} on a hit — confirm against concrete subclasses.
     */
    public abstract boolean hit(final Ray ray, double tMin, double tMax, HitRecord hitRecord);

    /**
     * Computes the bounding box over the time interval [time0, time1],
     * writing the result into {@code outputBox}; returns false when the
     * object has no finite bounding box.
     */
    public abstract boolean boundingBox(double time0, double time1, AABB outputBox);

    // PDF value for importance sampling toward this object; default 0.
    public double pdfValue(final Vector3 origin, final Vector3 v)
    {
        return 0.0;
    }

    // Random direction toward this object from `origin`; default is a fixed axis.
    public Vector3 random(final Vector3 origin)
    {
        return new Vector3(1, 0, 0);
    }
}
|
<reponame>CrafterKina/JustEnoughItems
package mezz.jei.gui.ingredients;
import javax.annotation.Nullable;
import java.awt.Color;
import java.awt.Rectangle;
import java.util.Collection;
import java.util.List;
import com.google.common.base.Joiner;
import mezz.jei.Internal;
import mezz.jei.config.Config;
import mezz.jei.config.Constants;
import mezz.jei.gui.TooltipRenderer;
import mezz.jei.util.Translator;
import mezz.jei.util.color.ColorNamer;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.Gui;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.ItemModelMesher;
import net.minecraft.client.renderer.RenderItem;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.block.model.ItemCameraTransforms;
import net.minecraft.client.renderer.texture.TextureManager;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.TextFormatting;
import net.minecraftforge.client.ForgeHooksClient;
/**
 * Fast renderer for one item-stack cell of the JEI ingredient grid.
 * Holds a screen-space rectangle plus the current ItemStack (nullable when
 * the cell is empty) and draws the stack, its enchantment glint, edit-mode
 * blacklist overlays, highlight and tooltip.
 */
public class GuiItemStackFast {
    private static final ResourceLocation RES_ITEM_GLINT = new ResourceLocation("textures/misc/enchanted_item_glint.png");
    // Edit-mode overlay colors, one per blacklist entry type.
    private static final int blacklistItemColor = Color.yellow.getRGB();
    private static final int blacklistWildColor = Color.red.getRGB();
    private static final int blacklistModColor = Color.blue.getRGB();
    private final Rectangle area;        // cell bounds, including padding
    private final int padding;           // pixels around the 16x16 item icon
    private final ItemModelMesher itemModelMesher;
    @Nullable
    private ItemStack itemStack;         // null when this cell is empty

    public GuiItemStackFast(int xPosition, int yPosition, int padding) {
        this.padding = padding;
        final int size = 16 + (2 * padding);
        this.area = new Rectangle(xPosition, yPosition, size, size);
        this.itemModelMesher = Minecraft.getMinecraft().getRenderItem().getItemModelMesher();
    }

    public Rectangle getArea() {
        return area;
    }

    public void setItemStack(ItemStack itemStack) {
        this.itemStack = itemStack;
    }

    @Nullable
    public ItemStack getItemStack() {
        return itemStack;
    }

    /** Empties the cell. */
    public void clear() {
        this.itemStack = null;
    }

    /** True only when the cell is occupied and the point lies inside it. */
    public boolean isMouseOver(int mouseX, int mouseY) {
        return (itemStack != null) && area.contains(mouseX, mouseY);
    }

    /** Fast path: renders the baked item model (and glint) directly. */
    public void renderItemAndEffectIntoGUI() {
        if (itemStack == null) {
            return;
        }
        IBakedModel bakedModel = itemModelMesher.getItemModel(itemStack);
        bakedModel = bakedModel.getOverrides().handleItemState(bakedModel, itemStack, null, null);
        if (Config.isEditModeEnabled()) {
            renderEditMode();
            GlStateManager.enableBlend();
        }
        GlStateManager.pushMatrix();
        {
            // Center of the icon; z=150 matches vanilla GUI item depth.
            GlStateManager.translate(area.x + padding + 8.0f, area.y + padding + 8.0f, 150.0F);
            GlStateManager.scale(16F, -16F, 16F);
            bakedModel = ForgeHooksClient.handleCameraTransforms(bakedModel, ItemCameraTransforms.TransformType.GUI, false);
            GlStateManager.translate(-0.5F, -0.5F, -0.5F);
            Minecraft minecraft = Minecraft.getMinecraft();
            RenderItem renderItem = minecraft.getRenderItem();
            renderItem.renderModel(bakedModel, itemStack);
            if (itemStack.hasEffect()) {
                renderEffect(bakedModel);
            }
        }
        GlStateManager.popMatrix();
    }

    /** Draws the animated enchantment glint over an already-rendered model. */
    private void renderEffect(IBakedModel model) {
        Minecraft minecraft = Minecraft.getMinecraft();
        TextureManager textureManager = minecraft.getTextureManager();
        RenderItem renderItem = minecraft.getRenderItem();
        // Two scrolling passes of the glint texture, as in vanilla rendering;
        // magic numbers are raw GL enums/blend factors from the vanilla code.
        GlStateManager.depthMask(false);
        GlStateManager.depthFunc(514);
        GlStateManager.blendFunc(768, 1);
        textureManager.bindTexture(RES_ITEM_GLINT);
        GlStateManager.matrixMode(5890);
        GlStateManager.pushMatrix();
        GlStateManager.scale(8.0F, 8.0F, 8.0F);
        float f = (float) (Minecraft.getSystemTime() % 3000L) / 3000.0F / 8.0F;
        GlStateManager.translate(f, 0.0F, 0.0F);
        GlStateManager.rotate(-50.0F, 0.0F, 0.0F, 1.0F);
        renderItem.renderModel(model, -8372020);
        GlStateManager.popMatrix();
        GlStateManager.pushMatrix();
        GlStateManager.scale(8.0F, 8.0F, 8.0F);
        float f1 = (float) (Minecraft.getSystemTime() % 4873L) / 4873.0F / 8.0F;
        GlStateManager.translate(-f1, 0.0F, 0.0F);
        GlStateManager.rotate(10.0F, 0.0F, 0.0F, 1.0F);
        renderItem.renderModel(model, -8372020);
        GlStateManager.popMatrix();
        // Restore GL state for normal GUI rendering.
        GlStateManager.matrixMode(5888);
        GlStateManager.blendFunc(770, 771);
        GlStateManager.depthFunc(515);
        GlStateManager.depthMask(true);
        textureManager.bindTexture(TextureMap.LOCATION_BLOCKS_TEXTURE);
    }

    /** Slow path: defers to vanilla's full item rendering. */
    public void renderSlow() {
        if (Config.isEditModeEnabled()) {
            renderEditMode();
        }
        Minecraft minecraft = Minecraft.getMinecraft();
        RenderItem renderItem = minecraft.getRenderItem();
        renderItem.renderItemAndEffectIntoGUI(null, itemStack, area.x + padding, area.y + padding);
        GlStateManager.disableBlend();
    }

    /** Draws the stack-count/durability overlay. */
    public void renderOverlay(Minecraft minecraft) {
        if (itemStack == null) {
            return;
        }
        FontRenderer font = getFontRenderer(minecraft, itemStack);
        RenderItem renderItem = minecraft.getRenderItem();
        renderItem.renderItemOverlayIntoGUI(font, itemStack, area.x + padding, area.y + padding, null);
    }

    /** Colors sections of the cell to show which blacklists cover the stack. */
    private void renderEditMode() {
        if (itemStack == null) {
            return;
        }
        if (Config.isItemOnConfigBlacklist(itemStack, Config.ItemBlacklistType.ITEM)) {
            GuiScreen.drawRect(area.x + padding, area.y + padding, area.x + 8 + padding, area.y + 16 + padding, blacklistItemColor);
            GlStateManager.color(1f, 1f, 1f, 1f);
        }
        if (Config.isItemOnConfigBlacklist(itemStack, Config.ItemBlacklistType.WILDCARD)) {
            GuiScreen.drawRect(area.x + 8 + padding, area.y + padding, area.x + 16 + padding, area.y + 16 + padding, blacklistWildColor);
            GlStateManager.color(1f, 1f, 1f, 1f);
        }
        if (Config.isItemOnConfigBlacklist(itemStack, Config.ItemBlacklistType.MOD_ID)) {
            GuiScreen.drawRect(area.x + padding, area.y + 8 + padding, area.x + 16 + padding, area.y + 16 + padding, blacklistModColor);
            GlStateManager.color(1f, 1f, 1f, 1f);
        }
    }

    /** Item-specific font renderer, falling back to the default game font. */
    public static FontRenderer getFontRenderer(Minecraft minecraft, ItemStack itemStack) {
        Item item = itemStack.getItem();
        FontRenderer fontRenderer = item.getFontRenderer(itemStack);
        if (fontRenderer == null) {
            fontRenderer = minecraft.fontRendererObj;
        }
        return fontRenderer;
    }

    /** Full hover rendering: item, overlay, then highlight. */
    public void drawHovered(Minecraft minecraft) {
        if (itemStack == null) {
            return;
        }
        renderSlow();
        renderOverlay(minecraft);
        drawHighlight();
    }

    /** Semi-transparent white rectangle over the whole cell. */
    public void drawHighlight() {
        if (itemStack == null) {
            return;
        }
        GlStateManager.disableDepth();
        Gui.drawRect(area.x, area.y, area.x + area.width, area.y + area.height, 0x7FFFFFFF);
        GlStateManager.color(1f, 1f, 1f, 1f);
        GlStateManager.enableDepth();
    }

    public void drawTooltip(Minecraft minecraft, int mouseX, int mouseY) {
        if (itemStack == null) {
            return;
        }
        List<String> tooltip = getTooltip(minecraft, itemStack);
        FontRenderer fontRenderer = getFontRenderer(minecraft, itemStack);
        TooltipRenderer.drawHoveringText(itemStack, minecraft, tooltip, mouseX, mouseY, fontRenderer);
    }

    /**
     * Builds the tooltip lines: vanilla tooltip (title colored by rarity, the
     * rest gray), optional color names, and — in edit mode — blacklist
     * show/hide hints, all wrapped to the maximum tooltip width.
     */
    private static List<String> getTooltip(Minecraft minecraft, ItemStack itemStack) {
        List<String> list = itemStack.getTooltip(minecraft.thePlayer, minecraft.gameSettings.advancedItemTooltips);
        for (int k = 0; k < list.size(); ++k) {
            if (k == 0) {
                list.set(k, itemStack.getRarity().rarityColor + list.get(k));
            } else {
                list.set(k, TextFormatting.GRAY + list.get(k));
            }
        }
        // Wrap added lines to the widest existing tooltip line (bounded below
        // by the configured maximum).
        int maxWidth = Constants.MAX_TOOLTIP_WIDTH;
        for (String tooltipLine : list) {
            int width = minecraft.fontRendererObj.getStringWidth(tooltipLine);
            if (width > maxWidth) {
                maxWidth = width;
            }
        }
        if (Config.isColorSearchEnabled()) {
            ColorNamer colorNamer = Internal.getColorNamer();
            if (colorNamer != null) {
                Collection<String> colorNames = colorNamer.getColorNames(itemStack);
                if (!colorNames.isEmpty()) {
                    String colorNamesString = Joiner.on(", ").join(colorNames);
                    String colorNamesLocalizedString = TextFormatting.GRAY + Translator.translateToLocalFormatted("jei.tooltip.item.colors", colorNamesString);
                    list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(colorNamesLocalizedString, maxWidth));
                }
            }
        }
        if (Config.isEditModeEnabled()) {
            list.add("");
            list.add(TextFormatting.ITALIC + Translator.translateToLocal("gui.jei.editMode.description"));
            if (Config.isItemOnConfigBlacklist(itemStack, Config.ItemBlacklistType.ITEM)) {
                String description = TextFormatting.YELLOW + Translator.translateToLocal("gui.jei.editMode.description.show");
                list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(description, maxWidth));
            } else {
                String description = TextFormatting.YELLOW + Translator.translateToLocal("gui.jei.editMode.description.hide");
                list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(description, maxWidth));
            }
            if (Config.isItemOnConfigBlacklist(itemStack, Config.ItemBlacklistType.WILDCARD)) {
                String description = TextFormatting.RED + Translator.translateToLocal("gui.jei.editMode.description.show.wild");
                list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(description, maxWidth));
            } else {
                String description = TextFormatting.RED + Translator.translateToLocal("gui.jei.editMode.description.hide.wild");
                list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(description, maxWidth));
            }
            if (Config.isItemOnConfigBlacklist(itemStack, Config.ItemBlacklistType.MOD_ID)) {
                String description = TextFormatting.BLUE + Translator.translateToLocal("gui.jei.editMode.description.show.mod.id");
                list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(description, maxWidth));
            } else {
                String description = TextFormatting.BLUE + Translator.translateToLocal("gui.jei.editMode.description.hide.mod.id");
                list.addAll(minecraft.fontRendererObj.listFormattedStringToWidth(description, maxWidth));
            }
        }
        return list;
    }
}
|
package ormx;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
 * Streams rows of a JDBC {@link ResultSet}, mapping each row to an entity via
 * the supplied adapter. Single-use: the object is both the Iterable and its
 * only Iterator. The result set is closed as soon as it is exhausted or an
 * error occurs, but callers should still close() defensively (AutoCloseable).
 */
public class OrmIterator<T> implements Iterable<T>, Iterator<T>, AutoCloseable {
    final OrmObjectAdapter<T> adapter;
    final ResultSet resultSet;
    /** True when the cursor currently sits on an unconsumed row. */
    private boolean onRow;
    /** True when hasNext() has already advanced the cursor for the next next(). */
    private boolean advanced;

    public OrmIterator(OrmObjectAdapter<T> adapter, ResultSet resultSet) {
        this.adapter = adapter;
        this.resultSet = resultSet;
    }

    @Override public Iterator<T> iterator() {
        return this;
    }

    @Override public boolean hasNext() {
        // Idempotent. The original implementation advanced the cursor on
        // every call, so two consecutive hasNext() calls silently skipped a
        // record; we cache the advance until next() consumes it.
        if (advanced) return onRow;
        try {
            onRow = resultSet.next();
            advanced = true;
        } catch (SQLException e) {
            close();
            throw new RuntimeException(e.getMessage(), e);
        }
        if (!onRow) close(); // exhausted: release the result set eagerly
        return onRow;
    }

    @Override public T next() {
        // Per the Iterator contract, advance (if needed) and fail fast when
        // there is no further row instead of reading a stale cursor position.
        if (!hasNext()) throw new NoSuchElementException();
        advanced = false; // consume the row buffered by hasNext()
        try {
            return adapter.resultSetToEntityOrThrow(resultSet);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    @Override public void remove() {
        // The original silently ignored removal, violating the Iterator
        // contract; removal of streamed rows is not supported.
        throw new UnsupportedOperationException("remove");
    }

    @Override public void close() {
        OrmUtils.close(resultSet);
    }
}
|
def get_first_category(categories: list) -> str:
    """Return the ``cat_name`` of the first category mapping in ``categories``.

    Mirrors plain subscripting: raises ``IndexError`` when the list is empty
    and ``KeyError`` when the first entry lacks a ``cat_name`` key.
    """
    first_category = categories[0]
    return first_category["cat_name"]
import { isMaybeEmail, isMaybePhone } from '@authenticator/identity/validators';
// NOTE(review): the '<EMAIL>' strings look like anonymized fixtures from a
// data-scrubbing pass; the second case presumably held an *invalid* email
// before scrubbing — restore meaningful fixtures so the expectations are
// self-explanatory.
describe('validators Test', (): void => {
  test('checks for possible email', (): void => {
    // Table-driven cases: input plus the expected isMaybeEmail verdict.
    const tableTest = [
      {
        email: '<EMAIL>',
        result: true,
      },
      {
        email: '<EMAIL>',
        result: false,
      },
      {
        email: '+1555555555',
        result: false,
      },
      {
        email: '1234567789',
        result: false,
      },
    ];
    tableTest.forEach((tc): void => {
      expect(isMaybeEmail(tc.email)).toBe(tc.result);
    });
  });
  test('checks for possible phone', (): void => {
    // Table-driven cases: input plus the expected isMaybePhone verdict.
    const tableTest = [
      {
        phone: '+1555555555',
        result: true,
      },
      {
        phone: '1e10',
        result: false,
      },
      {
        phone: '1 (555) 555-5555',
        result: true,
      },
      {
        phone: '+1555ABC',
        result: false,
      },
    ];
    tableTest.forEach((tc): void => {
      expect(isMaybePhone(tc.phone)).toBe(tc.result);
    });
  });
});
|
#!/usr/bin/env bash
# Remove test artifacts located next to this script: opera data
# directories, log files, and the cached transactions file.
set -euo pipefail

# Operate relative to the script's own directory, wherever it is run from.
cd "$(dirname -- "$0")"

rm -fr -- opera*.datadir
# -f added so a missing *.log does not abort the script (and under set -e).
rm -f -- *.log
rm -f -- ./transactions.rlp
<reponame>vinci-project/goVncPVM<filename>goVncTCP/server.go
package tcpServer
import (
"encoding/hex"
"encoding/json"
"goVncPVM/goVncTCP/client"
"goVncPVM/goVncTCP/tools"
"goVncPVM/helpers"
"log"
"net"
"runtime"
"time"
"github.com/go-redis/redis"
"github.com/tidwall/evio"
)
// Package-level state, initialised in Start() and used by the evio
// callbacks and the readWorker goroutine.
// NOTE(review): the maps are touched from both the evio tick callback and
// readWorker without synchronisation — confirm these run on one goroutine.
var redisDB *redis.Client
var clients map[tools.Node]*client.Client
var serverConnections map[string]*tools.ServerConnection
var readChannel chan string
var errorChannel chan tools.Node
var localAddresses []string
var tranChannel *chan string
var myNodeType tools.NodeType
var myPublicKey string
var myPrivateKey []byte
var myIp string
// getActiveNodes loads the node list from redis (tools.NodesTable key),
// validates each entry, and returns the remote nodes we should connect to.
// Entries are skipped when the IP does not parse, the public key is not
// 66 characters, or the entry refers to this process itself (matched
// either by local IP or by public key).
func getActiveNodes() (nodes []tools.Node, err error) {
	stringCmd := redisDB.Get(tools.NodesTable)
	if helpers.IsRedisError(stringCmd) {
		err = stringCmd.Err()
		return
	}

	var nodesList tools.NodesList
	err = json.Unmarshal([]byte(stringCmd.Val()), &nodesList)
	if err != nil {
		// BUG FIX: message previously said "marshal" for an unmarshal failure.
		log.Println("Can't unmarshal bytes. ", err)
		return
	}

	for _, node := range nodesList.NLIST {
		if net.ParseIP(node.ADDRESS) == nil {
			log.Println("Can't parse IP. ")
			continue
		}
		if len(node.PUBLICKEY) != 66 {
			log.Println("Wrong publickey.")
			continue
		}
		if tools.StringInSlice(node.ADDRESS, localAddresses) {
			// One of our own addresses: learn our node type from the
			// matching entry, but never dial ourselves.
			log.Println("Our local IP.")
			if node.PUBLICKEY == myPublicKey {
				myNodeType = node.TYPE
			} else {
				log.Println("it's not my public key!!!")
				//panic("it's not my public key!!!")
			}
			continue
		}
		if node.PUBLICKEY == myPublicKey {
			// When running inside docker the advertised IP may differ from
			// any local IP, so also self-match on public key.
			log.Println("I've found myself! PK: ", myPublicKey)
			continue
		}
		nodes = append(nodes, node)
	}
	return
}
// handleConnection is the evio Data callback: it receives raw bytes from a
// peer and forwards recognised transactions onto readChannel.
func handleConnection(c evio.Conn, in []byte) (out []byte, action evio.Action) {
	// Stem nodes do not relay traffic at all.
	if myNodeType == tools.Stem {
		return
	}
	ip, _, _ := net.SplitHostPort(c.RemoteAddr().String())
	data := string(in)
	if node, ok := serverConnections[ip]; ok {
		if node.NodeData.TYPE == tools.Stem {
			// We do not work with stem node
			return
		}
		tranType := helpers.GetRawTransactionType(data)
		if tranType == "ST" {
			// Simple transactions are accepted only after the peer's HELLO.
			if node.HelloReceived {
				readChannel <- data
			}
		} else if tranType == "HL" {
			// HELLO transactions are always forwarded for verification.
			readChannel <- data
		}
	} else {
		// Unknown sender: drop the payload.
		log.Println("Sorry, I don't know you, ", ip)
	}
	return
}
// createConnectionsWithNodes is the evio Tick callback (re-armed every 10s):
// it refreshes the active node list, drops clients and connection records
// for nodes no longer listed, and dials newly-seen nodes.
func createConnectionsWithNodes() (delay time.Duration, action evio.Action) {
	log.Println("tick", time.Now().Unix())
	delay = 10 * time.Second
	nodes, _ := getActiveNodes()
	log.Println("NODES FROM SERVER: ", nodes)
	// Drop outbound clients for nodes that disappeared from the list.
	for node, client := range clients {
		if !tools.NodeInNodes(node, nodes) {
			log.Println("We don't need this node anymore")
			client.CloseConnection()
			delete(clients, node)
		}
	}
	// Drop inbound connection records for vanished nodes.
	for ip, connection := range serverConnections {
		if !tools.NodeInNodes(connection.NodeData, nodes) {
			log.Println("We don't need this connection anymore")
			delete(serverConnections, ip)
		}
	}
	for _, node := range nodes {
		if _, ok := clients[node]; ok {
			// Already have an outbound client for this node.
			log.Println("Such node already exists")
			continue
		}
		if _, ok := serverConnections[node.ADDRESS]; !ok {
			// First sighting: register the server-side record and only dial
			// on a later tick, once the record exists.
			// NOTE(review): HelloReceived is set true without an actual
			// HELLO having been verified — confirm this is intended.
			serverConnection := new(tools.ServerConnection)
			serverConnection.CopyNode(&node)
			serverConnections[node.ADDRESS] = serverConnection
			serverConnection.HelloReceived = true
			continue
		}
		if node.TYPE == tools.Stem {
			// We do not work with STEM
			continue
		}
		servAddr := net.JoinHostPort(node.ADDRESS, tools.NodePort)
		conn, err := net.DialTimeout("tcp", servAddr, 3*time.Second)
		if err != nil {
			log.Println("Dial failed:", err.Error())
			continue
		}
		// Keep-alive tuning is disabled: DialTimeout returns a net.Conn,
		// which does not expose SetKeepAlive (that lives on *net.TCPConn).
		// log.Println(conn.LocalAddr(), conn.RemoteAddr(), err)
		// err = conn.SetKeepAlive(true)
		// if err != nil {
		//	log.Println("Set keep alive failed: ", err.Error())
		//	continue
		// }
		// err = conn.SetKeepAlivePeriod(3 * time.Second)
		// if err != nil {
		//	log.Println("Set keep alive timeout failed: ", err.Error())
		//	continue
		// }
		log.Println("New connection")
		newClient := client.NewClient(node, conn, &errorChannel)
		newClient.Start()
		// Send Hello transaction with signature
		if helloTransaction, ok := helpers.CreateHelloTransaction(myPublicKey, myPrivateKey, myIp); ok {
			newClient.Write(helloTransaction)
			clients[node] = newClient
		} else {
			// Could not build a signed HELLO: abandon the connection.
			newClient.CloseConnection()
		}
	}
	return
}
// closeConnections tears down every outbound client connection.
func closeConnections() {
	for _, outbound := range clients {
		outbound.CloseConnection()
	}
}
// startServer configures the evio event handlers and blocks serving TCP
// on every interface at tools.NodePort.
func startServer() (err error) {
	var handlers evio.Events
	handlers.NumLoops = runtime.NumCPU()
	handlers.Data = handleConnection
	handlers.Tick = createConnectionsWithNodes
	listenAddr := "tcp://" + net.JoinHostPort("0.0.0.0", tools.NodePort)
	return evio.Serve(handlers, listenAddr)
}
func readWorker() {
// We get data, checking it validity, signature and resending to everyone
// Also, here we chek for client errors
for {
//
select {
case data := <-readChannel:
//
log.Println("NEW DATA")
tranType := helpers.GetRawTransactionType(data)
if tranType == "HL" {
// HELLO transaction
if helloTransaction, err := helpers.ParseHelloTransaction(data); err == nil {
//
if ok := helpers.VerifyHelloTransaction(helloTransaction); !ok {
//
if serverConnection, ok := serverConnections[helloTransaction.ADDRESS]; ok {
//
serverConnection.HelloReceived = true
serverConnections[helloTransaction.ADDRESS] = serverConnection
}
}
}
} else if tranType == "ST" {
// Simple transaction
if simpleTransaction, err := helpers.ParseSimpleTransaction(data); err == nil {
//
if transactionTime, _, ok := helpers.VerifySimpleTransaction(simpleTransaction); ok {
//
log.Println("RECEIVED FROM TCP SERVER: ", data)
errRedis := redisDB.ZAdd("RAW TRANSACTIONS", redis.Z{
Score: float64(transactionTime),
Member: data,
})
if helpers.IsRedisError(errRedis) {
//
log.Println(errRedis.Err())
}
}
}
} else if tranType == "AT" {
// Applicant transaction
if simpleTransaction, err := helpers.ParseApplicantTransaction(data); err == nil {
//
if transactionTime, _, ok := helpers.VerifyApplicantTransaction(simpleTransaction); ok {
//
log.Println("RECEIVED FROM TCP SERVER: ", data)
errRedis := redisDB.ZAdd("RAW TRANSACTIONS", redis.Z{
Score: float64(transactionTime),
Member: data,
})
if helpers.IsRedisError(errRedis) {
//
log.Println(errRedis.Err())
}
}
}
} else if tranType == "VT" {
// Vote transaction
if simpleTransaction, err := helpers.ParseVoteTransaction(data); err == nil {
//
if transactionTime, _, ok := helpers.VerifyVoteTransaction(simpleTransaction); ok {
//
log.Println("RECEIVED FROM TCP SERVER: ", data)
errRedis := redisDB.ZAdd("RAW TRANSACTIONS", redis.Z{
Score: float64(transactionTime),
Member: data,
})
if helpers.IsRedisError(errRedis) {
//
log.Println(errRedis.Err())
}
}
}
} else if tranType == "UAT" {
// Unregister Apllicant transaction
if simpleTransaction, err := helpers.ParseUATransaction(data); err == nil {
//
if transactionTime, _, ok := helpers.VerifyUATransaction(simpleTransaction); ok {
//
log.Println("RECEIVED FROM TCP SERVER: ", data)
errRedis := redisDB.ZAdd("RAW TRANSACTIONS", redis.Z{
Score: float64(transactionTime),
Member: data,
})
if helpers.IsRedisError(errRedis) {
//
log.Println(errRedis.Err())
}
}
}
} else if tranType == "UVT" {
// Unregister Vote transaction
if simpleTransaction, err := helpers.ParseUVTransaction(data); err == nil {
//
if transactionTime, _, ok := helpers.VerifyUVTransaction(simpleTransaction); ok {
//
log.Println("RECEIVED FROM TCP SERVER: ", data)
errRedis := redisDB.ZAdd("RAW TRANSACTIONS", redis.Z{
Score: float64(transactionTime),
Member: data,
})
if helpers.IsRedisError(errRedis) {
//
log.Println(errRedis.Err())
}
}
}
}
case node := <-errorChannel:
//
log.Println("ERROR: ", node.ADDRESS)
delete(clients, node)
case tran := <-*tranChannel:
//
for _, client := range clients {
//
log.Println("RECEIVED FROM REST: ", tran)
client.Write(tran)
}
}
}
}
// Start initialises the package state and runs the TCP server. It blocks
// inside startServer's event loop. r is the shared redis client, c the
// channel of transactions from the REST layer, privateKey this node's
// secret key, and ip its advertised address.
func Start(r *redis.Client, c *chan string, privateKey []byte, ip string) {
	redisDB = r
	tranChannel = c
	myPrivateKey = privateKey
	myPublicKey = hex.EncodeToString(helpers.PubkeyFromSeckey(myPrivateKey))
	myIp = ip
	clients = make(map[tools.Node]*client.Client)
	serverConnections = make(map[string]*tools.ServerConnection)
	readChannel = make(chan string, 1024)
	errorChannel = make(chan tools.Node)
	localAddresses = tools.GetLocalIps()
	go readWorker()
	if err := startServer(); err != nil {
		// NOTE(review): panicking here skips the channel cleanup below.
		panic(err.Error())
	}
	close(readChannel)
	close(errorChannel)
}
|
import pygame
def draw_split_rect_horizontal(display_surf, split_color, margin, height, m, window_shape):
    """Draw the ``m``-th full-width horizontal bar onto ``display_surf``.

    Bars are ``height`` pixels tall and stacked with ``margin`` pixels of
    spacing, so bar ``m`` starts at ``y = (margin + height) * m``.

    Returns:
        The bounding ``pygame.Rect`` of the drawn bar (previously the rect
        was computed and silently discarded).
    """
    rect_y = (margin + height) * m
    # window_shape[0] is assumed to be the surface width — TODO confirm.
    return pygame.draw.rect(display_surf, split_color, [0, rect_y, window_shape[0], height])
#!/bin/sh
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This disables LuaJIT for now, to avoid all the warnings from it. Maybe we need
# to talk to the author of it, or ideally, figure out how to get clang-analyzer to
# ignore them ?

# Where are our LLVM tools?
LLVM_BASE=${LLVM:-/opt/llvm}
NPROCS=${NPROCS:-$(getconf _NPROCESSORS_ONLN)}
NOCLEAN=${NOCLEAN:-}
OUTPUT_BASE=${OUTPUT_BASE:-/home/jenkins/clang-analyzer}

# Options
options="--status-bugs --keep-empty"
configure="--enable-experimental-plugins"

# Additional checkers
# Phil says these are all FP's: -enable-checker alpha.security.ArrayBoundV2
checkers="-enable-checker alpha.unix.cstring.BufferOverlap \
-enable-checker alpha.unix.PthreadLock \
-enable-checker alpha.core.BoolAssignment \
-enable-checker alpha.core.CastSize \
-enable-checker alpha.core.SizeofPtr"

# These shenanigans are here to allow it to run both manually, and via Jenkins
test -z "${ATS_MAKE}" && ATS_MAKE="make"
test ! -z "${WORKSPACE}" && cd "${WORKSPACE}/src"

# Where to store the results, special case for the CI
output="/tmp"

# Find a Jenkins output tree if possible
if [ "${JOB_NAME#*-github}" != "${JOB_NAME}" ]; then
    # This is a Github PR build (not a github branch per-se); override the
    # branch name accordingly. (A duplicate pre-check of the same condition
    # was removed — this block is the single source of truth.)
    ATS_BRANCH="github"
    if [ -w "${OUTPUT_BASE}/${ATS_BRANCH}" ]; then
        output="${OUTPUT_BASE}/${ATS_BRANCH}/${ghprbPullId}"
        # BUG FIX: missing space before the closing ']' made this test a
        # runtime error, so the per-PR directory was never created.
        [ ! -d "${output}" ] && mkdir "${output}"
    fi
    github_pr=" PR #${ghprbPullId}"
    results_url="https://ci.trafficserver.apache.org/clang-analyzer/${ATS_BRANCH}/${ghprbPullId}/"
else
    test -w "${OUTPUT_BASE}/${ATS_BRANCH}" && output="${OUTPUT_BASE}/${ATS_BRANCH}"
    github_pr=""
    results_url="https://ci.trafficserver.apache.org/clang-analyzer/${ATS_BRANCH}/"
fi

# Tell scan-build to use clang as the underlying compiler to actually build
# source. If you don't do this, it will default to GCC.
export CCC_CC=${LLVM_BASE}/bin/clang
export CCC_CXX=${LLVM_BASE}/bin/clang++

# This can be used to override any of those settings above.
# BUG FIX: 'source' is a bashism; use the portable '.' under #!/bin/sh.
[ -f ./.clang-analyzer ] && . ./.clang-analyzer

# Start the build / scan
[ "$output" != "/tmp" ] && echo "Results (if any) can be found at ${results_url}"

autoreconf -fi
${LLVM_BASE}/bin/scan-build ./configure ${configure} \
    CXXFLAGS="-stdlib=libc++ -I/opt/llvm/include/c++/v1 -std=c++17" \
    LDFLAGS="-L/opt/llvm/lib64 -Wl,-rpath=/opt/llvm/lib64"

# Since we don't want the analyzer to look at LuaJIT, build it first
# without scan-build. The subsequent make will then skip it.
${ATS_MAKE} -j "$NPROCS" -C lib all-local V=1 Q=

${LLVM_BASE}/bin/scan-build ${checkers} ${options} -o "${output}" \
    --html-title="clang-analyzer: ${ATS_BRANCH}${github_pr}" \
    ${ATS_MAKE} -j "$NPROCS" V=1 Q=
status=$?

# Clean the work area unless NOCLEAN is set. This is just for debugging when you
# need to see what the generated build did.
# BUG FIX: the test was inverted — it ran distclean only when NOCLEAN *was*
# set, contradicting the variable's name and the comment above.
if [ -z "$NOCLEAN" ]; then
    ${ATS_MAKE} distclean
fi

[ "$output" != "/tmp" ] && echo "Results (if any) can be found at ${results_url}"

# Cleanup old reports, for main clang and github as well (if the local helper script is available)
if [ -x "/admin/bin/clean-clang.sh" ]; then
    /admin/bin/clean-clang.sh
fi

# Exit with the scan-build exit code (thanks to --status-bugs)
exit $status
|
/**
 * Error raised for database failures, carrying a numeric error code.
 * BUG FIX: previously extended `RuntimeError`, which does not exist in
 * JavaScript (ReferenceError at class-definition time), and never set
 * `name`, so `error.name` would not read "DatabaseError" as the demo
 * below expects.
 */
class DatabaseError extends Error {
  constructor(message, errorCode) {
    super(message);
    this.name = 'DatabaseError';
    this.errorCode = errorCode;
  }

  /** Returns the numeric error code supplied at construction. */
  getErrorCode() {
    return this.errorCode;
  }
}
// Create an instance of DatabaseError and demonstrate its usage.
try {
  throw new DatabaseError('Connection failed', 500);
} catch (error) {
  // NOTE(review): this prints "DatabaseError" only if the class assigns
  // this.name; a plain Error subclass that never sets name logs "Error".
  console.log(error.name);
  console.log(error.message); // Output: Connection failed
  console.log(error.getErrorCode()); // Output: 500
}
// Ducks-style barrel for the accounts module: the reducer is the default
// export; operations and action types are re-exported under prefixed names.
import reducer from './reducers'

export { default as accountsOperations } from './operations'
export { default as accountsTypes } from './types'

export default reducer
|
<reponame>hallyn/lxd
/*
* An example of how to use lxd's golang /dev/lxd client. This is intended to
* be run from inside a container.
*/
package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"net"
"net/http"
"os"
)
// DevLxdDialer dials the unix socket at Path, ignoring the address that
// http.Transport passes in.
type DevLxdDialer struct {
	Path string
}
// DevLxdDial implements the transport's Dial hook. The network and path
// arguments are ignored; the connection always targets d.Path.
func (d DevLxdDialer) DevLxdDial(network, path string) (net.Conn, error) {
	unixAddr, resolveErr := net.ResolveUnixAddr("unix", d.Path)
	if resolveErr != nil {
		return nil, resolveErr
	}
	return net.DialUnix("unix", nil, unixAddr)
}
// DevLxdTransport routes every HTTP request through /dev/lxd/sock,
// regardless of the URL's host component.
var DevLxdTransport = &http.Transport{
	Dial: DevLxdDialer{"/dev/lxd/sock"}.DevLxdDial,
}
// main queries the /dev/lxd API: it checks the API root advertises "/1.0",
// then (optionally) prints the config key named by the first CLI argument.
func main() {
	c := http.Client{Transport: DevLxdTransport}

	// The host is arbitrary: the custom transport always dials /dev/lxd/sock.
	raw, err := c.Get("http://meshuggah-rocks/")
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	// BUG FIX: the response body was never closed.
	defer raw.Body.Close()

	if raw.StatusCode != http.StatusOK {
		fmt.Println("http error", raw.StatusCode)
		result, err := ioutil.ReadAll(raw.Body)
		if err == nil {
			fmt.Println(string(result))
		}
		os.Exit(1)
	}

	result := []string{}
	if err := json.NewDecoder(raw.Body).Decode(&result); err != nil {
		fmt.Println("err decoding response", err)
		os.Exit(1)
	}
	// Guard against an empty list before indexing (previously a panic).
	if len(result) == 0 || result[0] != "/1.0" {
		fmt.Println("unknown response", result)
		os.Exit(1)
	}

	if len(os.Args) > 1 {
		// Fetch a single config key: /1.0/config/<key>.
		raw, err := c.Get(fmt.Sprintf("http://meshuggah-rocks/1.0/config/%s", os.Args[1]))
		if err != nil {
			fmt.Println(err)
			os.Exit(1)
		}
		defer raw.Body.Close()

		key := ""
		if err := json.NewDecoder(raw.Body).Decode(&key); err != nil {
			fmt.Println("err decoding response", err)
			os.Exit(1)
		}
		fmt.Println(key)
	} else {
		fmt.Println("/dev/lxd ok")
	}
}
|
#!/bin/bash
####
# Original source of the following script
# Source : https://github.com/big-data-europe/docker-hadoop/blob/master/datanode/run.sh
####

# Derive the on-disk datanode directory by stripping the file:// URI
# scheme from the configured value.
datadir=$(echo "$HDFS_CONF_dfs_datanode_data_dir" | perl -pe 's#file://##')

if [ ! -d "$datadir" ]; then
  echo "Datanode data directory not found: $datadir" >&2
  exit 2
fi

# Run the datanode in the foreground (container entrypoint).
"$HADOOP_PREFIX/bin/hdfs" --config "$HADOOP_CONF_DIR" datanode
|
package controllers;
import models.StateModel.SkillTreeModel;
import models.StateModel.StatsModel;
import utilities.GameStateManager;
import utilities.KeyCommand.KeyCommand;
import utilities.State.State;
import views.StatsView;
import views.View;
import java.awt.event.KeyEvent;
public class SkillTreeViewController extends Controller {
    // Backing model for the skill tree (selection cursor, levels, lists).
    private SkillTreeModel model;

    // Stores the model and hands the GameStateManager to the base Controller.
    public SkillTreeViewController(SkillTreeModel model, GameStateManager gsm){
        super(gsm);
        this.model = model;
    }

    /**
     * Binds the keys handled on the skill-tree screen:
     *   UP / DOWN - move the selection in the tree,
     *   ENTER     - level up the selected skill,
     *   SHIFT     - mark the model active and rebuild its lists,
     *   T         - transition back to the stats screen.
     */
    @Override
    public void loadKeyCommand() {
        keyMap.put(KeyEvent.VK_UP, new KeyCommand(){
            @Override
            public void execute() {
                model.up();
            }
        });
        keyMap.put(KeyEvent.VK_DOWN, new KeyCommand() {
            @Override
            public void execute() {
                model.down();
            }
        });
        keyMap.put(KeyEvent.VK_ENTER, new KeyCommand() {
            @Override
            public void execute() {
                model.levelUp();
            }
        });
        keyMap.put(KeyEvent.VK_SHIFT, new KeyCommand() {
            @Override
            public void execute() {
                model.setActive();
                model.initLists();
            }
        });
        keyMap.put(KeyEvent.VK_T, new KeyCommand(){
            @Override
            public void execute() {
                statsStateTransition();
            }
        });
    }

    // Pops the current state, then pushes a state built from whatever
    // view/controller become current after the pop.
    // NOTE(review): the new State wraps the *post-pop* current view and
    // controller — confirm this is the intended stats-screen transition.
    private void statsStateTransition() {
        gsm.removeState();
        View view = gsm.getCurrentView();
        Controller controller = gsm.getCurrentController();
        State state = new State(view, controller);
        gsm.changeState(state);
    }

    // No per-frame model updates are needed on this screen.
    @Override
    public void updateModel() {
    }
}
|
#!/bin/bash
# Close the eww "powertitle" window first so the power menu is not left
# open on resume, then suspend the machine via systemd.
eww close powertitle
systemctl suspend
|
List<int> myList = new List<int>() {0, 5, 3, 2, 1, 4};

// Sorts listToSort in place from largest to smallest: comparing 'right'
// to 'left' reverses the natural ascending order.
void SortListDescending(List<int> listToSort)
{
    listToSort.Sort((left, right) => right.CompareTo(left));
}

SortListDescending(myList);
// After the call above, myList holds {5, 4, 3, 2, 1, 0}.
<reponame>samredway/cape-webservices
# Copyright 2018 BLEMUNDSBURY AI LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from package_settings import NAME, VERSION, PACKAGES, DESCRIPTION
from setuptools import setup
from pathlib import Path
import json
import urllib.request
from functools import lru_cache
@lru_cache(maxsize=50)
def _get_github_sha(github_install_url: str) -> str:
    """Return the SHA of the latest master commit for a github install URL.

    The URL is expected to look like
    ``git+https://github.com/<org>/<repo>#egg=<name>``; the organisation and
    repository names are extracted from its path components.
    """
    repository = Path(github_install_url).stem.split('#egg', 1)[0]
    organisation = Path(github_install_url).parent.stem
    api_url = f'https://api.github.com/repos/{organisation}/{repository}/commits/master'
    # Time out rather than hanging the build if the GitHub API is unreachable.
    with urllib.request.urlopen(api_url, timeout=30) as response:
        return json.loads(response.read())['sha']
# Package metadata and dependencies for the cape-webservices build.
setup(
    name=NAME,
    version=VERSION,
    long_description=DESCRIPTION,
    author='<NAME>',
    author_email='<EMAIL>',
    packages=PACKAGES,
    include_package_data=True,
    # Exact pins; the cape_* packages are pinned to the latest master commit
    # SHA fetched from the GitHub API at build time (network required).
    install_requires=[
        'Authomatic==0.1.0.post1',
        'beautifulsoup4==4.6.0',
        'markdown==2.6.11',
        'peewee==3.5.2',
        'pytest==3.6.4',
        'requests==2.18.1',
        'sanic==0.6.0',
        'numexpr==2.6.5.dev0',
        'cape.client==0.2.0',
        'cape_userdb==' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-userdb#egg=cape_userdb'),
        'cape_api_helpers==' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-api-helpers#egg=cape_api_helpers'),
        'cape_responder==' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-responder#egg=cape_responder'),
        'cape_document_manager==' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-document-manager#egg=cape_document_manager'),
    ],
    # NOTE(review): dependency_links is ignored by modern pip — confirm the
    # deployment still installs with a pip version that honours it.
    dependency_links=[
        'git+https://github.com/pydata/numexpr@cfeae8ae246e95f23613e8b587746ed788b81f35#egg=numexpr-2.6.5.dev0',
        'git+https://github.com/bloomsburyai/cape-userdb#egg=cape_userdb-' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-userdb#egg=cape_userdb'),
        'git+https://github.com/bloomsburyai/cape-api-helpers#egg=cape_api_helpers-' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-api-helpers#egg=cape_api_helpers'),
        'git+https://github.com/bloomsburyai/cape-responder#egg=cape_responder-' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-responder#egg=cape_responder'),
        'git+https://github.com/bloomsburyai/cape-document-manager#egg=cape_document_manager-' + _get_github_sha(
            'git+https://github.com/bloomsburyai/cape-document-manager#egg=cape_document_manager'),
    ],
    # Ship every bundled data file with each package.
    package_data={
        '': ['*.*'],
    },
)
|
"use strict";
const app = require("./express/server");
const { logger } = require("./express/utils/logger");
const { PORT } = require("./express/_constants");
const database = require("./express/db-config");
app.listen(PORT, () => {
database().then(() => logger.info(`connected to mongodb database`));
logger.info(`server running on port:${PORT}`);
});
|
<reponame>jonkumin/GV-Production<filename>src/cache.ts
import { CustomCacheKey } from '@xdn/core/router'
// Cache lifetimes, in seconds.
const ONE_HOUR = 60 * 60
const ONE_DAY = 24 * ONE_HOUR

// Tracking/session query parameters that must not fragment the cache:
// requests differing only in these parameters share one cached entry.
// NOTE(review): entries like '334' and 'sid' look site-specific — confirm
// they are still needed.
const queryParametersToExclude = [
  'utm_medium',
  'utm_campaign',
  'utm_source',
  'utm_content',
  'cjevent',
  '_hsenc',
  '_hsmi',
  'hsCtaTracking',
  'fbclid',
  'ref',
  'token',
  'afsrc',
  'sid',
  'rewardsmgr',
  'sref_id',
  '334',
  '_escaped_fragment_',
  'yoReviewsPage',
];

/**
 * The default cache setting for pages in the shopping flow:
 * cached at the edge for a day (serving stale for an hour while
 * revalidating), and in the service worker for a day, but never
 * in the plain browser cache.
 */
export const CACHE_PAGES = {
  edge: {
    maxAgeSeconds: ONE_DAY,
    staleWhileRevalidateSeconds: ONE_HOUR,
  },
  browser: {
    maxAgeSeconds: 0,
    serviceWorkerSeconds: ONE_DAY,
  },
  key: new CustomCacheKey().excludeQueryParameters(...queryParametersToExclude),
}

/**
 * The default cache setting for static assets like JS, CSS, and images.
 */
export const CACHE_ASSETS = {
  edge: {
    maxAgeSeconds: ONE_DAY,
  },
  browser: {
    maxAgeSeconds: 0,
    serviceWorkerSeconds: ONE_DAY,
  },
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2881-1
#
# Security announcement date: 2016-01-26 00:00:00 UTC
# Script generation date: 2017-01-19 21:07:03 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fix on version:
# - mysql-server-5.5:5.5.47-0ubuntu0.12.04.1
#
# Last versions recommended by security team:
# - mysql-server-5.5:5.5.54-0ubuntu0.12.04.1
#
# CVE List:
# - CVE-2016-0503
# - CVE-2016-0504
# - CVE-2016-0505
# - CVE-2016-0546
# - CVE-2016-0595
# - CVE-2016-0596
# - CVE-2016-0597
# - CVE-2016-0598
# - CVE-2016-0600
# - CVE-2016-0606
# - CVE-2016-0607
# - CVE-2016-0608
# - CVE-2016-0609
# - CVE-2016-0610
# - CVE-2016-0611
# - CVE-2016-0616
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Upgrade (never downgrade) mysql-server-5.5 to the patched version.
sudo apt-get install --only-upgrade mysql-server-5.5=5.5.54-0ubuntu0.12.04.1 -y
|
<gh_stars>1-10
package xos
import (
"io/ioutil"
"os"
"path/filepath"
"strings"
)
// Selection modes for the List* helpers below: MODE_DIR keeps
// directories, MODE_FILE keeps regular files.
const (
	MODE_DIR = 0
	MODE_FILE = 1
)
// ListSubFiles returns the names of the immediate children of path,
// filtered by mode: MODE_DIR keeps directories, MODE_FILE keeps files.
func ListSubFiles(path string, mode int) ([]string, error) {
	var names []string
	entries, err := ioutil.ReadDir(path)
	if err != nil {
		return names, err
	}
	for _, entry := range entries {
		if mode == MODE_DIR && entry.IsDir() {
			names = append(names, entry.Name())
		}
		if mode == MODE_FILE && !entry.IsDir() {
			names = append(names, entry.Name())
		}
	}
	return names, nil
}
// ListSubFilesRecur walks path recursively. With MODE_FILE it returns the
// full paths of files whose name ends in suffix; with MODE_DIR it returns
// every directory (the suffix filter is NOT applied to directories).
func ListSubFilesRecur(path string, suffix string, mode int) (files []string, err error) {
	files = make([]string, 0, 30)
	err = filepath.Walk(path, func(filename string, fi os.FileInfo, err error) error {
		if err != nil {
			// Skip entries that cannot be stat'ed instead of aborting the walk.
			return nil
		}
		if mode == MODE_FILE {
			if !fi.IsDir() && strings.HasSuffix(fi.Name(), suffix) {
				files = append(files, filename)
			}
			return nil
		} else if mode == MODE_DIR {
			if fi.IsDir() {
				files = append(files, filename)
			}
		}
		return nil
	})
	return files, err
}
// ListAllFilesRecur walks path recursively and returns the full paths of
// every entry selected by mode: MODE_FILE collects regular files, MODE_DIR
// collects directories (including path itself). Walk errors on individual
// entries are skipped rather than aborting the walk.
func ListAllFilesRecur(path string, mode int) (files []string, err error) {
	files = make([]string, 0, 30)
	err = filepath.Walk(path, func(name string, info os.FileInfo, walkErr error) error {
		if walkErr != nil {
			return nil
		}
		isDir := info.IsDir()
		if mode == MODE_FILE && !isDir {
			files = append(files, name)
		}
		if mode == MODE_DIR && isDir {
			files = append(files, name)
		}
		return nil
	})
	return files, err
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.