text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
/*
Copyright 2021 The loki-operator Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package lokioperator
const (
// GroupName is the API group name under which loki-operator
// resources are registered.
GroupName = "lokioperator.l0calh0st.cn"
)
|
<reponame>StediInc/spot
import { headers, Int64 } from "@stediinc/spot";
// Test fixture: each parameter of headersMethod exercises one @headers
// parsing case for spot (valid headers, non-object headers, illegal or
// empty property names, unsupported types, optional headers).
// NOTE(review): the malformed cases below look intentional negative-path
// fixtures — do not "fix" them. Line comments are used here on purpose;
// /** ... */ doc comments would be picked up by spot as descriptions.
class HeadersClass {
headersMethod(
// plain object parameter without the decorator
notHeaders: {
property: string;
},
// well-formed headers object
@headers
headers: {
property: string;
/** property description */
"property-with-description": string;
optionalProperty?: Int64;
},
// invalid: @headers on a non-object type
@headers
nonObjectHeaders: string,
// invalid: property name contains illegal characters
@headers
headersWithIllegalPropertyName: {
"illegal-field-name-header%$": string;
},
// invalid: empty property name
@headers
headersWithEmptyPropertyName: {
"": string;
},
// invalid: boolean is not a legal header value type
@headers
headersWithIllegalType: {
property: boolean;
},
// optional headers object
@headers
optionalHeaders?: {
property: string;
}
) {}
}
|
../build/boxqp_test
../build/deriv_test
../build/double_int_test
../build/ilqr_deriv_test
../build/ilqr_forward_test
|
#!/bin/bash
# Launch the demo binary twice against the same video list: once with the
# client config and once with the server config, each writing perf data
# to its own directory.
start_client() {
./../bin/demo \
--data_path ./files.list_video \
--src_frame_rate 10 \
--wait_time 0 \
--rtsp=false \
--loop=false \
--config_fname "client.json" \
--alsologtostderr \
--perf_db_dir="./perf_client"
}
start_server() {
./../bin/demo \
--data_path ./files.list_video \
--src_frame_rate 10 \
--wait_time 0 \
--rtsp=false \
--loop=false \
--config_fname "server.json" \
--alsologtostderr \
--perf_db_dir="./perf_server"
}
source env.sh
mkdir -p output
# Client runs in the background, server in the foreground; `wait` then
# reaps the backgrounded client before the script exits.
start_client & start_server
wait
|
<reponame>Miciah/rhc<gh_stars>0
require 'coverage_helper'
require 'webmock/rspec'
require 'fakefs/safe'
require 'rbconfig'
require 'pry' if ENV['PRY']
# chmod isn't implemented in the released fakefs gem
# but is in git. Once the git version is released we
# should remove this and actively check permissions
# Test-only patches to FakeFS's File (see header comment above class).
class FakeFS::File
def self.chmod(*args)
# noop
end
# FakeFS incorrectly assigns this to '/'
remove_const(:PATH_SEPARATOR) rescue nil
const_set(:PATH_SEPARATOR, ":")
# Treats any existing fake file as executable; FakeFS has no real
# permission bits to consult (see the chmod stub above).
def self.executable?(path)
# if the file exists we will assume it is executable
# for testing purposes
self.exists?(path)
end
end
#include 'mocha'
require 'rhc/cli'
include WebMock::API
# Collects everything written to $stderr plus anything readable from the
# mock $terminal, joined into one space-separated string with blank
# chunks dropped. $stderr is rewound first because some systems redirect
# warnings there.
def stderr
  $stderr.rewind
  chunks = [$stderr, $terminal].map(&:read)
  chunks.reject { |chunk| chunk.strip.empty? }.join(' ')
end
# Disables Commander's output pager during specs so command output is
# written straight through instead of being piped to a pager process.
module Commander::UI
alias :enable_paging_old :enable_paging
def enable_paging
end
end
# Helpers mixed into example groups for driving RHC's Commander-based
# CLI under test: building command runners, mocking the HighLine
# terminal, capturing stdout/stderr, and stubbing configuration.
module ClassSpecHelpers
include Commander::Delegates
# With +obj+: registers it as a top-level constant named after the
# example description. Without: derives that name by CamelCasing the
# description and stripping non-word characters.
def const_for(obj=nil)
if obj
Object.const_set(const_for, obj)
else
"#{description}".split(" ").map{|word| word.capitalize}.join.gsub(/[^\w]/, '')
end
end
# Replaces Commander's singleton runner with a fresh RHC::CommandRunner
# for +args+, applies baseline program metadata, runs the optional
# setup block, and returns the runner instance.
def new_command_runner *args, &block
Commander::Runner.instance_variable_set :"@singleton", RHC::CommandRunner.new(args)
program :name, 'test'
program :version, '1.2.3'
program :description, 'something'
program :help_formatter, RHC::HelpFormatter
#create_test_command
yield if block
Commander::Runner.instance
end
#
#
#
# Prepares a runner for +args+ with the subject's instance pre-mocked
# and returns a lambda that executes it (pairs with exit_with_code).
def expects_running *args
mock_terminal
r = new_command_runner *args do
instance #ensure instance is created before subject :new is mocked
subject.should_receive(:new).any_number_of_times.and_return(instance)
RHC::Commands.to_commander
end
lambda { r.run! }
end
# HighLine terminal double backed by StringIO objects, tracking the
# last read position so successive reads never repeat or miss data.
class MockHighLineTerminal < HighLine
def initialize(input, output)
super
@last_read_pos = 0
end
##
# read
#
# seeks to the last read in the IO stream and reads
# the data from that position so we don't repeat
# reads or get empty data due to writes moving
# the caret to the end of the stream
def read
@output.seek(@last_read_pos)
result = @output.read
@last_read_pos = @output.pos
result
end
##
# write_line
#
# writes a line of data to the end of the
# input stream appending a newline so
# highline knows to stop processing and then
# resets the caret position to the last read
def write_line(str)
reset_pos = @input.pos
# seek end so we don't overwrite anything
@input.seek(0, IO::SEEK_END)
result = @input.write "#{str}\n"
@input.seek(reset_pos)
result
end
end
# Swaps $stderr and $terminal for StringIO-backed doubles; leaves
# @input/@output/@error available to the example.
def mock_terminal
@input = StringIO.new
@output = StringIO.new
$stderr = (@error = StringIO.new)
$terminal = MockHighLineTerminal.new @input, @output
end
# Runs the block with $stdout, $stderr and $terminal captured
# (stderr separately), restoring the originals afterwards; returns the
# captured stdout text.
def capture(&block)
old_stdout = $stdout
old_stderr = $stderr
old_terminal = $terminal
@input = StringIO.new
@output = StringIO.new
$stdout = @output
$stderr = (@error = StringIO.new)
$terminal = MockHighLineTerminal.new @input, @output
yield
@output.string
ensure
$stdout = old_stdout
$stderr = old_stderr
$terminal = old_terminal
end
# Like #capture, but funnels $stderr into the same buffer as $stdout so
# the caller sees interleaved output; returns the combined text.
def capture_all(&block)
old_stdout = $stdout
old_stderr = $stderr
old_terminal = $terminal
@input = StringIO.new
@output = StringIO.new
$stdout = @output
$stderr = @output
$terminal = MockHighLineTerminal.new @input, @output
yield
@output.string
ensure
$stdout = old_stdout
$stderr = old_stderr
$terminal = old_terminal
end
# Feeds each element of +input+ to the mocked terminal, then starts the
# CLI with +arguments+ (expected to be defined by the example group).
def run(input=[])
#Commander::Runner.instance_variable_set :"@singleton", nil
mock_terminal
input.each { |i| $terminal.write_line(i) }
#"#{@output.string}\n#{$stderr.string}"
RHC::CLI.start(arguments)
end
# Runs the CLI (see #run) and returns its stdout/stderr as one string,
# appending the SystemExit message when the command exits.
def run_output(input=[])
run(input)
rescue SystemExit => e
"#{@output.string}\n#{$stderr.string}#{e}"
else
"#{@output.string}\n#{$stderr.string}"
end
#
# usage: stub_request(...).with(&user_agent_header)
#
def user_agent_header
lambda do |request|
request.headers['User-Agent'] =~ %r{\Arhc/\d+\.\d+.\d+ \(.*?ruby.*?\)}
end
end
# Builds an RHC::Config with file loading stubbed out, yields it (and
# its defaults) for customization, and makes it the stubbed default.
def base_config(&block)
config = RHC::Config.new
config.stub(:load_config_files)
defaults = config.instance_variable_get(:@defaults)
yield config, defaults if block_given?
RHC::Config.stub(:default).and_return(config)
end
# Config preloaded with a test login and a placeholder password.
def user_config
base_config do |config, defaults|
defaults.add 'default_rhlogin', 'test_user'
defaults.add 'password', '<PASSWORD>'
end
end
end
# Custom matcher: `expect { ... }.to exit_with_code(n)`. Treats either a
# rescued SystemExit status or the block's own return value as the code.
module ExitCodeMatchers
Spec::Matchers.define :exit_with_code do |code|
actual = nil
match do |block|
begin
actual = block.call
rescue SystemExit => e
actual = e.status
end
actual and actual == code
end
failure_message_for_should do |block|
"expected block to call exit(#{code}) but exit" +
(actual.nil? ? " not called" : "(#{actual}) was called")
end
failure_message_for_should_not do |block|
"expected block not to call exit(#{code})"
end
description do
"expect block to call exit(#{code})"
end
end
end
# Custom matcher: `expect { ... }.to call(:method).on(obj).with(args)`.
# Sets a message expectation on the chained object, runs the block, and
# fails the match if the block raises SystemExit.
module CommanderInvocationMatchers
Spec::Matchers.define :call do |method|
chain :on do |object|
@object = object
end
chain :with do |args|
@args = args
end
match do |block|
e = @object.should_receive(method)
e.with(@args) if @args
begin
block.call
true
rescue SystemExit => e
false
end
end
description do
"expect block to invoke '#{method}' on #{@object} with #{@args}"
end
end
end
# True-ish when the current host OS is Darwin (macOS). Keeps the `=~`
# return convention of the original: the match offset (0) on Darwin,
# nil elsewhere, so both truthiness checks and raw-value callers work.
def mac?
  host_os = RbConfig::CONFIG['host_os']
  /^darwin/ =~ host_os
end
# Make the custom matchers and CLI helper mixins available to every
# example group.
Spec::Runner.configure do |config|
config.include(ExitCodeMatchers)
config.include(CommanderInvocationMatchers)
config.include(ClassSpecHelpers)
end
|
package device
// TODO (NW): Write Tests
|
def construct_request_body(begin_date: str, end_date: str) -> dict:
    """Build the JSON-serializable body for a date-bounded request.

    Args:
        begin_date: Start date string, passed through verbatim.
        end_date: End date string, passed through verbatim.

    Returns:
        A dict with the camelCase keys the API expects.
    """
    body = dict(beginDate=begin_date, endDate=end_date)
    return body
// Root Angular module; depends on ngRoute (routing) and angular.filter.
// The commented-out sections that follow are development sketches of
// route configs, controllers and filters kept for reference.
angular.module('app', ['ngRoute', 'angular.filter'])
// .config(($routeProvider) => { //route gives you the .when and .otherwise
//one way
// $routeProvider
// .when('/', {
// template: '<a href="#/hello">Hello</a><h1>Home Sweet Home</h1>',
// })
// .when('/hello', {
// template: '<a href="#/">Home</a><h1>{{header}}</h1>',
// controller: 'MainCtrl',
// })
// .when('/hello/:name', {
// template: '<a href="#/">Home</a><h1>{{header}}</h1>',
// controller: 'MainCtrl',
// })
// .otherwise('/')
// })
// .controller('MainCtrl', function ($scope, $routeParams) {
// // $scope.header = `Hello ${$routeParams.name}`
// // const { name } = $routeParams
// $scope.header = `Hello ${$routeParams.name || '<NAME>'}` //to finish the hello/
// })
//OR with a input and button
//Now skipped
// $routeProvider
// .when('/', {
// templateUrl: 'app/partials/home.html'
// })
// .when('/hello', {
// templateUrl: 'app/partials/hello.html',
// controller: 'HelloCtrl',
// })
// .when('/hello/:name', {
// templateUrl: 'app/partials/hello-person.html',
// controller: 'HelloPersonCtrl',
// })
// .otherwise('/')
// })
// .controller('HelloCtrl', function ($scope, $location) {
// $scope.hello = () => $location.path(`/hello/${$scope.name}`)
// })
// .controller('HelloPersonCtrl', function ($scope, $routeParams) {
// $scope.header = `Hello ${$routeParams.name}`
// })
// .filter('camelcase', function () {
// return function (string) {
// return string.toUpperCase()
// }
// })
// // or
// .filter('camelcase', () => string => string.toUpperCase())
// })
//now skipped
// .filter('camelcase', () => (
// string => {
// const upperCamelCase = string
// .toLowerCase()
// .split(' ')
// .map(word => word[0].toUpperCase() + word.slice(1))
// .join('')
// return upperCamelCase[0].toLowerCase() + upperCamelCase.slice(1)
// }
// )
// )
|
#!/bin/bash
# Merge downloaded "flibusta" library updates into the main archive and
# prune stale database directories and old update files.
# Usage: <script> <base-dir> [user]
#   $1 - base dir holding <name>/, inpx/ and upd_<name>/
#   $2 - optional user whose home directory to cd into first
# Synology task scheduler has a problem running scripts under non-root user
if [ "$2" != "" ]; then
user_dir=$(eval echo "~$2")
if [ "${user_dir}" != "" ]; then
# Quoted to survive home paths containing spaces.
cd "${user_dir}"
fi
fi
# -----------------------------------------------------------------------------
# Following variables could be changed
# -----------------------------------------------------------------------------
name="flibusta"
# -----------------------------------------------------------------------------
# Main body
# -----------------------------------------------------------------------------
cdate="$(date +%Y%m%d_%H%M%S)"
mydir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
adir="$1/${name}"
odir="$1/inpx"
udir="$1/upd_${name}"
glog="${mydir}/${name}_merge_${cdate}.log"
# Mirror all output into a timestamped log; restore streams on exit/signals.
exec 3>&1 4>&2
trap 'exec 2>&4 1>&3' 0 1 2 3 RETURN
exec 1>"${glog}" 2>&1
# Clean old database directories - we have at least one good download
find "$1" -maxdepth 1 -type d -name "flibusta_*" | sort -nr | tail -n +6 | xargs -r -I {} rm -rf {}/
"${mydir}/libmerge" \
--verbose \
--keep-updates \
--destination "${adir};${udir}"
res=$?
if (( ${res} == 1 )); then
echo "LIBMERGE error!"
exit 1
fi
# Clean updates leaving last ones so libget2 would not download unnecessary updates next time
find "${udir}" -type f | sort -nr | tail -n +11 | xargs -r -I {} rm -r {}
|
#include <iostream>
int main() {
    // Classic two-pointer pair search on a sorted array: find two
    // indices whose values sum to target_sum.
    int target_sum = 15;
    int numbers[] = {2, 5, 7, 10};
    int arraySize = sizeof(numbers) / sizeof(numbers[0]);

    int leftIndex = 0;
    int rightIndex = arraySize - 1;
    bool found = false;

    // Advance the left pointer when the sum is too small, retreat the
    // right pointer when it is too large.
    while (leftIndex < rightIndex) {
        int currentSum = numbers[leftIndex] + numbers[rightIndex];
        // BUG FIX: the original referenced `targetSum`, which was never
        // declared (the variable is `target_sum`) — a compile error.
        if (currentSum < target_sum) {
            leftIndex++;
        } else if (currentSum > target_sum) {
            rightIndex--;
        } else {
            found = true;
            break;
        }
    }

    // BUG FIX: the original printed the entire optimalIndices buffer,
    // most of which was uninitialized. Print only the matched pair.
    if (found) {
        std::cout << "Optimal indices: [" << leftIndex << " " << rightIndex << "]\n";
    } else {
        std::cout << "No pair sums to " << target_sum << "\n";
    }
    return 0;
}
#!/bin/bash
# Print usage to stderr and exit 1; invoked on missing/unknown options.
usage() { echo "Usage: $0 [-c <channelname>] -n [chaincodename]" 1>&2; exit 1; }
while getopts ":c:n:" o; do
case "${o}" in
c)
c=${OPTARG}
;;
n)
n=${OPTARG}
;;
*)
usage
;;
esac
done
shift $((OPTIND-1))
if [ -z "${c}" ] || [ -z "${n}" ] ; then
usage
fi
echo "create channel channelID ${c} chaincodeName ${n} "
DATA=/home/ubuntu/hyperledgerconfig/data
export FABRIC_CFG_PATH=$DATA/
PEER_ORGS=("org1" "org2" "org3" "org4" "org5")
NUM_PEERS=5
CHANNEL_NAME=${c}
CHANNEL_TX_FILE=$DATA/$CHANNEL_NAME.tx
CA_CHAINFILE=${DATA}/org0-ca-cert.pem
ORDERER_HOST=orderer0.org0.deevo.com
export ORDERER_PORT_ARGS=" -o orderer0.org0.deevo.com:7050 --tls --cafile $CA_CHAINFILE --clientauth"
QUERY_TIMEOUT=30
# install chaincode on peer0-org1, peer0-org2
for ORG in ${PEER_ORGS[*]}; do
#initPeerVars $ORG 1
PEER_HOST=peer0.${ORG}.deevo.com
PEER_NAME=${PEER_HOST}
ORG_ADMIN_HOME=$DATA/orgs/$ORG/admin
CA_CHAINFILE=${DATA}/${ORG}-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=$ORG_ADMIN_HOME
export FABRIC_CA_CLIENT_TLS_CERTFILES=$CA_CHAINFILE
export CORE_PEER_MSPCONFIGPATH=$ORG_ADMIN_HOME/msp
export CORE_PEER_ID=$PEER_HOST
export CORE_PEER_ADDRESS=$PEER_HOST:7051
export CORE_PEER_LOCALMSPID=${ORG}MSP
export CORE_LOGGING_LEVEL=DEBUG
export CORE_PEER_TLS_ENABLED=true
export CORE_PEER_TLS_CLIENTAUTHREQUIRED=true
export CORE_PEER_TLS_ROOTCERT_FILE=$CA_CHAINFILE
export CORE_PEER_TLS_CLIENTCERT_FILE=$DATA/tls/$PEER_NAME-cli-client.crt
export CORE_PEER_TLS_CLIENTKEY_FILE=$DATA/tls/$PEER_NAME-cli-client.key
export CORE_PEER_PROFILE_ENABLED=true
# gossip variables
export CORE_PEER_GOSSIP_USELEADERELECTION=true
export CORE_PEER_GOSSIP_ORGLEADER=false
echo "Install for $PEER_HOST ..."
export ORDERER_PORT_ARGS=" -o orderer0.org0.deevo.com:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
echo $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode install -n $n -v 1.0 -p github.com/deevotech/sc-chaincode.deevo.io/supplychain/go
#$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode install -n ${n} -v 1.0 -p github.com/hyperledger/fabric/examples/chaincode/go/chaincode_example02
#sleep 3
done
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode list --installed -C $CHANNEL_NAME
#initPeerVars ${PORGS[1]} 1
#switchToAdminIdentity
ORG=org1
PEER_HOST=peer0.${ORG}.deevo.com
PEER_NAME=${PEER_HOST}
ORG_ADMIN_HOME=$DATA/orgs/$ORG/admin
CA_CHAINFILE=${DATA}/${ORG}-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=$ORG_ADMIN_HOME
export FABRIC_CA_CLIENT_TLS_CERTFILES=$CA_CHAINFILE
export CORE_PEER_MSPCONFIGPATH=$ORG_ADMIN_HOME/msp
export CORE_PEER_ID=$PEER_HOST
export CORE_PEER_ADDRESS=$PEER_HOST:7051
export CORE_PEER_LOCALMSPID=${ORG}MSP
export CORE_LOGGING_LEVEL=DEBUG
export CORE_PEER_TLS_ENABLED=true
export CORE_PEER_TLS_CLIENTAUTHREQUIRED=true
export CORE_PEER_TLS_ROOTCERT_FILE=$CA_CHAINFILE
export CORE_PEER_TLS_CLIENTCERT_FILE=$DATA/tls/$PEER_NAME-cli-client.crt
export CORE_PEER_TLS_CLIENTKEY_FILE=$DATA/tls/$PEER_NAME-cli-client.key
export CORE_PEER_PROFILE_ENABLED=true
# gossip variables
export CORE_PEER_GOSSIP_USELEADERELECTION=true
export CORE_PEER_GOSSIP_ORGLEADER=false
export ORDERER_PORT_ARGS=" -o orderer0.org0.deevo.com:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
echo $ORDERER_CONN_ARGS
echo "Instantiating chaincode on $PEER_HOST ..."
export ORDERER_PORT_ARGS=" -o orderer0.org0.deevo.com:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode instantiate -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["init"]}' $ORDERER_CONN_ARGS
sleep 10
#initPeerVars ${PORGS[0]} 1
#switchToUserIdentity
ORG=org1
PEER_HOST=peer0.${ORG}.deevo.com
PEER_NAME=${PEER_HOST}
ORG_ADMIN_HOME=$DATA/orgs/$ORG/admin
CA_CHAINFILE=${DATA}/${ORG}-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=$ORG_ADMIN_HOME
export FABRIC_CA_CLIENT_TLS_CERTFILES=$CA_CHAINFILE
export CORE_PEER_MSPCONFIGPATH=$ORG_ADMIN_HOME/msp
export CORE_PEER_ID=$PEER_HOST
export CORE_PEER_ADDRESS=$PEER_HOST:7051
export CORE_PEER_LOCALMSPID=${ORG}MSP
export CORE_LOGGING_LEVEL=DEBUG
export CORE_PEER_TLS_ENABLED=true
export CORE_PEER_TLS_CLIENTAUTHREQUIRED=true
export CORE_PEER_TLS_ROOTCERT_FILE=$CA_CHAINFILE
export CORE_PEER_TLS_CLIENTCERT_FILE=$DATA/tls/$PEER_NAME-cli-client.crt
export CORE_PEER_TLS_CLIENTKEY_FILE=$DATA/tls/$PEER_NAME-cli-client.key
export CORE_PEER_PROFILE_ENABLED=true
# gossip variables
export CORE_PEER_GOSSIP_USELEADERELECTION=true
export CORE_PEER_GOSSIP_ORGLEADER=false
echo "Updating anchor peers for $PEER_HOST ..."
export ORDERER_PORT_ARGS=" -o orderer0.org0.deevo.com:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
echo $ORDERER_CONN_ARGS
echo "Sending invoke transaction to $PEER_HOST ..."
echo "init orgs"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","1","supplier1","1","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","2","supplier2","1", "67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","3","farmer1","2","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","4","farmer2","2","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","5","factory1","3","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","6","factory2","3","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","7","retailer1","4","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","8","retailer3","4","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","9","consumer1","5","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","10","consumer2","5","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
#$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","11","tree1","6","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
#$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","12","tree2","6","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
echo "init trees for farmer1 and farmer2"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initFarmerTree","11","tree1","1000","11", "12", "1", "3", "1000"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initFarmerTree","12","tree2","1000","13", "14", "3", "4", "1000"]}' $ORDERER_CONN_ARGS
echo "init suppliermaterials"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","1","material1","10","1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","2","material2","20","1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","3","material3","15","2"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","4","material4","30","2"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","2","material5","30","1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","3","material6","30","2"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action sell material1 to farmer1"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material1","3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material2","3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material3","4"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material4","4"]}' $ORDERER_CONN_ARGS
# BUG FIX: `sleep` takes only a duration; the original passed the status
# message as a second argument, which makes sleep fail and loses the echo.
sleep 3
echo "action material to tree"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material1","11"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material2","11"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material3","12"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material4","12"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action get historyfor Materials 1, 2, 3, 4"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material2"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material4"]}' $ORDERER_CONN_ARGS
sleep 3
#Rich Query (Only supported if CouchDB is used as state database):
echo "query Materials By Owner"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["queryMaterialsByOwner","3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["queryMaterialsByOwner","4"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action harvest agri product"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["harvestAgriProduct","111", "aproduct1", "11", "1000", "3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["harvestAgriProduct","112", "aproduct1", "12", "2000", "4"]}' $ORDERER_CONN_ARGS
echo "action sell agri product for factory 1 and factory 2"
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerAgriProduct","aproduct1", "5"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerAgriProduct","aproduct2", "6"]}' $ORDERER_CONN_ARGS
echo "action make product from agri product"
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["makeProduct","111", "221", "product1", "10000", "5"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["makeProduct","112", "222", "product2", "20000", "6"]}' $ORDERER_CONN_ARGS
echo "action change to retailer"
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product1", "7"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product2", "8"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action sell to customer 1 and customer 2"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product1", "9"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product2", "10"]}' $ORDERER_CONN_ARGS
sleep 3
echo "get history of product1"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForProduct","product1"]}' $ORDERER_CONN_ARGS
echo "get history of product2"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForProduct","product2"]}' $ORDERER_CONN_ARGS
echo "done test"
|
SELECT title, author, year_of_publication, ISBN FROM books; |
<reponame>xing231224/mine-admin<filename>src/utils/jsonp.js<gh_stars>0
import Vue from 'vue'
import router from '../router'
import Cookies from 'js-cookie'
import VueJsonp from 'vue-jsonp'
const url = window.location.hostname
Vue.use(VueJsonp)
var jsonpUrl = 'http://192.168.3.11:9999/auth/sso/token/login'
var jsonpOut = 'http://192.168.3.11:9999/auth/sso/token/logout'
const TokenKey = 'Admin-Token'
// SSO token exchange via JSONP. `type === 'out'` hits the logout
// endpoint, anything else the login endpoint. On a successful login the
// returned token (res.msg) is stored in the Admin-Token cookie and the
// page reloads; logout always reloads. Request errors are swallowed.
export function getJurisdiction(token, type) {
var Url = ''
if (type === 'out') {
Url = jsonpOut
} else {
Url = jsonpUrl
}
Vue.jsonp(Url, {
token: token
}).then((res) => {
// NOTE(review): status is compared as the string '200' — confirm the
// backend really returns a string rather than a number here.
if (res.status === '200') {
if (type === 'out') {
location.reload()
} else {
Cookies.set(TokenKey, res.msg)
//Cookies.setDomain(url)
window.location.reload()
}
// window.location='http://192.168.2.243:9531/'
} else {
if (type === 'out') {
location.reload()
} else {
//router.push({ path: '/' })
}
//router.push({ path: '/login' })
}
}).catch(error => {
})
}
/* getJurisdiction()*/
/**
 * Fire a JSONP request to `url` carrying the given token and log the
 * response. Errors are intentionally swallowed (best-effort call).
 */
export function setInfo(url, token) {
  Vue.jsonp(url, { token: token })
    .then(res => {
      console.log(res)
    })
    .catch(() => {})
}
|
import { FormEvent } from "react";
import { signin } from "next-auth/client";
import { withApollo } from "../apollo";
import { useSessionQuery } from "../graphql/queries/Session";
import { useInsertPostMutation } from "../graphql/mutations/InsertPost";
import { PageLayout } from "../components/PageLayout";
import { PostsList } from "../components/PostsList";
// Page component: sign-in reminder (shown to anonymous users only),
// the post-submission form, and the unfiltered posts list.
const PostsPage = () => {
return (
<PageLayout>
<MaybeSignInReminder />
<h2>Submit Post</h2>
<SubmitPostForm />
<h2>Posts</h2>
<PostsList where={{}} />
</PageLayout>
);
};
// Renders a sign-in prompt for anonymous visitors; renders nothing when
// the session query reports a signed-in user.
const MaybeSignInReminder = () => {
const { user } = useSessionQuery();
if (user) return <></>;
return (
<blockquote>
<p>
<span role="img" aria-label="Info">
ℹ
</span>{" "}
Anonymous users can see everything & make anonymous posts, but cannot vote.
</p>
<p>
<button onClick={() => signin("google")}>Sign in with Google</button> to vote!
<span />
{/* span prevents unwanted styles from being applied to button. github.com/xz/new.css/pull/73 */}
</p>
<style jsx>{`
blockquote {
text-align: center;
}
`}</style>
</blockquote>
);
};
// Controlled-by-the-DOM post form: on submit, reads the title from the
// form data, fires the insert mutation, and resets the form. The submit
// button is disabled while the mutation is in flight.
const SubmitPostForm = () => {
const [insertPost, { loading }] = useInsertPostMutation();
function handleSubmit(event: FormEvent<HTMLFormElement>) {
event.preventDefault();
const form = event.currentTarget;
const formData = new window.FormData(form);
insertPost(formData.get("title") as string);
form.reset();
}
return (
<form onSubmit={handleSubmit}>
<input
name="title"
placeholder="Say something creative..."
aria-label="title"
type="text"
required
/>
<button type="submit" disabled={loading}>
Submit
</button>
<style jsx>{`
form {
display: flex;
}
input {
width: 100%;
margin-right: 0.5rem;
}
`}</style>
</form>
);
};
export default withApollo({ preload: true }, PostsPage);
|
<filename>AngularSpring/web/webapp/src/app/app-routing.module.ts<gh_stars>0
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { PersonListComponent } from './person-list/person-list.component';
import { PersonFormComponent } from './person-form/person-form.component';
import { FindPersonComponent } from './find-person/find-person.component';
import { from } from 'rxjs';
// Route table: list, create and search views for Person records.
const routes: Routes = [
{ path: 'people', component: PersonListComponent },
{ path: 'addPerson', component: PersonFormComponent },
{ path: 'find', component: FindPersonComponent}
];
// Registers the routes at the application root and re-exports
// RouterModule so the importing module gets the router directives.
@NgModule({
imports: [RouterModule.forRoot(routes)],
exports: [RouterModule]
})
export class AppRoutingModule { }
|
import java.util.*;
/**
 * Builds a Huffman tree for the characters of a sample string and prints
 * the resulting "symbol: code" table (left edge = '0', right edge = '1').
 */
public class HuffmanEncoding {
    public static void main(String[] args) {
        String s = "abbcccddddeeeee";
        char[] data = s.toCharArray();
        // Frequency table over the byte range.
        int[] freq = new int[256];
        for (char ch : data) {
            freq[ch]++;
        }
        // Min-heap on frequency: repeatedly merge the two rarest nodes.
        PriorityQueue<TreeNode> queue = new PriorityQueue<TreeNode>(Comparator.comparingInt(tn -> tn.freq));
        for (int i = 0; i < freq.length; i++) {
            if (freq[i] > 0) {
                queue.add(new TreeNode(i, freq[i]));
            }
        }
        while (queue.size() > 1) {
            TreeNode first = queue.poll();
            TreeNode second = queue.poll();
            TreeNode merged = new TreeNode(first.freq + second.freq);
            merged.left = first;
            merged.right = second;
            queue.add(merged);
        }
        StringBuilder sb = new StringBuilder();
        codes(queue.peek(), "", sb);
        System.out.println(sb.toString());
    }

    /**
     * Depth-first walk appending one "symbol: code" line per leaf.
     *
     * @param node current subtree root (may be null)
     * @param str  code accumulated on the path from the root
     * @param sb   output accumulator
     */
    public static void codes(TreeNode node, String str, StringBuilder sb) {
        if (node == null)
            return;
        if (node.left == null && node.right == null) {
            // BUG FIX: `data` is an int; without the cast the code point
            // is printed ("97: 0") instead of the symbol ("a: 0").
            sb.append((char) node.data).append(": ").append(str).append("\n");
            return;
        }
        codes(node.left, str + "0", sb);
        codes(node.right, str + "1", sb);
    }

    /** Tree node: leaves carry a symbol code point, internal nodes only a frequency. */
    static class TreeNode {
        int data;
        int freq;
        TreeNode left;
        TreeNode right;

        public TreeNode(int data, int freq) {
            this.data = data;
            this.freq = freq;
        }

        public TreeNode(int freq) {
            this.freq = freq;
        }
    }
}
Output:
a: 0
b: 10
c: 110
d: 1110
e: 1111 |
# Post a single event record to the collector's /affirmative/store endpoint.
# Args: 1=base URL, 2=environment name, 3=event name, 4=event data string.
target_url=$1
env_name=$2
event_name=$3
event_data=$4

# seconds since epoch, stringified into the payload
timestamp=$(date +%s)

# Assemble the JSON body; values are interpolated verbatim (no escaping).
payload="{\"events\": [{\"name\": \"$event_name\", \"env\": \"$env_name\", \"time\": \"$timestamp\", \"data\": \"$event_data\"}]}"

curl -X POST -H "Content-Type:application/json" -d "$payload" "$target_url/affirmative/store"
|
#!/usr/bin/env bash
# Base16 Brewer - Gnome Terminal color scheme install script
# Timothée Poisot (http://github.com/tpoisot)

# Overridable defaults: profile display name / slug and the external tools
# used (dconf for modern gnome-terminal, uuidgen for unique profile ids).
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Brewer Light"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-brewer-light"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# Write a single dconf key under $PROFILE_KEY.
# NOTE(review): "$type" is never set on the dconf code path (it is a local of
# the gconf-era gset() below), so the quoting branch appears dead — callers
# here pre-quote string values themselves. Confirm before relying on it.
dset() {
    local key="$1"; shift
    local val="$1"; shift
    if [[ "$type" == "string" ]]; then
        val="'$val'"
    fi
    "$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# because dconf still doesn't have "append"
# Append $val (single-quoted) to the dconf list stored at $key, removing any
# existing occurrence first so the entry is not duplicated.
dlist_append() {
    local key="$1"; shift
    local val="$1"; shift
    local entries="$(
        {
            "$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "'$val'"
        } | head -c-1 | tr "\n" ,
    )"
    "$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
    [[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
    if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
        # prefer a random UUID as the new profile's slug when uuidgen exists
        if which "$UUIDGEN" > /dev/null 2>&1; then
            PROFILE_SLUG=`uuidgen`
        fi
        # locate the default profile (or the first listed one) to clone from
        if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
            DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
        else
            DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
        fi
        DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
        PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
        # copy existing settings from default profile
        $DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
        # add new copy to list of profiles
        dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
        # update profile values with theme options
        dset visible-name "'$PROFILE_NAME'"
        # NOTE(review): the first palette write (dark variant, #0c0d0e
        # background ordering) is immediately overwritten by the second
        # (light variant); presumably leftover from the dark template —
        # confirm upstream before removing.
        dset palette "'#0c0d0e:#e31a1c:#31a354:#dca060:#3182bd:#756bb1:#80b1d3:#b7b8b9:#737475:#e31a1c:#31a354:#dca060:#3182bd:#756bb1:#80b1d3:#fcfdfe'"
        dset palette "'#fcfdfe:#e31a1c:#31a354:#dca060:#3182bd:#756bb1:#80b1d3:#b7b8b9:#737475:#e31a1c:#31a354:#dca060:#3182bd:#756bb1:#80b1d3:#0c0d0e'"
        dset background-color "'#fcfdfe'"
        dset foreground-color "'#515253'"
        dset bold-color "'#515253'"
        dset bold-color-same-as-fg "true"
        dset use-theme-colors "false"
        dset use-theme-background "false"
        unset PROFILE_NAME
        unset PROFILE_SLUG
        unset DCONF
        unset UUIDGEN
        exit 0
    fi
fi
# Fallback for Gnome 2 and early Gnome 3
# (only reached when dconf is missing or has no profiles: list)
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"

# Write one typed gconf key under $PROFILE_KEY.
gset() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift
    "$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
# Append $val to the gconf list at $key, de-duplicating any existing entry.
glist_append() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift
    local entries="$(
        {
            "$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "$val"
        } | head -c-1 | tr "\n" ,
    )"
    "$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"

# Apply the light-theme colors via gconf (mirrors the dconf path above).
gset string visible_name "$PROFILE_NAME"
gset string palette "#fcfdfe:#e31a1c:#31a354:#dca060:#3182bd:#756bb1:#80b1d3:#b7b8b9:#737475:#e31a1c:#31a354:#dca060:#3182bd:#756bb1:#80b1d3:#0c0d0e"
gset string background_color "#fcfdfe"
gset string foreground_color "#515253"
gset string bold_color "#515253"
gset bool bold_color_same_as_fg "true"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"

# Clean the environment variables used above.
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
/** @noSelfInFile */
export { };

// Ambient declarations for engine-provided globals (presumably the Dota 2
// scripting API, given CDOTA_BaseNPC — confirm against the game's API docs).
declare global {
    /** Debug-print helper; accepts any arguments. */
    function DebugPrint(...args: any[]): void;
    /** Debug-print helper for table contents; accepts any arguments. */
    function DebugPrintTable(...args: any[]): void;
    /** Print helper for table contents; accepts any arguments. */
    function PrintTable(...args: any[]): void;
    /**
     * Rolls a Pseudo Random chance. If failed, chance increases, otherwise chances are reset.
     * Numbers taken from https://gaming.stackexchange.com/a/290788
     */
    function RollPseudoRandom(base_chance: number, entity: CDOTA_BaseNPC): boolean;
}
|
<reponame>eirikb/domdom-deno
import { isProbablyPlainObject } from './dom-stower.ts';
import {
BaseTransformer,
Entries,
Entry,
ListenerCallbackWithType,
Pathifier,
} from '../data/index.ts';
import { Domdom } from 'domdom.ts';
import { React } from './types.ts';
const pathSymbol = Symbol('Path');
const proxiedSymbol = Symbol('Proxied');

// Builds a "path proxy": property access accumulates a key path instead of
// reading data. Special keys: $path -> joined path string, $x -> '*',
// $xx -> '**'. Accessing $$ arms a one-shot "hack" flag so the NEXT key is
// rewritten to '$'+key while the '$$' segment itself is popped off the path.
const p = (o, path: string[] = [], hack = false) => {
    // If o is already path-tagged, continue from its recorded path.
    const oldPath = (o || {})[pathSymbol];
    if (oldPath) path = oldPath;
    if (!o || !isProbablyPlainObject(o)) o = {};
    return new Proxy(o, {
        get: (target, key) => {
            if (hack) {
                path.pop();
                key = '$' + String(key);
                hack = false;
            }
            if (key === pathSymbol) return path;
            else if (key === '$path') return path.join('.');
            else if (key === '$x') key = '*';
            else if (key === '$xx') key = '**';
            else if (key === '$$') hack = true;
            return p(target[key], path.concat(String(key)), hack);
        },
    });
};
/**
 * Normalizes a path argument: plain strings pass through unchanged, while
 * path-proxy objects contribute their accumulated `$path` string.
 */
function pathus(path: string | PathOf): string {
    return typeof path === 'string' ? path : path.$path;
}
/**
 * Returns a one-level copy of `value`: plain objects and arrays are cloned
 * shallowly; any other value is returned as-is.
 */
function deregulate(value: any): any {
    if (isProbablyPlainObject(value)) {
        return { ...value };
    }
    return Array.isArray(value) ? value.slice() : value;
}
/**
 * Ergonomic facade over Domdom: exposes the same don/on/set/get/... API but
 * lets callers address data with typed path proxies (see pathOf) and hands
 * values back wrapped in write-through proxies, so plain property assignment
 * is forwarded to the underlying Domdom store.
 */
export class GodMode<T> {
  // Root write-through proxy over the whole store.
  public data: T;
  public React: React;
  private readonly domdom: Domdom;

  constructor(domdom: Domdom) {
    this.domdom = domdom;
    this.data = this.proxify({}) as T;
    this.React = domdom.React;
  }

  // Writes a (de-proxied) value at path; arrays are reset first so stale
  // trailing elements from a previously longer array do not survive.
  private _set = (path: string[], value: any) => {
    value = deregulate(value);
    const p = path.join('.');
    if (Array.isArray(this.domdom.get(p))) {
      this.domdom.set(p, []);
    }
    this.domdom.set(p, value);
  };

  private _unset = (path: string[]) => {
    this.domdom.unset(path.join('.'));
  };

  // Wraps objects/arrays in a proxy that mirrors every mutation (set/delete/
  // mutating method call) into the Domdom store at the recorded path.
  private proxify(o: any, path: string[] = []) {
    if (!(isProbablyPlainObject(o) || Array.isArray(o))) {
      return o;
    }
    if (o[proxiedSymbol]) return o; // already wrapped
    return new Proxy(o, {
      set: (target, key, value) => {
        const p = path.concat(String(key));
        this._set(p, value);
        target[key] = value;
        return true;
      },
      deleteProperty: (target, key) => {
        this._unset(path.concat(String(key)));
        return delete target[key];
      },
      get: (target, key) => {
        if (key === 'constructor') return target[key];
        else if (key === pathSymbol) return path;
        else if (key === proxiedSymbol) return true;
        else if (key === '$path') return path.concat(key).join('.');
        const value = target[key];
        if (typeof value === 'function') {
          // Method calls (e.g. Array#push) may mutate the target, so the
          // whole subtree is re-written to the store after the call.
          return (...args) => {
            const res = value.call(target, ...args);
            this._set(path, target);
            return res;
          };
        }
        return this.proxify(value, path.concat(String(key)));
      },
    });
  }

  // Like Domdom#don, but values flowing through the pathifier are wrapped in
  // write-through proxies first.
  don = (path: string | PathOf): Pathifier => {
    const pathAsString = pathus(path);
    const self = this;
    const pathifier = this.domdom.don(pathAsString);
    pathifier.addTransformer(
      new (class extends BaseTransformer {
        entries: Entries = new Entries();
        private proxify(entry: Entry): Entry {
          entry.value = self.proxify(
            deregulate(entry.value),
            entry.opts.path.split('.')
          );
          return entry;
        }
        add(index: number, entry: Entry): void {
          this.next?.add(index, this.proxify(entry));
        }
        remove(index: number, entry: Entry): void {
          this.next?.remove(index, entry);
        }
        update(oldIndex: number, index: number, entry: Entry): void {
          this.next?.update(oldIndex, index, this.proxify(entry));
        }
      })()
    );
    return pathifier;
  };

  trigger = (path: string | PathOf, value?: any) => {
    return this.domdom.trigger(pathus(path), value);
  };

  get = <T = any>(path?: string | PathOf): T | undefined => {
    if (!path) return this.domdom.get();
    return this.domdom.get(pathus(path));
  };

  set = (path: string | PathOf, value: any, byKey?: string) => {
    this.domdom.set(pathus(path), value, byKey);
  };

  unset = (path: string | PathOf) => {
    this.domdom.unset(pathus(path));
  };

  // Listener values are likewise proxied so mutations inside the callback
  // propagate back to the store.
  on = <T = any>(
    flags: string,
    path: string | PathOf,
    listener: ListenerCallbackWithType<T>
  ): string => {
    return this.domdom.on([flags, pathus(path)].join(' '), (value, opts) =>
      listener(this.proxify(deregulate(value), opts.path.split('.')), opts)
    );
  };

  init = (parent: HTMLElement, child?: HTMLElement) =>
    this.domdom.init(parent, child);

  // Returns a typed path proxy rooted at o (or at the store root).
  pathOf<X = T>(o?: X): PathOf<X> {
    return p(o) as PathOf<X>;
  }
}
/**
 * Mapped type describing the path proxies produced by GodMode#pathOf:
 * every property access yields another PathOf, plus the special navigation
 * members ($path, $, $x -> '*', $xx -> '**', $$ -> '$'-prefixed keys).
 */
export type PathOf<T = unknown> = {
  [P in keyof T]: PathOf<T[P]>;
} &
  (T extends Array<infer A>
    ? {
        $path: string;
        $: PathOf<A>;
        $x: PathOf<A>;
        $xx: PathOf<A>;
        $$: {
          [key: string]: PathOf<A>;
        };
        [index: number]: PathOf<A>;
      }
    : {
        $path: string;
        $: PathOf<T>;
        $x: PathOf<T>;
        $xx: PathOf<T>;
        $$: {
          [key: string]: PathOf<T>;
        };
      });
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const objects_1 = require("alcalzone-shared/objects");
/**
 * Returns a copy of `operation` with every value replaced by an inverted /
 * neutral counterpart: numbers become NaN, booleans are negated, everything
 * else becomes null. Keys are preserved.
 * (Looks like transpiled TypeScript output — prefer editing the .ts source.)
 */
function invertOperation(operation) {
    return objects_1.composeObject(objects_1.entries(operation).map(([key, value]) => {
        switch (typeof value) {
            case "number":
                return [key, Number.NaN];
            case "boolean":
                return [key, !value];
            default:
                return [key, null];
        }
    }));
}
exports.invertOperation = invertOperation;
|
<reponame>Dnpypy/java-oop<filename>src/academy/devonline/java/home_section001_classes/methods_dyna_array/DynaArrayAdd/DynaArrayAddTest.java
package academy.devonline.java.home_section001_classes.methods_dyna_array.DynaArrayAdd;

/**
 * Manual smoke test for the DynaArrayAdd#add overloads: single ints,
 * int[] batches, and another DynaArrayAdd instance.
 */
public class DynaArrayAddTest {

    public static void main(String[] args) {
        DynaArrayAdd target = new DynaArrayAdd();
        // six single-element appends: 0..5
        for (int value = 0; value <= 5; value++) {
            target.add(value);
        }
        // append a whole int[] batch
        target.add(new int[]{9, 8, 7, 6, 5, 4, 3, 2, 1});

        // append the contents of another DynaArrayAdd
        DynaArrayAdd other = new DynaArrayAdd();
        other.add(new int[]{-9, -8, -7, -6, -5, -4, -3, -2, -1, 0});
        target.add(other);

        target.add(99);
        target.add(98);
        target.add(97);

        // Expected contents:
        // [0, 1, 2, 3, 4, 5, 9, 8, 7, 6, 5, 4, 3, 2, 1, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 99, 98, 97]
        System.out.println(target.asString());
        System.out.println(target.count);

        // Reference array of the expected contents, for a length comparison.
        int[] expected = {0, 1, 2, 3, 4, 5, 9, 8, 7, 6, 5, 4, 3, 2, 1, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 99, 98, 97};
        System.out.println(expected.length);
    }
}
|
import type { DirSpec, FileSpec } from './interfaces';
/**
 * Convenience factory for a DirSpec record.
 *
 * @param name    directory name
 * @param files   contained file specs (defaults to none)
 * @param folders contained sub-directory specs (defaults to none)
 */
export function createDirSpec(
  name: string,
  files: FileSpec[] = [],
  folders: DirSpec[] = []
): DirSpec {
  const spec: DirSpec = { files, folders, name };
  return spec;
}
/**
 * Convenience factory for a FileSpec record.
 *
 * @param name     file name
 * @param template optional template used to produce the file's contents
 * @param raw      optional flag — presumably "emit template verbatim"; confirm
 */
export function createFileSpec(
  name: string,
  template?: string,
  raw?: boolean,
): FileSpec {
  const spec: FileSpec = { template, name, raw };
  return spec;
}
|
#!/bin/sh
# Releases binary artifacts to bintray.
#
# Dependencies:
#   $BINTRAY_USER
#   $BINTRAY_PASS
#   $RELEASE_TRIGGER
# The sbt-git plugin is going to pull info straight from git, but when building
# into container we leave the .git folder out to not invalidate layer caching,
# and mount it only for the release phase. However, the sbt-git plugin fails
# *silently* if it can't locate a git repository and just rolls back to a system
# timestamp as the version number. To avoid this, we want to manually verify the
# git directory is present first and fail if not.
#
# (We check relative to $PWD rather than this script path, since sbt will fail
# in other unexpected ways if you aren't already at project root, so better to
# just fail if we're not at project root.)
if [ ! -d "$PWD/.git" ]; then
    echo "ERROR: No git repo located for versioning, aborting..."
    exit 1
fi
# Check for BINTRAY variables used by sbt-bintray for authentication.
# (see https://github.com/sbt/sbt-bintray#credentials)
if [ -z "$BINTRAY_USER" ] || [ -z "$BINTRAY_PASS" ]; then
    echo "Missing BINTRAY_USER or BINTRAY_PASS environment variable! Exiting..."
    exit 1
fi
# Publish to Bintray. Will do nothing by default for safety unless overridden
# via RELEASE_TRIGGER=true.
# (releaseBoth is a project-defined sbt task — see the project's build.sbt.)
if [ "$RELEASE_TRIGGER" = "true" ]; then
    sbt releaseBoth
else
    echo "Not really releasing, set RELEASE_TRIGGER=true to go live."
fi
|
// Simple data holder with two auto-properties.
public class MyClass
{
    public int prop1 { get; set; }
    public int prop2 { get; set; }
}
// Populate a list with ten instances (prop1 = i, prop2 = i * 10).
// NOTE(review): these statements sit outside any method; they need C# 9
// top-level statements or an enclosing method to compile — confirm context.
List<MyClass> myList = new List<MyClass>();
for (int i = 0; i < 10; i++)
{
    myList.Add(new MyClass() { prop1 = i, prop2 = i * 10 });
}
use std::fs;
use std::path::Path;
/// Builds a report listing, for every immediate subdirectory of
/// `directory_path`, the recursive count of files it contains — one
/// "path: N files" line per subdirectory.
///
/// Unreadable directories are skipped silently (best-effort, matching the
/// original behaviour).
fn generate_file_count_report(directory_path: &str) -> String {
    let mut report = String::new();
    let path = Path::new(directory_path);
    if let Ok(entries) = fs::read_dir(path) {
        for entry in entries.flatten() {
            let entry_path = entry.path();
            if entry_path.is_dir() {
                let file_count = count_files_in_directory(&entry_path);
                // BUG FIX: the original used `write!` on a String without
                // `use std::fmt::Write`, which does not compile; build the
                // line with format!/push_str instead. Also dropped the
                // unused `subdirectory_name` binding.
                report.push_str(&format!("{}: {} files\n", entry_path.display(), file_count));
            }
        }
    }
    report
}

/// Recursively counts regular files under `directory_path`; unreadable
/// directories contribute zero.
fn count_files_in_directory(directory_path: &Path) -> u32 {
    let mut file_count = 0;
    if let Ok(entries) = fs::read_dir(directory_path) {
        for entry in entries.flatten() {
            let p = entry.path();
            if p.is_file() {
                file_count += 1;
            } else if p.is_dir() {
                file_count += count_files_in_directory(&p);
            }
        }
    }
    file_count
}
/// Demo entry point: prints the per-subdirectory file-count report for
/// "main_directory" (resolved relative to the current working directory).
fn main() {
    let report = generate_file_count_report("main_directory");
    println!("{}", report);
}
function (page, done) {
let url = page.getURL("last")
if(!url){ done(); return; }
let u = new URL(url);
let r = u.origin+'/robots.txt'
let rl = "<a href='"+r+"' target='_blank'>"+u.origin+'/robots.txt'+"</a>";
let msg = "";
let type = "info";
//TODO: set the robots.txt body of that domain in a global map so to prevent unnecessary fetched
let count = function (needle, haystack)
{
if(needle && haystack)
{
let nr = (haystack.match(new RegExp(needle, 'gi')) || []).length;
return nr;
}
else
{
return 0;
}
}
this.fetch(r, { responseFormat: 'text' }, (response) => {
if(response.redirected == true )
{
type="warning";
msg=rl+" redirects to "+"<a href='"+response.url+"' target='_blank'>"+response.url+"</a>";
}
else
{
if(response.status===200)
{
if((response.body.includes('<div')) || (response.body.includes('<body')) || (response.body.includes('<DIV')) || (response.body.includes('href="')) || (response.body.includes("href='")))
{
type="error";
msg= rl+"returns HTTP "+response.status+", but looks like it is an HTML page.";
}
else if(response.body.trim()==="")
{
type="info";
msg=rl+" returns HTTP "+response.status+" but no content (<b>blank robots.txt</b>)";
}
else
{
type="info";
msg=rl+" returns HTTP "+response.status;
let nr_disallow = count('disallow\:',response.body);
let nr_allow = count('allow\:',response.body)-nr_disallow;
let nr_noindex = count('noindex\:',response.body);
let nr_user_agent = count('user-agent\:',response.body);
let nr_crawl_delay = count('crawl-delay\:',response.body);
let nr_host = count('host\:',response.body);
let cmsgA = [];
if(nr_user_agent>0) {cmsgA.push(nr_user_agent+"*User-agent ");}
if(nr_disallow>0) {cmsgA.push(nr_disallow+"*Disallow ");}
if(nr_allow>0) {cmsgA.push(nr_allow+"*Allow ");}
if(nr_noindex>0) {cmsgA.push(nr_noindex+"*Noindex ");}
if(nr_crawl_delay>0) {cmsgA.push(nr_crawl_delay+"*Crawl-delay ");}
if(nr_host>0) {cmsgA.push(nr_host+"*Host ");}
if(cmsgA.length>0){msg = msg+" ("+cmsgA.join(', ')+")";}
}
}
else
{
type="warning";
msg=rl+" returns HTTP "+response.status;
}
}
if(msg!="")
{
msg = msg + " <a href='https://www.google.com/webmasters/tools/robots-testing-tool?hl=en&authuser=0&siteUrl="+u.origin+"/' target='_blank'>GSC</a>";
done(this.createResult('SITE', msg, type));
}
done();
});
} |
<reponame>Bleuh/mini-api
import { model } from "mongoose";
import { IProduct } from "./products.types";
import ProductSchema from "./products.schema";
// Mongoose model bound to the "product" collection, typed by IProduct.
const Product = model<IProduct>("product", ProductSchema);
export default Product;
|
def rotate_array(arr, k):
    """Rotate ``arr`` right by ``k`` positions in place and return it.

    Equivalent to moving the last element to the front ``k`` times, but in
    O(n) instead of O(n*k), and safe for empty lists and k >= len(arr)
    (the original crashed with IndexError on an empty list).
    """
    n = len(arr)
    if n == 0:
        return arr
    k %= n  # rotations repeat with period n
    if k:
        # slice-assignment keeps the rotation in place for callers
        arr[:] = arr[-k:] + arr[:-k]
    return arr

print(rotate_array([1,2,3,4,5],2)) # prints [4,5,1,2,3]
<reponame>flexiooss/poom-services<gh_stars>0
const test1 = require('./Test_givenFormatTime__giveDate__thenGetError')
const test2 = require('./Test_givenFormatTime__giveDateTime__thenGetError')
const test3 = require('./Test_givenFormatTime__giveFloat__thenGetError')
const test4 = require('./Test_givenFormatTime__giveInt__thenGetError')
const test5 = require('./Test_givenFormatTime__giveString__thenGetError')
const test6 = require('./Test_givenFormatTime__giveTime__thenGetError')
const test7 = require('./Test_givenFormatTimeEN__giveTime__thenGetError')
const test8 = require('./Test_givenFormatTimeFR__giveTime__thenGetError')
// Aggregated list of the time-format validation test modules required above,
// consumed by the suite runner.
export const testList = [
    test1, test2, test3, test4, test5, test6, test7, test8
]
|
import * as React from 'react';
import ScreenPropsContext from './ScreenPropsContext';
import useCompatNavigation from './useCompatNavigation';
/**
 * Compatibility wrapper: renders the configured screen component with the
 * compat navigation object and the shared screenProps taken from context.
 * (The sourceMappingURL below suggests this is transpiled output — prefer
 * editing the TypeScript source.)
 */
function ScreenComponent(props) {
    const navigation = useCompatNavigation();
    const screenProps = React.useContext(ScreenPropsContext);
    return React.createElement(props.component, {
        navigation: navigation,
        screenProps: screenProps
    });
}
// memo: re-render only when props change.
export default React.memo(ScreenComponent);
//# sourceMappingURL=CompatScreen.js.map |
#!/bin/bash
#
# Copyright (c) 2020 Nordic Semiconductor ASA
#
# SPDX-License-Identifier: Apache-2.0
#
# Run the test suite of each sub-project; abort on the first failure.
for dir in 'cbor_decode/' 'cbor_encode/' ;
do
    pushd "$dir"
    if ! ./test.sh; then
        popd && exit 1
    fi
    popd
done
|
<reponame>sillyhong/whongjiagou-learn
// process.send() actually takes two parameters: the first is any value, the
// second may only be an http server / net server / socket handle.
let c = require('child_process');
let http = require('http');
let os = require('os');

// Parent owns the listening socket; the handle is shared with each worker.
let server = http.createServer(function (req, res) {
    res.setHeader('Content-Type', 'text/html;charset=utf8');
    res.end('请求在父进程被处理');
});
server.listen(8080);

// BUG FIX: os.cpus is a function, so `os.cpus.length` is the function's
// arity (0) and the loop never ran — no worker was ever forked. Call it to
// get the actual core count.
for (let i = 0; i < os.cpus().length; i++) {
    let p1 = c.fork('server.js', [], {
        cwd: __dirname
    });
    // Hand the listening server to the child so it can accept connections.
    p1.send('server', server);
}
|
from datetime import datetime, timezone

def isotime(at=None):
    """Stringify time in ISO 8601 format, normalized to UTC.

    Args:
        at: datetime to format; defaults to the current UTC time. Naive
            datetimes are assumed to already be in UTC.

    Returns:
        str: the time as 'YYYY-MM-DDTHH:MM:SSZ'.
    """
    if not at:
        at = datetime.now(timezone.utc)  # tz-aware now (utcnow() is naive and deprecated)
    if not at.tzinfo:  # If timezone information is not provided, assume UTC
        at = at.replace(tzinfo=timezone.utc)
    # BUG FIX: the hard-coded 'Z' suffix was wrong for aware datetimes in
    # other timezones; convert to UTC before formatting.
    at = at.astimezone(timezone.utc)
    return at.strftime('%Y-%m-%dT%H:%M:%SZ')
<reponame>ricardodantas/website-carbon-calculator<filename>src/lib/constants.ts
// Model constants for the website carbon calculator.
// NOTE(review): the figures presumably follow a published web-carbon
// methodology (e.g. Sustainable Web Design) — confirm sources before tuning.
export const KWG_PER_GB = 1.805;
export const RETURNING_VISITOR_PERCENTAGE = 0.75;
export const FIRST_TIME_VIEWING_PERCENTAGE = 0.25;
// Fraction of page data assumed re-downloaded on a repeat visit.
export const PERCENTAGE_OF_DATA_LOADED_ON_SUBSEQUENT_LOAD = 0.02;
// Carbon intensity per kWh: grid average vs. renewable supply.
export const CARBON_PER_KWG_GRID = 475;
export const CARBON_PER_KWG_RENEWABLE = 33.4;
// Split of energy use between datacenter and transmission/end-user devices.
export const PERCENTAGE_OF_ENERGY_IN_DATACENTER = 0.1008;
export const PERCENTAGE_OF_ENERGY_IN_TRANSMISSION_AND_END_USER = 0.8992;
export const CO2_GRAMS_TO_LITRES = 0.5562;
// External services used to measure page weight and check green hosting.
export const GOOGLE_PAGESPEED_API_ENDPOINT =
  'https://www.googleapis.com/pagespeedonline/v5/runPagespeed';
export const GREEN_FOUNDATION_API_ENDPOINT =
  'https://api.thegreenwebfoundation.org/greencheck';
|
<gh_stars>0
package main
import (
"fmt"
"log"
"github.com/dmies/adventOfGo/filehandler"
)
// Entry point: parses ./input.txt as a number list via the project's
// filehandler package and prints the result; any read/parse error is fatal.
func main() {
	program, err := filehandler.ImportNumberList("./input.txt")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("program %v", program)
}
|
#!/bin/bash
# Container provisioning script: installs PostgreSQL 11 + Citus from
# yum.postgresql.org on an EL7 base, sets up a dummy SSL cert, symlinks the
# server binaries into PATH, and installs the Python client requirements.
# NOTE(review): PGVER is declared but the package names below hard-code
# "11"; the variable is only used in the commented-out postgis line.
PGVER=11
# Echo each command before running it (simple trace for provisioning logs).
trap 'echo "# $BASH_COMMAND"' DEBUG
# setup yum to pull PostgreSQL from yum.postgresql.org
yum -y -q install https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm
# install citus repo for up-to-date citus
#cp /scripts/config_file.repo /etc/yum.repos.d/citus-community.repo
# install some basics
yum -y -q install readline-devel
yum -y -q install hostname
yum -y -q install epel-release
# install postgresql and a bunch of accessories
yum -y -q install postgresql11
yum -y -q install postgresql11-server
yum -y -q install postgresql11-contrib
yum -y -q install postgresql11-devel postgresql11-libs
yum -y -q install python36-psycopg2
yum -y -q install citus_11
# set up SSL certs
yum -y -q install openssl openssl-devel
sh /etc/ssl/certs/make-dummy-cert /etc/ssl/certs/postgres.cert
chown postgres:postgres /etc/ssl/certs/postgres.cert
# put binaries in postgres' path
ln -s /usr/pgsql-11/bin/pg_ctl /usr/bin/
ln -s /usr/pgsql-11/bin/pg_config /usr/bin/
ln -s /usr/pgsql-11/bin/pg_controldata /usr/bin/
ln -s /usr/pgsql-11/bin/initdb /usr/bin/
ln -s /usr/pgsql-11/bin/postgres /usr/bin/
# install extensions
#yum -y -q install postgresql-${PGVER}-postgis-2.1 postgresql-${PGVER}-postgis-2.1-scripts
# install python requirements
yum -y -q install python36-pip
yum -y -q install python36-devel
pip3 install -U requests
# install WAL-E
# pip install -U six
# pip install -U wal-e
# yum -y -q install daemontools
# yum -y -q install lzop pv
# clean up yum cache to shrink image
yum clean all
|
#!/usr/bin/env bash
# Print CLI usage for the cluster launcher (flags parsed by parse_args below).
function help() {
    echo "USAGE: $0 [options]"
    echo ""
    echo "OPTIONS:"
    echo "  -a/--nagents            # agents            (odd integer      default: 1))"
    echo "  -c/--ncoordinators      # coordinators      (integer          default: 1))"
    echo "  -d/--ndbservers         # db servers        (integer          default: 2))"
    echo "  -t/--transport          Protocol            (ssl|tcp          default: tcp)"
    echo "  -j/--jwt-secret         JWT-Secret          (string           default: )"
    echo "  --log-level-agency      Log level (agency)  (string           default: )"
    echo "  --log-level-cluster     Log level (cluster) (string           default: )"
    echo "  -l/--log-level          Log level           (string           default: )"
    echo "  -i/--interactive        Interactive mode    (C|D|R            default: '')"
    echo "  -x/--xterm              XTerm command       (default: xterm)"
    echo "  -o/--xterm-options      XTerm options       (default: --geometry=80x43)"
    echo "  -b/--offset-ports       Offset ports        (default: 0, i.e. A:4001, C:8530, D:8629)"
    echo "  -q/--source-dir         ArangoDB source dir (default: .)"
    echo "  -B/--bin-dir            ArangoDB binary dir (default: ./build)"
    echo "  -O/--ongoing-ports      Ongoing ports       (default: false)"
    echo "  --rr                    Run arangod with rr (true|false       default: false)"
    echo "  --cluster-init          Use cluster-init dir (default: false)"
    echo "  --auto-upgrade          Use for upgrade     (default: false)"
    echo "  -e/--encryption-secret  Encryption at rest secret (string  default: )"
    echo "  -E/--enable-hotbackup   Disable hot backup  (default          default: true)"
    echo ""
    echo "EXAMPLES:"
    echo "  $0"
    echo "  $0 -a 1 -c 1 -d 3 -t ssl"
    echo "  $0 -a 3 -c 1 -d 2 -t tcp -i C"
}
# defaults
# Topology and logging defaults, overridable by the flags parsed below.
NRAGENTS=1
NRDBSERVERS=3
NRCOORDINATORS=1
POOLSZ=""
TRANSPORT="tcp"
LOG_LEVEL="INFO"
LOG_LEVEL_AGENCY="INFO"
LOG_LEVEL_CLUSTER="INFO"
# Terminal emulator used for interactive mode (honor caller's environment).
if [ -z "$XTERM" ] ; then
    XTERM="x-terminal-emulator"
fi
if [ -z "$XTERMOPTIONS" ] ; then
    XTERMOPTIONS=" --geometry=80x43 -e "
fi
BUILD="./build"
JWT_SECRET=""
PORT_OFFSET=0
SRC_DIR="."
USE_RR="false"
ENCRYPTION_SECRET=""
ENABLE_HOTBACKUP="true"
# Parse CLI flags into the global configuration variables above. Each case
# arm consumes the flag's VALUE with its own `shift`; the `shift` at the
# bottom of the loop consumes the FLAG itself.
parse_args(){
    while [[ -n "$1" ]]; do
        case "${1}" in
            # --nagents accepted as an alias to match the help text
            -a|--agency-size|--nagents)
                NRAGENTS=${2}
                shift
                ;;
            -c|--ncoordinators)
                NRCOORDINATORS=${2}
                shift
                ;;
            -d|--ndbservers)
                NRDBSERVERS=${2}
                shift
                ;;
            -t|--transport)
                TRANSPORT=${2}
                shift
                ;;
            -l|--log-level)
                LOG_LEVEL=${2}
                shift
                ;;
            --log-level-agency)
                LOG_LEVEL_AGENCY=${2}
                shift
                ;;
            --log-level-cluster)
                LOG_LEVEL_CLUSTER=${2}
                shift
                ;;
            -i|--interactive)
                INTERACTIVE_MODE=${2}
                shift
                ;;
            -j|--jwt-secret)
                JWT_SECRET=${2}
                shift
                ;;
            -e|--encryption-secret)
                ENCRYPTION_SECRET=${2}
                shift
                ;;
            -E|--enable-hotbackup)
                ENABLE_HOTBACKUP=${2}
                shift
                ;;
            # --source-dir accepted as an alias to match the help text
            -q|--src-dir|--source-dir)
                SRC_DIR=${2}
                shift
                ;;
            -x|--xterm)
                XTERM=${2}
                shift
                ;;
            -o|--xterm-options)
                XTERMOPTIONS=${2}
                shift
                ;;
            # --offset-ports accepted as an alias to match the help text
            -b|--port-offset|--offset-ports)
                PORT_OFFSET=${2}
                shift
                ;;
            -h|--help)
                help
                exit 1
                ;;
            -B|--build)
                BUILD=${2}
                shift
                ;;
            -O|--ongoing-ports)
                ONGOING_PORTS=${2}
                shift
                ;;
            --cluster-init)
                # BUG FIX: this flag takes no value, but the case arm used to
                # `shift` anyway, so combined with the shift at the bottom it
                # swallowed the NEXT flag on the command line.
                ;;
            --auto-upgrade)
                AUTOUPGRADE=${2}
                shift
                ;;
            --rr)
                USE_RR=${2}
                if [ "$USE_RR" != "false" ] && [ "$USE_RR" != "true" ] ; then
                    echo 'Invalid parameter: '\
                        '`--rr` expects `true` or `false`, but got `'"$USE_RR"'`' \
                        >&2
                    help
                    exit 1
                fi
                shift
                ;;
            *)
                echo "Unknown parameter: ${1}" >&2
                help
                exit 1
                ;;
        esac

        if ! shift; then
            echo 'Missing parameter argument.' >&2
            return 1
        fi
    done
}
|
def forward(self, x):
    """Attention-weighted embedding of a [1, n] batch of token ids.

    NOTE(review): assumes ``self.wordvecs`` and ``self.pfc1..3`` are
    callable sub-modules configured elsewhere on this class — confirm
    against the (not shown) class definition. Output is reshaped to
    [1, -1, 232].
    """
    # Helper defined BEFORE first use (the original defined it after the
    # call site, which raised NameError at runtime). Also dropped the dead
    # `symbolCount` variable.
    def attention(query, vectors):
        # dot-product attention of the projected query over the word vectors
        attn_scores = torch.matmul(query, vectors.transpose(0, 1))
        attn_weights = F.softmax(attn_scores, dim=-1)
        return torch.matmul(attn_weights, vectors)

    X = torch.tensor(x)
    wvectors = self.wordvecs(X)[0]
    q = self.pfc3(self.pfc2(self.pfc1(wvectors)))
    embeds = attention(q, wvectors)
    embeds = torch.reshape(embeds, [1, -1, 232])
    # BUG FIX: the original had no return statement, so forward() yielded None.
    return embeds
#!/bin/bash
#
# Copyright IBM Corp All Rights Reserved
#
# SPDX-License-Identifier: Apache-2.0
#
# Exit on first error, print all commands.
set -e
help()
{
    echo "Usage: $0 [command]"
}

# Require exactly one argument (the teardown mode).
if [ $# -ne 1 ]
then
    help
    exit 0
fi

CLEAN=$1

# "clean" additionally wipes the persisted ledger/state volumes.
# FIX: quote the variable and use POSIX `=` instead of the unquoted `==`
# comparison, which breaks on empty/whitespace arguments under `set -e`.
if [ "${CLEAN}" = "clean" ]; then
    echo "Clean teardown"
    cd ../volumes
    rm -rf peer*.*.exchange.com
    rm -rf zookeeper*_*
    rm -rf kafka*_*
    rm -rf couchdb*
    rm -rf orderer*.exchange.com
fi

# Stop and remove every container, then remove chaincode dev-* images.
docker stop $(docker ps -aq)
docker rm $(docker ps -aq)
docker rmi $(docker images dev-* -q)
import {Component, OnDestroy, OnInit, QueryList, ViewChildren} from '@angular/core';
// import {Episode} from "../../entity/episode";
import {HomeChild, HomeService} from "../home.service";
import {Bangumi} from "../../entity/bangumi";
import {FAVORITE_LABEL} from '../../entity/constants';
import {Subscription} from 'rxjs';
import {Announce} from '../../entity/announce';
import {PersistStorage} from '../../user-service/persist-storage';
import {SwiperComponent, SwiperConfigInterface} from 'ngx-swiper-wrapper';
// localStorage key remembering the user's preferred bangumi type filter.
const BANGUMI_TYPE_KEY = 'default_bangumi_type';

// Banner carousel settings: auto-advance every 5s, one slide at a time,
// looping, with bullet pagination.
const DEFAULT_SWIPER_CONFIG: SwiperConfigInterface = {
    autoplay: {
        delay: 5000,
    },
    direction: 'horizontal',
    slidesPerView: 1,
    scrollbar: false,
    pagination: {
        el: '.swiper-pagination',
        type: 'bullets',
    },
    loop: true
};
@Component({
    selector: 'default-component',
    templateUrl: './default.html',
    styleUrls: ['./default.less']
})
/**
 * Home landing view: shows the on-air bangumi list (filtered by type) plus
 * banner/bangumi announcements in an auto-playing swiper.
 */
export class DefaultComponent extends HomeChild implements OnInit, OnDestroy {
    private _subscription = new Subscription();

    @ViewChildren('swiper')
    public Grids: QueryList<SwiperComponent>;

    // NOTE(review): this field is never assigned anywhere in this class, so
    // onSwiperHover() below would dereference undefined — presumably it was
    // meant to come from Grids.first (or a @ViewChild). Confirm and fix.
    private Swiper: SwiperComponent;

    // recentEpisodes: Episode[];
    isLoading = false;
    onAirBangumi: Bangumi[];
    bangumiType = 1001; // 1001 = CN; 1002 = RAW; -1 = ALL
    FAVORITE_LABEL = FAVORITE_LABEL;
    announce_in_banner: Announce[];
    announce_in_bangumi: Announce[];
    swiper_config = DEFAULT_SWIPER_CONFIG;

    constructor(homeService: HomeService, private _persistStorage: PersistStorage) {
        super(homeService);
    }

    // Switch the type filter, persist the choice, and reload the list.
    changeBangumiType(type: number) {
        this.isLoading = true;
        this.bangumiType = type;
        this._persistStorage.setItem(BANGUMI_TYPE_KEY, `${type}`);
        this.getOnAir();
    }

    // Fetch the currently-airing bangumi for the active type filter.
    getOnAir() {
        this._subscription.add(
            this.homeService.onAir(this.bangumiType)
                .subscribe(
                    (bangumiList: Bangumi[]) => {
                        this.onAirBangumi = bangumiList;
                        this.isLoading = false;
                    },
                    error => {
                        // console.log(error);
                        this.isLoading = false;
                    }
                )
        );
    }

    // Pause the banner autoplay while the pointer is over it.
    onSwiperHover(hover: boolean) {
        if (hover) {
            this.Swiper.directiveRef.stopAutoplay();
        } else {
            this.Swiper.directiveRef.startAutoplay();
        }
    }

    ngOnInit(): void {
        // this.homeService.recentEpisodes()
        //     .subscribe(
        //         (episodeList: Episode[]) => {
        //             this.recentEpisodes = episodeList;
        //         },
        //         error => console.log(error)
        //     );
        // Restore the persisted type filter before the first fetch.
        let defaultBangumiType = this._persistStorage.getItem(BANGUMI_TYPE_KEY, null);
        if (defaultBangumiType !== null) {
            this.bangumiType = parseInt(defaultBangumiType, 10);
        }
        this.getOnAir();
        // Split announcements by display position (banner vs. bangumi list).
        this._subscription.add(
            this.homeService.listAnnounce()
                .subscribe((announce_list) => {
                    this.announce_in_banner = announce_list.filter((announce) => {
                        return announce.position === Announce.POSITION_BANNER;
                    });
                    this.announce_in_bangumi = announce_list.filter(announce => {
                        return announce.position === Announce.POSITION_BANGUMI;
                    });
                })
        );
    }

    ngOnDestroy(): void {
        this._subscription.unsubscribe();
    }
}
|
<gh_stars>0
'''
'''
import os
import json
import sys
import emoji
import json
import os
import string
import sys
def remove_emoji(text):
    """Strip all emoji from ``text`` using the emoji package's regexp."""
    return emoji.get_emoji_regexp().sub(u'', text)
def clean_text(the_text):
    """Lower-case ``the_text``, strip punctuation and return its tokens.

    Apostrophes are deleted ("world's" -> "worlds"); every other punctuation
    character becomes a space and therefore splits words.
    """
    # Map each punctuation char to '' (apostrophe) or ' ' (everything else).
    trans_dict = {key: ('' if key == "'" else ' ') for key in string.punctuation}
    text_punct = str.maketrans(trans_dict)
    # The original lower-cased twice and did a redundant split/join pass; one
    # pass is equivalent because the final split() collapses whitespace runs.
    return the_text.lower().translate(text_punct).split()
if __name__ == '__main__':
    # argv[1]: source tree of JSON-lines files of YouTube API payloads;
    # argv[2]: destination directory mirroring the file names.
    dirname = sys.argv[1]
    dest = sys.argv[2]
    # NOTE(review): these counters are never incremented or reported.
    count = 0
    reply_count = 0
    for root, dirs, files in os.walk(dirname):
        for filename in files:
            print(filename)
            full_path = os.path.join(root, filename)
            dest_path = os.path.join(dest, filename)
            cmts = []
            with open(full_path) as handle:
                for new_line in handle:
                    the_payload = json.loads(new_line)
                    the_text = ''
                    # Top-level comments and replies keep their text in
                    # different places in the API payload.
                    if the_payload['kind'] == 'youtube#commentThread':
                        the_text = the_payload['snippet']['topLevelComment']['snippet']['textOriginal']
                    elif the_payload['kind'] == 'youtube#comment':
                        the_text = the_payload['snippet']['textOriginal']
                    # Attach the normalized token list alongside the raw data.
                    cleaned_toks = clean_text(the_text)
                    the_payload['cleaned_tokens'] = cleaned_toks
                    cmts.append(the_payload)
            # Append ('a') so repeated runs accumulate into the same file.
            with open(dest_path, 'a') as handle:
                for cmt in cmts:
                    handle.write(json.dumps(cmt))
                    handle.write('\n')
|
#!/usr/bin/bash
# this script requires mu2etools and dhtools be setup
# $1 is the name of the digi (ie CeEndpointMixTriggered, etc) file.
# $2 is the dataset description (ie MDC2020).
# $3 is the campaign version of the input (digi) file.
# $4 is the campaign version of the output (reco) file.
# $5 is the database purpose
# $6 is the database version
# $7 is the number of input collections to merge (merge factor)
# With no arguments, print usage and exit.
if [[ $# -eq 0 ]] ; then
    usage='Usage:
 gen_Reco.sh [primaryName] [datasetDescription] [digiInput] \
 [recoOutput] [purpose] [version] [mergeFactor]

 This script will produce the fcl files needed for a mixing stage. It
 is necessary to provide, in order:
 - the name of the primary [primaryName]
 - the dataset description [datasetDescription],
 - the campaign version of the input digi file [digiInput],
 - the campaign version of the output reco file [recoOutput],
 - the name of the DB purpose (perfect, best, startup) [purpose]
 - the DB version [version]
 - the number of input collections to merge into 1 output [mergeFactor]

 Example:
 gen_Reco.sh CeEndpointMixTriggered MDC2020 k m perfect v1_0 10
 This will produce the fcl files for a reco stage
 on CeEndpointMixTriggered digis, merging 10 inputs per output. The output
 files will have the MDC2020m description.'
    echo "$usage"
    exit 0
fi

# Resolve the input digi dataset to root-schema file locations.
samweb list-file-locations --schema=root --defname="dig.mu2e.$1.$2$3.art" | cut -f1 > Digis.txt

# Build the template fcl: base reco config plus the DB purpose/version.
echo '#include "Production/JobConfig/reco/Reco.fcl"' > template.fcl
echo 'services.DbService.purpose:' $5 >> template.fcl
echo 'services.DbService.version:' $6 >> template.fcl

generate_fcl --dsowner=mu2e --override-outputs --auto-description --embed template.fcl --dsconf "$2$4_$5_$6" \
    --inputs "Digis.txt" --merge-factor=$7

# Rename generate_fcl's numbered output dirs to descriptive names.
for dirname in 000 001 002 003 004 005 006 007 008 009; do
    if test -d $dirname; then
        echo "found dir $dirname"
        rm -rf $1Reco$4_$dirname
        mv $dirname $1Reco$4_$dirname
    fi
done
|
package ctag.exception;

import ctag.CTagInput;

/**
 * Thrown when the {@link CTagInput} is asked for bytes while the end is
 * already reached.
 *
 * @since 1.1
 */
public class EndException extends CTagException {
    /**
     * @param message detail message describing the premature end of input
     */
    public EndException( String message ) {
        super( message );
    }
}
|
package core.config;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import core.AppContext;
import core.AppContext.Profil;
import core.exceptions.FatalException;
/**
 * Manages the application configuration and also holds the current
 * {@link AppContext}. Distributed throughout the whole application.
 * Must not be instantiated; only contains {@code static} methods.
 */
public class AppConfig {
    // Base folder where the .properties resources live.
    public static final String RES_FOLDER = "ressources/";
    // Merged view: profile-specific properties plus app.properties entries.
    private static final Properties properties = new Properties();
    // Entries from app.properties only (see getMapAppProps()).
    private static final Properties appProps = new Properties();
    // Current application profile, set by loadProperties().
    private static Profil mode;
/**
* Charge le contenu du .properties. Recupere le contexte.
*
* @param mode - Le profil actuel de l'App.
* @throws IOException - Si le fichier n'est en accessible LECTURE.
* @throws FatalException - Si n'a pas pu charger le .properties.
*/
public static void loadProperties(Profil mode) throws IOException {
AppConfig.mode = mode;
try (InputStream input =
new BufferedInputStream(new FileInputStream(AppConfig.RES_FOLDER + mode.getPropUrl()))) {
AppConfig.properties.load(input);
} catch (IOException exception) {
throw new FatalException("Could not load " + AppConfig.mode.getPropUrl());
}
try (InputStream input =
new BufferedInputStream(new FileInputStream(AppConfig.RES_FOLDER + "app.properties"))) {
AppConfig.appProps.load(input);
AppConfig.properties.load(input);
} catch (IOException exception) {
throw new FatalException("Could not load app.properties");
}
}
public static Properties getProps() {
return properties;
}
/**
* @return le {@link Profil} actuel.
*/
public static Profil getProfil() {
return AppConfig.mode;
}
/**
* Envoie la valeur se trouvant dans le .properties.
*
* @param cle - La clé servant d'identifiant pour cettte valeur.
* @return La valeur correspondant à la key
* @throws FatalException - Si la key fournie n'est pas le .properties
*/
public static String getValueOf(String cle) {
String returnValue = AppConfig.properties.getProperty(cle);
if (returnValue == null) {
throw new FatalException("Could not find '" + cle + "' in " + AppConfig.mode.getPropUrl());
}
return returnValue;
}
/**
* @param cle - La clé servant d'identifiant pour cettte valeur.
* @see AppConfig#getValueOf(String)
* @return Un conversion de la valeur en Int.
*/
public static int getInt(String cle) {
String val = AppConfig.getValueOf(cle);
try {
return Integer.parseInt(val);
} catch (NumberFormatException exception) {
throw new FatalException("There is no integer matching the key '" + cle + "' in "
+ AppConfig.mode.getPropUrl(), exception);
}
}
/**
* Récolte tous les contenus de l'app.properties.
*
* @return une map contenant le contenu d' app.properties
*/
public static Map<String, String> getMapAppProps() {
return AppConfig.appProps.entrySet().stream()
.collect(Collectors.toMap(e -> (String) e.getKey(), e -> (String) e.getValue()));
}
/**
* Envoie la valeur se trouvant dans le app.properties.
*
* @param cle - La clé servant d'identifiant pour cettte valeur.
* @return La valeur correspondant à la key
* @see AppConfig#getValueOf(String)
* @throws FatalException - Si la key fournie n'est pas le .properties
*/
public static String getAppValueOf(String cle) {
String returnValue = AppConfig.appProps.getProperty(cle);
if (returnValue == null) {
throw new FatalException("Could not find '" + cle + "' in " + AppConfig.mode.getPropUrl());
}
return returnValue;
}
}
|
from flask import Flask, request, jsonify

app = Flask(__name__)
# In-memory user store; contents are lost on restart and not process-safe.
app.users = []


@app.route('/users', methods=['GET'])
def get_users():
    """Return the full list of users as JSON."""
    return jsonify(app.users)


@app.route('/users', methods=['POST'])
def create_user():
    """Append the posted JSON payload as a new user."""
    data = request.get_json()
    if data is None:
        # FIX: previously a missing/invalid JSON body stored None in the list.
        return jsonify({'message': 'invalid JSON body'}), 400
    app.users.append(data)
    return jsonify({'message': 'user created'})


@app.route('/users/<int:index>', methods=['PUT'])
def update_user(index):
    """Replace the user at `index` with the posted JSON payload."""
    data = request.get_json()
    if data is None:
        return jsonify({'message': 'invalid JSON body'}), 400
    if not 0 <= index < len(app.users):
        # FIX: an out-of-range index used to raise IndexError -> HTTP 500.
        return jsonify({'message': 'user not found'}), 404
    app.users[index] = data
    return jsonify({'message': 'user updated'})


@app.route('/users/<int:index>', methods=['DELETE'])
def delete_user(index):
    """Delete the user at `index`."""
    if not 0 <= index < len(app.users):
        # FIX: same IndexError -> 500 issue as update_user.
        return jsonify({'message': 'user not found'}), 404
    del app.users[index]
    return jsonify({'message': 'user deleted'})


if __name__ == '__main__':
    app.run()
# Minimal Keras classifier: flatten 28x28 inputs, one 128-unit ReLU hidden
# layer, softmax over 10 classes (MNIST-style setup).
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28,28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax'),
])
# sparse_categorical_crossentropy expects integer class labels, not one-hot.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
# NOTE(review): x_train / y_train are not defined in this snippet -- they must
# be loaded elsewhere (e.g. tf.keras.datasets.mnist) before this line runs.
model.fit(x_train, y_train, epochs=5)
<reponame>seanchambo/prisma
package com.prisma.api.mutations
import com.prisma.api.ApiBaseSpec
import com.prisma.shared.models.OnDelete
import com.prisma.shared.project_dsl.SchemaDsl
import org.scalatest.{FlatSpec, Matchers}
class OnDeleteDirectiveSpec extends FlatSpec with Matchers with ApiBaseSpec {

  // NOTE(review): both specs are marked `ignore`, so neither currently runs.
  "A relation with a onDelete SET_NULL directive" should "set the value on the other node to null" ignore {
    val project = SchemaDsl() { schema =>
      val modelB = schema.model("ModelB").field_!("b", _.String, isUnique = true)
      val modelA = schema.model("ModelA").field_!("a", _.String, isUnique = true).oneToOneRelation("modelB", "modelA", modelB)
    }
    database.setup(project)

    // Create a ModelB connected to a freshly created ModelA.
    val res = server.executeQuerySimple(
      s"""mutation {
| createModelB(data: {
| b: "b"
| modelA: {create: {a:"a"}}
| }
| ){
| b
| }
|}""".stripMargin,
      project
    )
    res.toString should be(s"""{"data":{"createModelB":{"b":"b"}}}""")

    // Deleting ModelB must leave ModelA in place (relation set to null).
    val deleteRes = server.executeQuerySimple("""mutation{deleteModelB(where: { b: "b" }){b}}""", project)
    deleteRes.toString should be(s"""{"data":{"deleteModelB":{"b":"b"}}}""")
    server.executeQuerySimple("{modelBs{b}}", project).toString should be("""{"data":{"modelBs":[]}}""")
    server.executeQuerySimple("{modelAs{a}}", project).toString should be("""{"data":{"modelAs":[{"a":"a"}]}}""")
  }

  "A relation with a onDelete CASCADE directive" should "delete the connected node" ignore {
    val project = SchemaDsl() { schema =>
      val modelB = schema.model("ModelB").field_!("b", _.String, isUnique = true)
      val modelA = schema
        .model("ModelA")
        .field_!("a", _.String, isUnique = true)
        .oneToOneRelation("modelB", "modelA", modelB, modelAOnDelete = OnDelete.Cascade, modelBOnDelete = OnDelete.Cascade)
    }
    database.setup(project)

    val res = server.executeQuerySimple(
      s"""mutation {
| createModelB(data: {
| b: "b"
| modelA: {create: {a:"a"}}
| }
| ){
| b
| }
|}""".stripMargin,
      project
    )
    res.toString should be(s"""{"data":{"createModelB":{"b":"b"}}}""")

    // Deleting ModelB must cascade and remove the connected ModelA as well.
    val deleteRes = server.executeQuerySimple("""mutation{deleteModelB(where: { b: "b" }){b}}""", project)
    deleteRes.toString should be(s"""{"data":{"deleteModelB":{"b":"b"}}}""")
    server.executeQuerySimple("{modelBs{b}}", project).toString should be("""{"data":{"modelBs":[]}}""")
    server.executeQuerySimple("{modelAs{a}}", project).toString should be("""{"data":{"modelAs":[]}}""")
  }
}
|
-- Return the first five customers, ordered alphabetically by company name.
-- NOTE(review): LIMIT is MySQL/PostgreSQL/SQLite syntax; SQL Server/Access
-- would need SELECT TOP 5 instead.
SELECT *
FROM Customers
ORDER BY CompanyName
LIMIT 5;
#!/bin/bash
# Copyright Project Harbor Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Print tool versions for the CI log.
set -x
gsutil version -l
set +x
docker-compose version
## -------------------------------------------- Pre-condition --------------------------------------------
if [[ $DRONE_REPO != "goharbor/harbor" ]]; then
    echo "Only run tests again Harbor Repo."
    exit 1
fi
# It won't package an new harbor build against tag, just pick up a build which passed CI and push to release.
if [[ $DRONE_BUILD_EVENT == "tag" || $DRONE_BUILD_EVENT == "pull_request" ]]; then
    echo "We do nothing against 'tag' and 'pull request'."
    exit 0
fi
## --------------------------------------------- Init Env -------------------------------------------------
# Record installed packages for debugging build-environment differences.
dpkg -l > package.list
# Start Xvfb for Chrome headless UI tests.
Xvfb -ac :99 -screen 0 1280x1024x16 & export DISPLAY=:99
export DRONE_SERVER=$DRONE_SERVER
export DRONE_TOKEN=$DRONE_TOKEN
# Flags controlling the upload/publish steps further down.
upload_build=false
nightly_run=false
upload_latest_build=false
upload_bundle_success=false
latest_build_file='latest.build'
publish_npm=true
harbor_build_bundle=""
harbor_logs_bucket="harbor-ci-logs"
harbor_builds_bucket="harbor-builds"
harbor_releases_bucket="harbor-releases"
harbor_ci_pipeline_store_bucket="harbor-ci-pipeline-store/latest"
harbor_target_bucket=""
# master builds go to the builds bucket; release branches get a sub-path of releases.
if [[ $DRONE_BRANCH == "master" ]]; then
    harbor_target_bucket=$harbor_builds_bucket
else
    harbor_target_bucket=$harbor_releases_bucket/$DRONE_BRANCH
fi
# GC credentials: write the service key and a .boto config for gsutil.
keyfile="/root/harbor-ci-logs.key"
botofile="/root/.boto"
echo -en $GS_PRIVATE_KEY > $keyfile
chmod 400 $keyfile
echo "[Credentials]" >> $botofile
echo "gs_service_key_file = $keyfile" >> $botofile
echo "gs_service_client_id = $GS_CLIENT_EMAIL" >> $botofile
echo "[GSUtil]" >> $botofile
echo "content_language = en" >> $botofile
echo "default_project_id = $GS_PROJECT_ID" >> $botofile
container_ip=`ip addr s eth0 |grep "inet "|awk '{print $2}' |awk -F "/" '{print $1}'`
echo $container_ip
## --------------------------------------------- Init Version -----------------------------------------------
buildinfo=$(drone build info goharbor/harbor $DRONE_BUILD_NUMBER)
echo $buildinfo
git_commit=$(git rev-parse --short=8 HEAD)
# the target release version is the version of next release(RC or GA). It needs to be updated on creating new release branch.
target_release_version=$(cat ./VERSION)
# the harbor ui version will be shown in the about dialog.
Harbor_UI_Version=$target_release_version-$git_commit
# the harbor package version is for both online and offline installer.
# harbor-offline-installer-v1.5.2-build.8.tgz
Harbor_Package_Version=$target_release_version-'build.'$DRONE_BUILD_NUMBER
# the harbor assets version is for tag of harbor images:
# 1, On master branch, it's same as package version.
# 2, On release branch(others), it would set to the target realese version so that we can rename the latest passed CI build to publish.
if [[ $DRONE_BRANCH == "master" ]]; then
    Harbor_Assets_Version=$Harbor_Package_Version
else
    Harbor_Assets_Version=$target_release_version
fi
export Harbor_UI_Version=$Harbor_UI_Version
export Harbor_Assets_Version=$Harbor_Assets_Version
# the env is for online and offline package.
export Harbor_Package_Version=$Harbor_Package_Version
export NPM_REGISTRY=$NPM_REGISTRY
echo "--------------------------------------------------"
echo "Harbor UI version: $Harbor_UI_Version"
echo "Harbor Package version: $Harbor_Package_Version"
echo "Harbor Assets version: $Harbor_Assets_Version"
echo "--------------------------------------------------"
# Upload file $1 into bucket/path $2 (as gs://$2/$1) and make it public.
# FIX: quote all parameter expansions so file names containing spaces or
# glob characters cannot break the gsutil invocation.
function uploader {
    gsutil cp "$1" "gs://$2/$1"
    gsutil -D setacl public-read "gs://$2/$1" &> /dev/null
}
# Build the offline installer tarball via the robot "Bundle" test cases,
# record its file name in $harbor_build_bundle and flag it for upload.
function package_offline_installer {
    echo "Package Harbor offline installer."
    pybot --removekeywords TAG:secret --include Bundle tests/robot-cases/Group0-Distro-Harbor
    harbor_build_bundle=$(basename harbor-offline-installer-*.tgz)
    upload_build=true
    echo "Package name is: $harbor_build_bundle"
    # Print the bundle size in MB for the CI log.
    du -ks $harbor_build_bundle | awk '{print $1 / 1024}' | { read x; echo $x MB; }
}
# publish images to Docker Hub
# Re-tags the locally built goharbor/* images and pushes them to Docker Hub.
function publishImage {
    echo "Publishing images to Docker Hub..."
    echo "The images on the host:"
    # for master, will use 'dev' as the tag name
    # for release-*, will use 'release-*-dev' as the tag name, like release-v1.8.0-dev
    if [[ $DRONE_BRANCH == "master" ]]; then
        image_tag=dev
    fi
    if [[ $DRONE_BRANCH == "release-"* ]]; then
        image_tag=$Harbor_Assets_Version-dev
    fi
    # rename the images with tag "dev" and push to Docker Hub
    docker images
    docker login -u $DOCKER_HUB_USERNAME -p $DOCKER_HUB_PASSWORD
    # sed rewrites each matching `docker images` row into a
    # "docker tag ...;docker push ..." command pair, executed by the final bash.
    docker images | sed -n "s|\(goharbor/[-._a-z0-9]*\)\s*\(.*$Harbor_Assets_Version\).*|docker tag \1:\2 \1:$image_tag;docker push \1:$image_tag|p" | bash
    echo "Images are published successfully"
    docker images
}
echo "--------------------------------------------------"
echo "Running CI for $DRONE_BUILD_EVENT on $DRONE_BRANCH"
echo "--------------------------------------------------"
##
# Any merge code(PUSH) on branch master, release-* will trigger package offline installer.
#
# Put code here is because that it needs clean code to build installer.
##
if [[ $DRONE_BRANCH == "master" || $DRONE_BRANCH == *"refs/tags"* || $DRONE_BRANCH == "release-"* ]]; then
    if [[ $DRONE_BUILD_EVENT == "push" ]]; then
        package_offline_installer
        upload_latest_build=true
        # Sign the bundle with the Harbor release key.
        echo -en "$HARBOR_SIGN_KEY" | gpg --import
        gpg -v -ab -u $HARBOR_SIGN_KEY_ID $harbor_build_bundle
    fi
fi
## --------------------------------------------- Upload Harbor Bundle File ---------------------------------------
#
# Build storage structure:
#
# 1(master), harbor-builds/harbor-offline-installer-*.tgz
#            latest.build
#            harbor-offline-installer-latest.tgz
# 2(others), harbor-releases/${branch}/harbor-offline-installer-*.tgz
#            latest.build
#            harbor-offline-installer-latest.tgz
#
# From here on, any failed command aborts the script.
set -e
if [ $upload_build == true ]; then
    cp ${harbor_build_bundle} harbor-offline-installer-latest.tgz
    cp ${harbor_build_bundle}.asc harbor-offline-installer-latest.tgz.asc
    uploader ${harbor_build_bundle} $harbor_target_bucket
    uploader ${harbor_build_bundle}.asc $harbor_target_bucket
    uploader harbor-offline-installer-latest.tgz $harbor_target_bucket
    uploader harbor-offline-installer-latest.tgz.asc $harbor_target_bucket
    upload_bundle_success=true
fi
## --------------------------------------------- Upload Harbor Dev Images ---------------------------------------
#
# Any merge code(PUSH) on branch master, release-* will trigger push dev images.
#
##
if [[ $DRONE_BRANCH == "master" || $DRONE_BRANCH == "release-"* ]]; then
    if [[ $DRONE_BUILD_EVENT == "push" ]]; then
        publishImage
    fi
fi
## --------------------------------------------- Upload Harbor Latest Build File ----------------------------------
#
# latest.build file holds the latest offline installer url, it must be sure that the installer has been uploaded successfull.
#
if [ $upload_latest_build == true ] && [ $upload_bundle_success == true ]; then
    echo 'https://storage.googleapis.com/'$harbor_target_bucket/$harbor_build_bundle > $latest_build_file
    uploader $latest_build_file $harbor_target_bucket
fi
## --------------------------------------------- Upload securego results ------------------------------------------
#if [ $DRONE_BUILD_EVENT == "push" ]; then
#    go get github.com/securego/gosec/cmd/gosec
#    go get github.com/dghubble/sling
#    make gosec -e GOSECRESULTS=harbor-gosec-results-latest.json
#    echo $git_commit > ./harbor-gosec-results-latest-version
#    uploader harbor-gosec-results-latest.json $harbor_target_bucket
#    uploader harbor-gosec-results-latest-version $harbor_target_bucket
#fi
## ------------------------------------------------ Tear Down -----------------------------------------------------
# Remove the service-account key so it does not leak out of the build container.
if [ -f "$keyfile" ]; then
    rm -f $keyfile
fi
|
import styled, { css } from '@xstyled/styled-components'
import { th, up } from '@xstyled/system'

// Docs page layout wrapper: stacks children vertically on small screens and
// switches to a horizontal row from the "md" breakpoint upwards.
// `th('docs.page')` pulls page-level styles from the theme.
export const Layout = styled.div`
  display: flex;
  flex-direction: column;
  ${th('docs.page')};
  ${up(
    'md',
    css`
      flex-direction: row;
    `
  )};
`
|
#!/bin/sh
#
# Copyright (c) 2005 Junio C Hamano
#
test_description='git mailinfo and git mailsplit test'

. ./test-lib.sh

# The sample mbox is expected to split into exactly 17 messages.
test_expect_success 'split sample box' \
    'git mailsplit -o. "$TEST_DIRECTORY"/t5100/sample.mbox >last &&
    last=`cat last` &&
    echo total is $last &&
    test `cat last` = 17'

# Run mailinfo on one split message ($1) with optional flag ($2) and compare
# msg/patch/info against the checked-in expectations.
check_mailinfo () {
    mail=$1 opt=$2
    mo="$mail$opt"
    git mailinfo -u $opt msg$mo patch$mo <$mail >info$mo &&
    test_cmp "$TEST_DIRECTORY"/t5100/msg$mo msg$mo &&
    test_cmp "$TEST_DIRECTORY"/t5100/patch$mo patch$mo &&
    test_cmp "$TEST_DIRECTORY"/t5100/info$mo info$mo
}

# Each optional-flag variant is only checked when its expectation file exists.
for mail in `echo 00*`
do
    test_expect_success "mailinfo $mail" '
        check_mailinfo $mail "" &&
        if test -f "$TEST_DIRECTORY"/t5100/msg$mail--scissors
        then
            check_mailinfo $mail --scissors
        fi &&
        if test -f "$TEST_DIRECTORY"/t5100/msg$mail--no-inbody-headers
        then
            check_mailinfo $mail --no-inbody-headers
        fi &&
        if test -f "$TEST_DIRECTORY"/t5100/msg$mail--message-id
        then
            check_mailinfo $mail --message-id
        fi
    '
done

test_expect_success 'split box with rfc2047 samples' \
    'mkdir rfc2047 &&
    git mailsplit -orfc2047 "$TEST_DIRECTORY"/t5100/rfc2047-samples.mbox \
    >rfc2047/last &&
    last=`cat rfc2047/last` &&
    echo total is $last &&
    test `cat rfc2047/last` = 11'

for mail in `echo rfc2047/00*`
do
    test_expect_success "mailinfo $mail" '
        git mailinfo -u $mail-msg $mail-patch <$mail >$mail-info &&
        echo msg &&
        test_cmp "$TEST_DIRECTORY"/t5100/empty $mail-msg &&
        echo patch &&
        test_cmp "$TEST_DIRECTORY"/t5100/empty $mail-patch &&
        echo info &&
        test_cmp "$TEST_DIRECTORY"/t5100/rfc2047-info-$(basename $mail) $mail-info
    '
done

# NUL bytes in the payload must survive splitting and mailinfo.
test_expect_success 'respect NULs' '
    git mailsplit -d3 -o. "$TEST_DIRECTORY"/t5100/nul-plain &&
    test_cmp "$TEST_DIRECTORY"/t5100/nul-plain 001 &&
    (cat 001 | git mailinfo msg patch) &&
    test_line_count = 4 patch
'

test_expect_success 'Preserve NULs out of MIME encoded message' '
    git mailsplit -d5 -o. "$TEST_DIRECTORY"/t5100/nul-b64.in &&
    test_cmp "$TEST_DIRECTORY"/t5100/nul-b64.in 00001 &&
    git mailinfo msg patch <00001 &&
    test_cmp "$TEST_DIRECTORY"/t5100/nul-b64.expect patch
'

test_expect_success 'mailinfo on from header without name works' '
    mkdir info-from &&
    git mailsplit -oinfo-from "$TEST_DIRECTORY"/t5100/info-from.in &&
    test_cmp "$TEST_DIRECTORY"/t5100/info-from.in info-from/0001 &&
    git mailinfo info-from/msg info-from/patch \
    <info-from/0001 >info-from/out &&
    test_cmp "$TEST_DIRECTORY"/t5100/info-from.expect info-from/out
'

test_expect_success 'mailinfo finds headers after embedded From line' '
    mkdir embed-from &&
    git mailsplit -oembed-from "$TEST_DIRECTORY"/t5100/embed-from.in &&
    test_cmp "$TEST_DIRECTORY"/t5100/embed-from.in embed-from/0001 &&
    git mailinfo embed-from/msg embed-from/patch \
    <embed-from/0001 >embed-from/out &&
    test_cmp "$TEST_DIRECTORY"/t5100/embed-from.expect embed-from/out
'

test_expect_success 'mailinfo on message with quoted >From' '
    mkdir quoted-from &&
    git mailsplit -oquoted-from "$TEST_DIRECTORY"/t5100/quoted-from.in &&
    test_cmp "$TEST_DIRECTORY"/t5100/quoted-from.in quoted-from/0001 &&
    git mailinfo quoted-from/msg quoted-from/patch \
    <quoted-from/0001 >quoted-from/out &&
    test_cmp "$TEST_DIRECTORY"/t5100/quoted-from.expect quoted-from/msg
'

test_done
|
#!/bin/bash
MINGW_REPO="https://github.com/mirror/mingw-w64.git"
MINGW_COMMIT="b4da620ea9f159db0018d243d21fdf4fa8752f32"
# Whether this stage applies to the current target: Windows (win*) builds only.
# NOTE(review): `return -1` is outside bash's 0-255 range and is truncated to
# 255; it works only because callers merely test for non-zero — confirm.
ffbuild_enabled() {
    [[ $TARGET == win* ]] || return -1
    return 0
}
# Emit the Dockerfile lines that copy this script into the image and run it.
ffbuild_dockerstage() {
    to_df "ADD $SELF /stage.sh"
    to_df "RUN run_stage"
}
# Build and install the pinned mingw-w64 headers plus a static winpthreads
# into the cross-toolchain prefix, then remove the source tree.
ffbuild_dockerbuild() {
    git-mini-clone "$MINGW_REPO" "$MINGW_COMMIT" mingw
    cd mingw/mingw-w64-headers

    # Toolchain components must not inherit the stage's build flags.
    unset CFLAGS
    unset CXXFLAGS
    unset LDFLAGS
    unset PKG_CONFIG_LIBDIR

    autoreconf -i

    # Target Windows 7 (0x601) as the minimum supported API level.
    local myconf=(
        --prefix="/usr/$FFBUILD_TOOLCHAIN"
        --host="$FFBUILD_TOOLCHAIN"
        --with-default-win32-winnt="0x601"
        --enable-idl
    )

    ./configure "${myconf[@]}" || return -1
    make -j$(nproc) || return -1
    make install || return -1

    cd ../mingw-w64-libraries/winpthreads
    autoreconf -i

    # Static, PIC-enabled winpthreads for the pthreads-based ffmpeg build.
    local myconf=(
        --prefix="/usr/$FFBUILD_TOOLCHAIN"
        --host="$FFBUILD_TOOLCHAIN"
        --with-pic
        --disable-shared
        --enable-static
    )

    ./configure "${myconf[@]}" || return -1
    make -j$(nproc) || return -1
    make install || return -1

    cd ../../..
    rm -rf mingw
}
# Extra ffmpeg configure flags contributed by this stage: disable the native
# win32 threading backend in favour of the winpthreads library built above.
ffbuild_configure() {
    local thread_flags="--disable-w32threads --enable-pthreads"
    echo $thread_flags
}
|
package com.example.xyzreader;
import android.os.Bundle;
import android.support.v4.widget.NestedScrollView;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.widget.ImageView;
import com.bumptech.glide.Glide;
import com.example.xyzreader.api.ApiClient;
import com.example.xyzreader.model.ListResponsData;
import java.util.List;
import butterknife.Bind;
import butterknife.ButterKnife;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;
public class ScrollingActivity extends AppCompatActivity {
@Bind(R.id.anim_toolbar)
Toolbar toolbar;
@Bind(R.id.content_fragment_article_details)
NestedScrollView content_fragment_article_details;
@Bind(R.id.imageView_details)
ImageView imageView_details;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scrolling);
ButterKnife.bind(this);
setSupportActionBar(toolbar);
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setDisplayHomeAsUpEnabled(true);
actionBar.setHomeButtonEnabled(true);
}
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
ApiClient apiClient = new ApiClient();
ApiClient.getApiClientInstance(this).getBooksApiInterfaceMethod().getData(new Callback<List<ListResponsData>>() {
@Override
public void success(List<ListResponsData> listResponsDatas, Response response) {
for (ListResponsData l :
listResponsDatas) {
Log.d("TEST TEST", l.getThumb());
}
}
@Override
public void failure(RetrofitError error) {
Log.d("Retrofit Erro", error.getMessage());
}
});
Glide.with(this).load("https://dl.dropboxusercontent.com/u/231329/xyzreader_data/images/p014.jpg")
.into(imageView_details);
//imageView_details.setAspectRatio(Float.parseFloat("0.66667"));
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
return super.onOptionsItemSelected(item);
}
private void hideViews() {
toolbar.animate().translationY(-toolbar.getHeight()).setInterpolator(new AccelerateInterpolator(2));
/* FrameLayout.LayoutParams lp = (FrameLayout.LayoutParams) mFabButton.getLayoutParams();
int fabBottomMargin = lp.bottomMargin;
mFabButton.animate().translationY(mFabButton.getHeight()+fabBottomMargin).setInterpolator(new AccelerateInterpolator(2)).start();
*/
}
private void showViews() {
toolbar.animate().translationY(0).setInterpolator(new DecelerateInterpolator(2));
// mFabButton.animate().translationY(0).setInterpolator(new DecelerateInterpolator(2)).start();
}
}
|
// (C) Copyright <NAME> 2011-2015
// Use, modification and distribution are subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt).
#include <boost/vmd/is_number.hpp>
#include <boost/detail/lightweight_test.hpp>
#include <boost/preprocessor/list/at.hpp>
#include <boost/preprocessor/seq/elem.hpp>
#include <boost/preprocessor/tuple/elem.hpp>
int main()
{
#if BOOST_PP_VARIADICS

    // Fixtures: tuples/seqs/lists mixing numeric and non-numeric tokens.
#define A_TUPLE (4,241,zzz)
#define JDATA somevalue
#define KDATA 213
#define A_SEQ (num)(78)(42)
#define A_LIST (eeb,(grist,(152,BOOST_PP_NIL)))

    // Larger fixtures only when the pp-lib magnitude limit permits.
#if BOOST_PP_LIMIT_MAG > 256
#define A_TUPLE_2 (374,511,zzz)
#define KDATA_2 269
#endif

#if BOOST_PP_LIMIT_MAG > 512
#define A_SEQ_2 (num)(781)(942)
#define A_LIST_2 (eeb,(grist,(1021,BOOST_PP_NIL)))
#endif

    BOOST_TEST(!BOOST_VMD_IS_NUMBER(BOOST_PP_TUPLE_ELEM(2,A_TUPLE)));
    BOOST_TEST(BOOST_VMD_IS_NUMBER(BOOST_PP_TUPLE_ELEM(1,A_TUPLE)));
    BOOST_TEST(!BOOST_VMD_IS_NUMBER(JDATA));
    BOOST_TEST(BOOST_VMD_IS_NUMBER(KDATA));
    BOOST_TEST(!BOOST_VMD_IS_NUMBER(BOOST_PP_SEQ_ELEM(0,A_SEQ)));
    BOOST_TEST(BOOST_VMD_IS_NUMBER(BOOST_PP_SEQ_ELEM(2,A_SEQ)));
    BOOST_TEST(!BOOST_VMD_IS_NUMBER(BOOST_PP_LIST_AT(A_LIST,0)));
    BOOST_TEST(BOOST_VMD_IS_NUMBER(BOOST_PP_LIST_AT(A_LIST,2)));
    BOOST_TEST(!BOOST_VMD_IS_NUMBER(BOOST_PP_LIST_AT(A_LIST,1)));
    // Parenthesised data and an empty argument are not numbers.
    BOOST_TEST(!BOOST_VMD_IS_NUMBER((XXX)));
    BOOST_TEST(!BOOST_VMD_IS_NUMBER());

#if BOOST_PP_LIMIT_MAG > 256
    BOOST_TEST(BOOST_VMD_IS_NUMBER(KDATA_2));
    BOOST_TEST(BOOST_VMD_IS_NUMBER(BOOST_PP_TUPLE_ELEM(0,A_TUPLE_2)));
#endif

#if BOOST_PP_LIMIT_MAG > 512
    BOOST_TEST(BOOST_VMD_IS_NUMBER(BOOST_PP_SEQ_ELEM(2,A_SEQ_2)));
    BOOST_TEST(BOOST_VMD_IS_NUMBER(BOOST_PP_LIST_AT(A_LIST_2,2)));
#endif

#else

    BOOST_ERROR("No variadic macro support");

#endif

    return boost::report_errors();
}
|
/// <reference types="node" />
import { Stream } from 'stream';
import { EventEmitter } from 'events';
import { results, Message, X509, AuthenticationProvider } from 'azure-iot-common';
import { SharedAccessSignature as CommonSharedAccessSignature } from 'azure-iot-common';
import { RetryPolicy } from 'azure-iot-common';
import { BlobUploadClient } from './blob_upload';
import { DeviceMethodRequest, DeviceMethodResponse } from './device_method';
import { Twin, TwinProperties } from './twin';
import { DeviceClientOptions } from './interfaces';
/**
* IoT Hub device client used to connect a device with an Azure IoT hub.
*
* Users of the SDK should call one of the factory methods,
* {@link azure-iot-device.Client.fromConnectionString|fromConnectionString}
* or {@link azure-iot-device.Client.fromSharedAccessSignature|fromSharedAccessSignature}
* to create an IoT Hub device client.
*/
export declare class Client extends EventEmitter {
/**
* @private
*/
static sasRenewalInterval: number;
/**
* @private
*/
_transport: Client.Transport;
/**
* @private
*/
_twin: Twin;
/**
* @private
* Maximum timeout (in milliseconds) used to consider an operation failed.
* The operation will be retried according to the retry policy set with {@link azure-iot-device.Client.setRetryPolicy} method (or {@link azure-iot-common.ExponentialBackoffWithJitter} by default) until this value is reached.)
*/
private _maxOperationTimeout;
private _methodCallbackMap;
private _disconnectHandler;
private blobUploadClient;
private _c2dEnabled;
private _methodsEnabled;
private _retryPolicy;
/**
* @constructor
* @param {Object} transport An object that implements the interface
* expected of a transport object, e.g.,
* {@link azure-iot-device-http.Http|Http}.
* @param {string} connStr A connection string (optional: when not provided, updateSharedAccessSignature must be called to set the SharedAccessSignature token directly).
* @param {Object} blobUploadClient An object that is capable of uploading a stream to a blob.
*/
constructor(transport: Client.Transport, connStr?: string, blobUploadClient?: BlobUploadClient);
/**
* @method module:azure-iot-device.Client#onDeviceMethod
* @description Registers the `callback` to be invoked when a
* cloud-to-device method call is received by the client
* for the given `methodName`.
*
* @param {String} methodName The name of the method for which the callback
* is to be registered.
* @param {Function} callback The callback to be invoked when the C2D method
* call is received.
*
* @throws {ReferenceError} If the `methodName` or `callback` parameter
* is falsy.
* @throws {TypeError} If the `methodName` parameter is not a string
* or if the `callback` is not a function.
*/
onDeviceMethod(methodName: string, callback: (request: DeviceMethodRequest, response: DeviceMethodResponse) => void): void;
/**
* @method module:azure-iot-device.Client#updateSharedAccessSignature
* @description Updates the Shared Access Signature token used by the transport to authenticate with the IoT Hub service.
*
* @param {String} sharedAccessSignature The new SAS token to use.
* @param {Function} done The callback to be invoked when `updateSharedAccessSignature`
* completes execution.
*
* @throws {ReferenceError} If the sharedAccessSignature parameter is falsy.
* @throws {ReferenceError} If the client uses x509 authentication.
*/
updateSharedAccessSignature(sharedAccessSignature: string, updateSasCallback?: (err?: Error, result?: results.SharedAccessSignatureUpdated) => void): void;
/**
* @method module:azure-iot-device.Client#open
* @description Call the transport layer CONNECT function if the
* transport layer implements it
*
* @param {Function} openCallback The callback to be invoked when `open`
* completes execution.
*/
open(openCallback: (err?: Error, result?: results.Connected) => void): void;
/**
* @method module:azure-iot-device.Client#sendEvent
* @description The [sendEvent]{@link azure-iot-device.Client.sendEvent} method sends an event message
* to the IoT Hub as the device indicated by the connection string passed
* via the constructor.
*
* @param {azure-iot-common.Message} message The [message]{@link azure-iot-common.Message} to be sent.
* @param {Function} sendEventCallback The callback to be invoked when `sendEvent` completes execution.
*/
sendEvent(message: Message, sendEventCallback?: (err?: Error, result?: results.MessageEnqueued) => void): void;
/**
* @method module:azure-iot-device.Client#sendEventBatch
* @description The [sendEventBatch]{@link azure-iot-device.Client.sendEventBatch} method sends a list
* of event messages to the IoT Hub as the device indicated by the connection
* string passed via the constructor.
*
* @param {array<Message>} messages Array of [Message]{@link azure-iot-common.Message}
* objects to be sent as a batch.
* @param {Function} sendEventBatchCallback The callback to be invoked when
* `sendEventBatch` completes execution.
*/
sendEventBatch(messages: Message[], sendEventBatchCallback?: (err?: Error, result?: results.MessageEnqueued) => void): void;
/**
* @method module:azure-iot-device.Client#close
* @description The `close` method directs the transport to close the current connection to the IoT Hub instance
*
* @param {Function} closeCallback The callback to be invoked when the connection has been closed.
*/
close(closeCallback?: (err?: Error, result?: results.Disconnected) => void): void;
/**
* @deprecated Use Client.setOptions instead.
* @method module:azure-iot-device.Client#setTransportOptions
* @description The `setTransportOptions` method configures transport-specific options for the client and its underlying transport object.
*
* @param {Object} options The options that shall be set (see transports documentation).
* @param {Function} done The callback that shall be invoked with either an error or a result object.
*/
setTransportOptions(options: any, done?: (err?: Error, result?: results.TransportConfigured) => void): void;
/**
* @method module:azure-iot-device.Client#setOptions
* @description The `setOptions` method let the user configure the client.
*
* @param {Object} options The options structure
* @param {Function} done The callback that shall be called when setOptions is finished.
*
* @throws {ReferenceError} If the options structure is falsy
*/
setOptions(options: DeviceClientOptions, done?: (err?: Error, result?: results.TransportConfigured) => void): void;
/**
* @method module:azure-iot-device.Client#complete
* @description The `complete` method directs the transport to settle the message passed as argument as 'completed'.
*
* @param {Message} message The message to settle.
* @param {Function} completeCallback The callback to call when the message is completed.
*
* @throws {ReferenceError} If the message is falsy.
*/
complete(message: Message, completeCallback: (err?: Error, result?: results.MessageCompleted) => void): void;
/**
* @method module:azure-iot-device.Client#reject
* @description The `reject` method directs the transport to settle the message passed as argument as 'rejected'.
*
* @param {Message} message The message to settle.
* @param {Function} rejectCallback The callback to call when the message is rejected.
*
* @throws {ReferenceError} If the message is falsy.
*/
reject(message: Message, rejectCallback: (err?: Error, result?: results.MessageRejected) => void): void;
/**
* @method module:azure-iot-device.Client#abandon
* @description The `abandon` method directs the transport to settle the message passed as argument as 'abandoned'.
*
* @param {Message} message The message to settle.
* @param {Function} abandonCallback The callback to call when the message is abandoned.
*
* @throws {ReferenceError} If the message is falsy.
*/
abandon(message: Message, abandonCallback: (err?: Error, result?: results.MessageAbandoned) => void): void;
/**
* @method module:azure-iot-device.Client#uploadToBlob
* @description The `uploadToBlob` method uploads a stream to a blob.
*
* @param {String} blobName The name to use for the blob that will be created with the content of the stream.
* @param {Stream} stream The data to that should be uploaded to the blob.
* @param {Number} streamLength The size of the data to that should be uploaded to the blob.
* @param {Function} done The callback to call when the upload is complete.
*
* @throws {ReferenceError} If blobName or stream or streamLength is falsy.
*/
uploadToBlob(blobName: string, stream: Stream, streamLength: number, done: (err?: Error) => void): void;
/**
 * @method          module:azure-iot-device.Client#getTwin
 * @description     The `getTwin` method creates a Twin object and establishes a connection with the Twin service.
 *
 * @param {Function} done The callback to call when the connection is established.
 *
 */
getTwin(done: (err?: Error, twin?: Twin) => void): void;
/**
 * Sets the retry policy used by the client on all operations. The default is {@link azure-iot-common.ExponentialBackoffWithJitter|ExponentialBackoffWithJitter}.
 * @param policy {RetryPolicy}  The retry policy that should be used for all future operations.
 */
setRetryPolicy(policy: RetryPolicy): void;
// Internal helpers: argument validation and method-callback registration for
// direct methods, plus enable/disable of cloud-to-device message delivery and
// transport teardown. Not part of the public API surface.
private _validateDeviceMethodInputs(methodName, callback);
private _addMethodCallback(methodName, callback);
private _enableC2D(callback);
private _disableC2D(callback);
private _enableMethods(callback);
private _closeTransport(closeCallback);
/**
 * @method            module:azure-iot-device.Client.fromConnectionString
 * @description       Creates an IoT Hub device client from the given
 *                    connection string using the given transport type.
 *
 * @param {String}    connStr        A connection string which encapsulates "device
 *                                   connect" permissions on an IoT hub.
 * @param {Function}  transportCtor  A transport constructor.
 *
 * @throws {ReferenceError}          If the connStr parameter is falsy.
 *
 * @returns {module:azure-iot-device.Client}
 */
static fromConnectionString(connStr: string, transportCtor: any): Client;
/**
 * @method            module:azure-iot-device.Client.fromSharedAccessSignature
 * @description       Creates an IoT Hub device client from the given
 *                    shared access signature using the given transport type.
 *
 * @param {String}    sharedAccessSignature  A shared access signature which encapsulates "device
 *                                           connect" permissions on an IoT hub.
 * @param {Function}  transportCtor          A transport constructor.
 *
 * @throws {ReferenceError}          If the sharedAccessSignature parameter is falsy.
 *
 * @returns {module:azure-iot-device.Client}
 */
static fromSharedAccessSignature(sharedAccessSignature: string, transportCtor: any): Client;
/**
 * @method module:azure-iot-device.Client.fromAuthenticationProvider
 * @description Creates an IoT Hub device client from the given authentication method and using the given transport type.
 * @param authenticationProvider Object used to obtain the authentication parameters for the IoT hub.
 * @param transportCtor Transport protocol used to connect to IoT hub.
 */
static fromAuthenticationProvider(authenticationProvider: AuthenticationProvider, transportCtor: any): Client;
}
export declare namespace Client {
    /**
     * @private
     * Configuration parameters used to authenticate and connect a Device Client with an Azure IoT hub.
     */
    interface Config {
        /**
         * Device unique identifier (as it exists in the device registry).
         */
        deviceId: string;
        /**
         * Hostname of the Azure IoT hub. (<IoT hub name>.azure-devices.net).
         */
        host: string;
        /**
         * @deprecated This is not used anywhere anymore.
         * Name of the Azure IoT hub. (The first section of the Azure IoT hub hostname)
         */
        hubName?: string;
        /**
         * If using symmetric key authentication, this is used to generate the shared access signature tokens used to authenticate the connection.
         */
        symmetricKey?: string;
        /**
         * The shared access signature token used to authenticate the connection with the Azure IoT hub.
         */
        sharedAccessSignature?: string | CommonSharedAccessSignature;
        /**
         * Structure containing the certificate and associated key used to authenticate the connection if using x509 certificates as the authentication method.
         */
        x509?: X509;
    }
    /**
     * @private
     * Interface that every device transport (e.g. AMQP, MQTT, HTTP) must implement
     * to be usable by the client: connection lifecycle, telemetry, C2D messages,
     * twin operations and direct methods.
     */
    interface Transport extends EventEmitter {
        on(type: 'error', func: (err: Error) => void): this;
        on(type: 'disconnect', func: (err?: Error) => void): this;
        connect(done: (err?: Error, result?: results.Connected) => void): void;
        disconnect(done: (err?: Error, result?: results.Disconnected) => void): void;
        setOptions?(options: DeviceClientOptions, done: (err?: Error, result?: results.TransportConfigured) => void): void;
        updateSharedAccessSignature(sharedAccessSignature: string, done: (err?: Error, result?: results.SharedAccessSignatureUpdated) => void): void;
        sendEvent(message: Message, done: (err?: Error, result?: results.MessageEnqueued) => void): void;
        sendEventBatch(messages: Message[], done: (err?: Error, result?: results.MessageEnqueued) => void): void;
        on(type: 'message', func: (msg: Message) => void): this;
        complete(message: Message, done: (err?: Error, result?: results.MessageCompleted) => void): void;
        reject(message: Message, done: (err?: Error, results?: results.MessageRejected) => void): void;
        abandon(message: Message, done: (err?: Error, results?: results.MessageAbandoned) => void): void;
        enableC2D(callback: (err?: Error) => void): void;
        disableC2D(callback: (err?: Error) => void): void;
        on(type: 'twinDesiredPropertiesUpdate', func: (desiredProps: any) => void): this;
        getTwin(callback: (err?: Error, twin?: TwinProperties) => void): void;
        updateTwinReportedProperties(patch: any, callback: (err?: Error) => void): void;
        enableTwinDesiredPropertiesUpdates(callback: (err?: Error) => void): void;
        disableTwinDesiredPropertiesUpdates(callback: (err?: Error) => void): void;
        sendMethodResponse(response: DeviceMethodResponse, done?: (err?: Error, result?: any) => void): void;
        onDeviceMethod(methodName: string, methodCallback: (request: MethodMessage, response: DeviceMethodResponse) => void): void;
        enableMethods(callback: (err?: Error) => void): void;
        disableMethods(callback: (err?: Error) => void): void;
    }
    /**
     * @private
     * Abstraction used by the client to upload a stream to an Azure Storage blob.
     */
    interface BlobUpload {
        // Fixed parameter-name typo: "steamLength" -> "streamLength" (matches
        // Client#uploadToBlob above; purely a signature label, no caller impact).
        uploadToBlob(blobName: string, stream: Stream, streamLength: number, done: (err?: Error) => void): void;
        updateSharedAccessSignature(sharedAccessSignature: string): void;
    }
    /**
     * @private
     * @deprecated
     */
    interface MethodMessage {
        methods: {
            methodName: string;
        };
        requestId: string;
        properties: {
            [key: string]: string;
        };
        body: Buffer;
    }
    type TransportCtor = new (config: Config) => Transport;
}
|
<filename>gen/pb-go/flyteidl/admin/schedule.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: flyteidl/admin/schedule.proto
package admin
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Represents a frequency at which to run a schedule.
type FixedRateUnit int32
const (
FixedRateUnit_MINUTE FixedRateUnit = 0
FixedRateUnit_HOUR FixedRateUnit = 1
FixedRateUnit_DAY FixedRateUnit = 2
)
var FixedRateUnit_name = map[int32]string{
0: "MINUTE",
1: "HOUR",
2: "DAY",
}
var FixedRateUnit_value = map[string]int32{
"MINUTE": 0,
"HOUR": 1,
"DAY": 2,
}
func (x FixedRateUnit) String() string {
return proto.EnumName(FixedRateUnit_name, int32(x))
}
func (FixedRateUnit) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_a71cf75647fcd25a, []int{0}
}
// Option for schedules run at a certain frequency, e.g. every 2 minutes.
type FixedRate struct {
Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
Unit FixedRateUnit `protobuf:"varint,2,opt,name=unit,proto3,enum=flyteidl.admin.FixedRateUnit" json:"unit,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *FixedRate) Reset() { *m = FixedRate{} }
func (m *FixedRate) String() string { return proto.CompactTextString(m) }
func (*FixedRate) ProtoMessage() {}
func (*FixedRate) Descriptor() ([]byte, []int) {
return fileDescriptor_a71cf75647fcd25a, []int{0}
}
func (m *FixedRate) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_FixedRate.Unmarshal(m, b)
}
func (m *FixedRate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_FixedRate.Marshal(b, m, deterministic)
}
func (m *FixedRate) XXX_Merge(src proto.Message) {
xxx_messageInfo_FixedRate.Merge(m, src)
}
func (m *FixedRate) XXX_Size() int {
return xxx_messageInfo_FixedRate.Size(m)
}
func (m *FixedRate) XXX_DiscardUnknown() {
xxx_messageInfo_FixedRate.DiscardUnknown(m)
}
var xxx_messageInfo_FixedRate proto.InternalMessageInfo
func (m *FixedRate) GetValue() uint32 {
if m != nil {
return m.Value
}
return 0
}
func (m *FixedRate) GetUnit() FixedRateUnit {
if m != nil {
return m.Unit
}
return FixedRateUnit_MINUTE
}
// Defines complete set of information required to trigger an execution on a schedule.
type Schedule struct {
// Types that are valid to be assigned to ScheduleExpression:
// *Schedule_CronExpression
// *Schedule_Rate
ScheduleExpression isSchedule_ScheduleExpression `protobuf_oneof:"ScheduleExpression"`
// Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off.
KickoffTimeInputArg string `protobuf:"bytes,3,opt,name=kickoff_time_input_arg,json=kickoffTimeInputArg,proto3" json:"kickoff_time_input_arg,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Schedule) Reset() { *m = Schedule{} }
func (m *Schedule) String() string { return proto.CompactTextString(m) }
func (*Schedule) ProtoMessage() {}
func (*Schedule) Descriptor() ([]byte, []int) {
return fileDescriptor_a71cf75647fcd25a, []int{1}
}
func (m *Schedule) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Schedule.Unmarshal(m, b)
}
func (m *Schedule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Schedule.Marshal(b, m, deterministic)
}
func (m *Schedule) XXX_Merge(src proto.Message) {
xxx_messageInfo_Schedule.Merge(m, src)
}
func (m *Schedule) XXX_Size() int {
return xxx_messageInfo_Schedule.Size(m)
}
func (m *Schedule) XXX_DiscardUnknown() {
xxx_messageInfo_Schedule.DiscardUnknown(m)
}
var xxx_messageInfo_Schedule proto.InternalMessageInfo
type isSchedule_ScheduleExpression interface {
isSchedule_ScheduleExpression()
}
type Schedule_CronExpression struct {
CronExpression string `protobuf:"bytes,1,opt,name=cron_expression,json=cronExpression,proto3,oneof"`
}
type Schedule_Rate struct {
Rate *FixedRate `protobuf:"bytes,2,opt,name=rate,proto3,oneof"`
}
func (*Schedule_CronExpression) isSchedule_ScheduleExpression() {}
func (*Schedule_Rate) isSchedule_ScheduleExpression() {}
func (m *Schedule) GetScheduleExpression() isSchedule_ScheduleExpression {
if m != nil {
return m.ScheduleExpression
}
return nil
}
func (m *Schedule) GetCronExpression() string {
if x, ok := m.GetScheduleExpression().(*Schedule_CronExpression); ok {
return x.CronExpression
}
return ""
}
func (m *Schedule) GetRate() *FixedRate {
if x, ok := m.GetScheduleExpression().(*Schedule_Rate); ok {
return x.Rate
}
return nil
}
func (m *Schedule) GetKickoffTimeInputArg() string {
if m != nil {
return m.KickoffTimeInputArg
}
return ""
}
// XXX_OneofWrappers is for the internal use of the proto package.
func (*Schedule) XXX_OneofWrappers() []interface{} {
return []interface{}{
(*Schedule_CronExpression)(nil),
(*Schedule_Rate)(nil),
}
}
func init() {
proto.RegisterEnum("flyteidl.admin.FixedRateUnit", FixedRateUnit_name, FixedRateUnit_value)
proto.RegisterType((*FixedRate)(nil), "flyteidl.admin.FixedRate")
proto.RegisterType((*Schedule)(nil), "flyteidl.admin.Schedule")
}
func init() { proto.RegisterFile("flyteidl/admin/schedule.proto", fileDescriptor_a71cf75647fcd25a) }
var fileDescriptor_a71cf75647fcd25a = []byte{
// 301 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x41, 0x4f, 0xc2, 0x30,
0x18, 0x86, 0x19, 0x20, 0xc2, 0x67, 0x40, 0x52, 0x89, 0xc1, 0x03, 0x09, 0xe1, 0x84, 0x26, 0xb6,
0x01, 0x7e, 0x01, 0x44, 0x0c, 0x1c, 0xd4, 0xa4, 0xc2, 0x41, 0x2f, 0xcb, 0xd8, 0xbe, 0x8d, 0x86,
0xad, 0x5d, 0x4a, 0x67, 0xe0, 0x67, 0xf9, 0x0f, 0x0d, 0x15, 0x30, 0x3b, 0x78, 0x6c, 0x9f, 0xaf,
0xef, 0xd3, 0xf6, 0x85, 0x4e, 0x18, 0xef, 0x0d, 0x8a, 0x20, 0x66, 0x5e, 0x90, 0x08, 0xc9, 0xb6,
0xfe, 0x1a, 0x83, 0x2c, 0x46, 0x9a, 0x6a, 0x65, 0x14, 0x69, 0x9c, 0x30, 0xb5, 0xb8, 0xb7, 0x80,
0xda, 0xb3, 0xd8, 0x61, 0xc0, 0x3d, 0x83, 0xa4, 0x05, 0x17, 0x5f, 0x5e, 0x9c, 0x61, 0xdb, 0xe9,
0x3a, 0xfd, 0x3a, 0xff, 0x5d, 0x90, 0x01, 0x94, 0x33, 0x29, 0x4c, 0xbb, 0xd8, 0x75, 0xfa, 0x8d,
0x61, 0x87, 0xe6, 0x13, 0xe8, 0xf9, 0xf8, 0x52, 0x0a, 0xc3, 0xed, 0x68, 0xef, 0xdb, 0x81, 0xea,
0xfb, 0x51, 0x4c, 0xee, 0xe1, 0xda, 0xd7, 0x4a, 0xba, 0xb8, 0x4b, 0x35, 0x6e, 0xb7, 0x42, 0x49,
0x9b, 0x5f, 0x9b, 0x15, 0x78, 0xe3, 0x00, 0xa6, 0xe7, 0x7d, 0xc2, 0xa0, 0xac, 0x3d, 0x83, 0x56,
0x75, 0x35, 0xbc, 0xfb, 0x57, 0x35, 0x2b, 0x70, 0x3b, 0x48, 0x46, 0x70, 0xbb, 0x11, 0xfe, 0x46,
0x85, 0xa1, 0x6b, 0x44, 0x82, 0xae, 0x90, 0x69, 0x66, 0x5c, 0x4f, 0x47, 0xed, 0xd2, 0x41, 0xc1,
0x6f, 0x8e, 0x74, 0x21, 0x12, 0x9c, 0x1f, 0xd8, 0x58, 0x47, 0x93, 0x16, 0x90, 0xd3, 0xe5, 0xfe,
0xdc, 0x0f, 0x14, 0xea, 0xb9, 0xa7, 0x10, 0x80, 0xca, 0xcb, 0xfc, 0x75, 0xb9, 0x98, 0x36, 0x0b,
0xa4, 0x0a, 0xe5, 0xd9, 0xdb, 0x92, 0x37, 0x1d, 0x72, 0x09, 0xa5, 0xa7, 0xf1, 0x47, 0xb3, 0x38,
0x19, 0x7d, 0x0e, 0x22, 0x61, 0xd6, 0xd9, 0x8a, 0xfa, 0x2a, 0x61, 0xf1, 0x3e, 0x34, 0xec, 0xfc,
0xf5, 0x11, 0x4a, 0x96, 0xae, 0x1e, 0x23, 0xc5, 0xf2, 0x6d, 0xac, 0x2a, 0xb6, 0x85, 0xd1, 0x4f,
0x00, 0x00, 0x00, 0xff, 0xff, 0x8f, 0x31, 0x4f, 0x38, 0xa6, 0x01, 0x00, 0x00,
}
|
from typing import Any, Dict, Set
from dagster import PipelineDefinition, PipelineRun, SolidDefinition, check
from dagster.core.definitions.dependency import Node, NodeHandle
from dagster.core.execution.context.compute import AbstractComputeExecutionContext
from dagster.core.execution.context.system import PlanExecutionContext, StepExecutionContext
from dagster.core.log_manager import DagsterLogManager
from dagster.core.system_config.objects import ResolvedRunConfig
class DagstermillExecutionContext(AbstractComputeExecutionContext):
    """Dagstermill-specific execution context.

    Do not initialize directly: use :func:`dagstermill.get_context`.
    """

    def __init__(
        self,
        pipeline_context: PlanExecutionContext,
        pipeline_def: PipelineDefinition,
        resource_keys_to_init: Set[str],
        solid_name: str,
        solid_handle: NodeHandle,
        solid_config: Any = None,
    ):
        self._pipeline_context = check.inst_param(
            pipeline_context, "pipeline_context", PlanExecutionContext
        )
        self._pipeline_def = check.inst_param(pipeline_def, "pipeline_def", PipelineDefinition)
        self._resource_keys_to_init = check.set_param(
            resource_keys_to_init, "resource_keys_to_init", of_type=str
        )
        self.solid_name = check.str_param(solid_name, "solid_name")
        self.solid_handle = check.inst_param(solid_handle, "solid_handle", NodeHandle)
        # Explicit per-solid config override; may legitimately be falsy
        # (e.g. 0 or {}), so presence is tested with `is not None` below.
        self._solid_config = solid_config

    def has_tag(self, key: str) -> bool:
        """Check if a logging tag is defined on the context.

        Args:
            key (str): The key to check.

        Returns:
            bool
        """
        check.str_param(key, "key")
        return self._pipeline_context.has_tag(key)

    def get_tag(self, key: str) -> str:
        """Get a logging tag defined on the context.

        Args:
            key (str): The key to get.

        Returns:
            str
        """
        check.str_param(key, "key")
        return self._pipeline_context.get_tag(key)

    @property
    def run_id(self) -> str:
        """str: The run_id for the context."""
        return self._pipeline_context.run_id

    @property
    def run_config(self) -> Dict[str, Any]:
        """dict: The run_config for the context."""
        return self._pipeline_context.run_config

    @property
    def resolved_run_config(self) -> ResolvedRunConfig:
        """:class:`dagster.ResolvedRunConfig`: The resolved_run_config for the context"""
        return self._pipeline_context.resolved_run_config

    @property
    def logging_tags(self) -> Dict[str, str]:
        """dict: The logging tags for the context."""
        return self._pipeline_context.logging_tags

    @property
    def pipeline_name(self) -> str:
        """str: The name of the pipeline being executed."""
        return self._pipeline_context.pipeline_name

    @property
    def pipeline_def(self) -> PipelineDefinition:
        """:class:`dagster.PipelineDefinition`: The pipeline definition for the context.

        This will be a dagstermill-specific shim.
        """
        return self._pipeline_def

    @property
    def resources(self) -> Any:
        """collections.namedtuple: A dynamically-created type whose properties allow access to
        resources."""
        return self._pipeline_context.scoped_resources_builder.build(
            required_resource_keys=self._resource_keys_to_init,
        )

    @property
    def pipeline_run(self) -> PipelineRun:
        """:class:`dagster.PipelineRun`: The pipeline run for the context."""
        return self._pipeline_context.pipeline_run

    @property
    def log(self) -> DagsterLogManager:
        """:class:`dagster.DagsterLogManager`: The log manager for the context.

        Call, e.g., ``log.info()`` to log messages through the Dagster machinery.
        """
        return self._pipeline_context.log

    @property
    def solid_def(self) -> SolidDefinition:
        """:class:`dagster.SolidDefinition`: The solid definition for the context.

        In interactive contexts, this may be a dagstermill-specific shim, depending whether a
        solid definition was passed to ``dagstermill.get_context``.
        """
        return self.pipeline_def.solid_def_named(self.solid_name)

    @property
    def solid(self) -> Node:
        """:class:`dagster.Node`: The solid for the context.

        In interactive contexts, this may be a dagstermill-specific shim, depending whether a
        solid definition was passed to ``dagstermill.get_context``.
        """
        return self.pipeline_def.get_solid(self.solid_handle)

    @property
    def solid_config(self) -> Any:
        """collections.namedtuple: A dynamically-created type whose properties allow access to
        solid-specific config."""
        # Bug fix: the previous truthiness test (`if self._solid_config:`)
        # silently ignored explicitly-passed falsy configs (0, {}, "", False)
        # and fell through to the resolved run config. Use an identity check
        # so only an absent (None) override triggers the fallback.
        if self._solid_config is not None:
            return self._solid_config
        solid_config = self.resolved_run_config.solids.get(self.solid_name)
        return solid_config.config if solid_config else None
class DagstermillRuntimeExecutionContext(DagstermillExecutionContext):
    """Execution context used when a dagstermill solid runs as part of a real
    pipeline execution (as opposed to interactively), carrying the per-step
    :class:`StepExecutionContext` in addition to the plan-level context."""

    def __init__(
        self,
        pipeline_context: PlanExecutionContext,
        pipeline_def: PipelineDefinition,
        resource_keys_to_init: Set[str],
        solid_name: str,
        step_context: StepExecutionContext,
        solid_handle: NodeHandle,
        solid_config: Any = None,
    ):
        # Context of the currently executing step; validated before delegating
        # the rest of the construction to the base class.
        self._step_context = check.inst_param(step_context, "step_context", StepExecutionContext)
        super().__init__(
            pipeline_context,
            pipeline_def,
            resource_keys_to_init,
            solid_name,
            solid_handle,
            solid_config,
        )
|
// Define a structure to hold information about the drawn rectangles.
// Coordinates are stored relative to the canvas origin so the canvas can move
// without invalidating already-drawn rectangles.
struct DrawnRectangle {
    ImVec2 start;        // Starting position of the rectangle (canvas-relative)
    ImVec2 end;          // Ending position of the rectangle (canvas-relative)
    ImColor fillColor;   // Filled color of the rectangle
    ImColor borderColor; // Border color of the rectangle
};
std::vector<DrawnRectangle> drawnRectangles; // Store the drawn rectangles
// Inside the ImGui rendering loop or appropriate callback.
// NOTE(review): ImGui::InvisibleButton() returns true only on click/release
// frames, so gating the drag handling on its return value likely skips most
// drag frames; the common pattern is to call InvisibleButton() and then test
// IsItemActive()/IsMouseDragging() unconditionally — confirm intended behavior.
// NOTE(review): a rectangle is push_back'ed on every frame that passes the
// checks, so a single drag gesture can append many overlapping rectangles;
// presumably only the final rectangle per gesture is wanted — verify.
if (ImGui::InvisibleButton("canvas", canvas_size)) {
    if (ImGui::IsItemActive() && ImGui::IsMouseDragging(0)) {
        ImVec2 mousePos = ImGui::GetMousePos();
        // Only record points while the cursor is inside the canvas bounds.
        if (mousePos.x >= canvas_pos.x && mousePos.x <= canvas_pos.x + canvas_size.x &&
            mousePos.y >= canvas_pos.y && mousePos.y <= canvas_pos.y + canvas_size.y) {
            // Create a new rectangle and add it to the list of drawn rectangles.
            // start = position of the initial left-click, end = current mouse
            // position, both translated into canvas space.
            DrawnRectangle newRect;
            newRect.start = ImVec2(ImGui::GetIO().MouseClickedPos[0].x - canvas_pos.x, ImGui::GetIO().MouseClickedPos[0].y - canvas_pos.y);
            newRect.end = ImVec2(mousePos.x - canvas_pos.x, mousePos.y - canvas_pos.y);
            newRect.fillColor = ImColor(rand() % 256, rand() % 256, rand() % 256); // Random filled color
            newRect.borderColor = ImColor(255, 255, 255); // White border color
            drawnRectangles.push_back(newRect);
        }
    }
}
// Draw the stored rectangles, translating canvas-relative coordinates back to
// screen space for the draw list.
for (const auto& rect : drawnRectangles) {
    draw_list->AddRectFilled(rect.start + canvas_pos, rect.end + canvas_pos, rect.fillColor);
    draw_list->AddRect(rect.start + canvas_pos, rect.end + canvas_pos, rect.borderColor);
}
#!/bin/bash
# Spin up N local ethermint nodes, run the Go rpc/e2e test suites against them,
# then shut everything down.
# "stable" mode tests assume data is static
# "live" mode tests assume data dynamic
SCRIPT=$(basename ${BASH_SOURCE[0]})
TEST=""
QTD=1
SLEEP_TIMEOUT=5
TEST_QTD=1
#PORT AND RPC_PORT 3 initial digits, to be concat with a suffix later when node is initialized
RPC_PORT="854"
IP_ADDR="0.0.0.0"
MODE="rpc"
KEY="mykey"
CHAINID="ethermint_9000-1"
MONIKER="mymoniker"
## default port prefixes for ethermintd
NODE_P2P_PORT="2660"
NODE_PORT="2663"
NODE_RPC_PORT="2666"
usage() {
    echo "Usage: $SCRIPT"
    echo "Optional command line arguments"
    echo "-t <string>   -- Test to run. eg: rpc"
    echo "-q <number>   -- Quantity of nodes to run. eg: 3"
    echo "-z <number>   -- Quantity of nodes to run tests against eg: 3"
    echo "-s <number>   -- Sleep between operations in secs. eg: 5"
    exit 1
}
while getopts "h?t:q:z:s:" args; do
    case $args in
        h|\?)
            usage;
            exit;;
        t ) TEST=${OPTARG};;
        q ) QTD=${OPTARG};;
        z ) TEST_QTD=${OPTARG};;
        s ) SLEEP_TIMEOUT=${OPTARG};;
    esac
done
# Fail fast (and loudly, -x traces every command) during setup.
set -euxo pipefail
DATA_DIR=$(mktemp -d -t ethermint_9000-datadir.XXXXX)
if [[ ! "$DATA_DIR" ]]; then
    echo "Could not create $DATA_DIR"
    exit 1
fi
DATA_CLI_DIR=$(mktemp -d -t ethermint_9000-cli-datadir.XXXXX)
if [[ ! "$DATA_CLI_DIR" ]]; then
    echo "Could not create $DATA_CLI_DIR"
    exit 1
fi
# Compile ethermint
echo "compiling ethermint"
make build-ethermint
# PID array declaration (node process PIDs, filled by start_func)
arr=()
# PID arraycli declaration
# NOTE(review): arrcli is declared but never populated anywhere in this script,
# so the corresponding shutdown loop below is a no-op — confirm whether a CLI
# process was meant to be started and tracked here.
arrcli=()
# Initialise node $1: create its key, init the chain home directory, allocate
# genesis funds, sign and collect the genesis transaction, then validate the
# resulting genesis file. Uses the per-node home "$DATA_DIR$i".
# Arguments:
#   $1 - node index (also consumed via the global $i)
init_func() {
    echo "create and add new keys"
    "$PWD"/build/ethermintd keys add $KEY"$i" --home "$DATA_DIR$i" --no-backup --chain-id $CHAINID --algo "eth_secp256k1" --keyring-backend test
    echo "init Ethermint with moniker=$MONIKER and chain-id=$CHAINID"
    "$PWD"/build/ethermintd init $MONIKER --chain-id $CHAINID --home "$DATA_DIR$i"
    echo "prepare genesis: Allocate genesis accounts"
    "$PWD"/build/ethermintd add-genesis-account \
    "$("$PWD"/build/ethermintd keys show "$KEY$i" -a --home "$DATA_DIR$i" --keyring-backend test)" 1000000000000000000aphoton,1000000000000000000stake \
    --home "$DATA_DIR$i" --keyring-backend test
    echo "prepare genesis: Sign genesis transaction"
    "$PWD"/build/ethermintd gentx $KEY"$i" 1000000000000000000stake --keyring-backend test --home "$DATA_DIR$i" --keyring-backend test --chain-id $CHAINID
    echo "prepare genesis: Collect genesis tx"
    "$PWD"/build/ethermintd collect-gentxs --home "$DATA_DIR$i"
    echo "prepare genesis: Run validate-genesis to ensure everything worked and that the genesis file is setup correctly"
    "$PWD"/build/ethermintd validate-genesis --home "$DATA_DIR$i"
}
# Start node $1 in the background with per-node ports (prefix + node index),
# log its output to "$DATA_DIR"/node$i.log, and record its PID in the global
# `arr` array for later shutdown. The process is disowned so it survives the
# shell's job control.
# Arguments:
#   $1 - node index (also consumed via the global $i)
start_func() {
    echo "starting ethermint node $i in background ..."
    "$PWD"/build/ethermintd start --pruning=nothing --rpc.unsafe \
    --p2p.laddr tcp://$IP_ADDR:$NODE_P2P_PORT"$i" --address tcp://$IP_ADDR:$NODE_PORT"$i" --rpc.laddr tcp://$IP_ADDR:$NODE_RPC_PORT"$i" \
    --json-rpc.address=$IP_ADDR:$RPC_PORT"$i" \
    --keyring-backend test --home "$DATA_DIR$i" \
    >"$DATA_DIR"/node"$i".log 2>&1 & disown
    ETHERMINT_PID=$!
    echo "started ethermint node, pid=$ETHERMINT_PID"
    # add PID to array
    arr+=("$ETHERMINT_PID")
}
# Run node with static blockchain database.
# Initialise and launch each of the QTD nodes, pausing so endpoints are
# listening before the next node (or the tests) start.
for i in $(seq 1 "$QTD"); do
    init_func "$i"
    start_func "$i"
    sleep 1
    echo "sleeping $SLEEP_TIMEOUT seconds for startup"
    sleep "$SLEEP_TIMEOUT"
    echo "done sleeping"
done
echo "sleeping $SLEEP_TIMEOUT seconds before running tests ... "
sleep "$SLEEP_TIMEOUT"
echo "done sleeping"
# Tests may legitimately fail; disable errexit so we can record exit codes and
# still shut the nodes down afterwards.
set +e
# Track the test exit status. Bug fix: previously RPC_FAIL was assigned only
# after the second `go test`, so a failing e2e suite followed by a passing rpc
# suite reported success, and RPC_FAIL stayed unset (tripping `set -u` later)
# when the suite was skipped. Record the status of every invocation instead.
RPC_FAIL=0
if [[ -z $TEST || $TEST == "rpc" ]]; then
    for i in $(seq 1 "$TEST_QTD"); do
        HOST_RPC=http://$IP_ADDR:$RPC_PORT"$i"
        echo "going to test ethermint node $HOST_RPC ..."
        MODE=$MODE HOST=$HOST_RPC go test ./tests/e2e/... -timeout=300s -v -short || RPC_FAIL=$?
        MODE=$MODE HOST=$HOST_RPC go test ./tests/rpc/... -timeout=300s -v -short || RPC_FAIL=$?
    done
fi
# Terminate a node and wait for it to exit.
# Arguments:
#   $1 - PID of the process to stop
stop_func() {
    # Bug fix: the PID was previously read from the global loop variable $i
    # instead of the function argument; use $1 so the function is self-contained.
    ETHERMINT_PID=$1
    echo "shutting down node, pid=$ETHERMINT_PID ..."
    # Shutdown ethermint node. The processes were disowned when spawned, so
    # `wait` can fail with "not a child of this shell"; guard both commands so
    # cleanup always proceeds to the next PID.
    kill -9 "$ETHERMINT_PID" 2>/dev/null
    wait "$ETHERMINT_PID" 2>/dev/null || true
}
# arrcli is never populated in this script, so this loop is currently a no-op;
# kept for parity with variants that track separate CLI processes.
for i in "${arrcli[@]}"; do
    stop_func "$i"
done
for i in "${arr[@]}"; do
    stop_func "$i"
done
# Propagate the recorded test status; default to 0 so the check is safe under
# `set -u` even if the test section above was skipped or modified.
if [[ (-z $TEST || $TEST == "rpc") && ${RPC_FAIL:-0} -ne 0 ]]; then
    exit "${RPC_FAIL:-0}"
else
    exit 0
fi
|
#!/bin/bash
#
# Library for file system actions
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/liblog.sh
# Functions
########################
# Ensure a file/directory is owned (user and group) by the given user
# Arguments:
#   $1 - filepath
#   $2 - owner (user)
#   $3 - group (optional; when omitted, a group named after the owner is used)
# Returns:
#   None
#########################
owned_by() {
    local path="${1:?path is missing}"
    local owner="${2:?owner is missing}"
    local group="${3:-}"
    if [[ -n $group ]]; then
        chown "$owner":"$group" "$path"
    else
        # No explicit group: fall back to the owner's self-named group.
        chown "$owner":"$owner" "$path"
    fi
}
########################
# Ensure a directory exists and, optionally, is owned by the given user
# Arguments:
#   $1 - directory
#   $2 - owner user (optional; when set, ownership is applied via owned_by)
#   $3 - owner group (optional; forwarded to owned_by)
# Returns:
#   None
#########################
ensure_dir_exists() {
    local dir="${1:?directory is missing}"
    local owner_user="${2:-}"
    local owner_group="${3:-}"
    mkdir -p "${dir}"
    if [[ -n $owner_user ]]; then
        owned_by "$dir" "$owner_user" "$owner_group"
    fi
}
########################
# Checks whether a directory is empty or not.
# A non-existent path also counts as "empty".
# arguments:
#   $1 - directory
# returns:
#   boolean (exit status: 0 when empty/missing, 1 otherwise)
#########################
is_dir_empty() {
    local -r path="${1:?missing directory}"
    # Calculate real path in order to avoid issues with symlinks
    local -r dir="$(realpath "$path")"
    # Empty when the path does not exist or `ls -A` produces no entries.
    [[ ! -e "$dir" || -z "$(ls -A "$dir")" ]]
}
########################
# Checks whether a mounted directory is empty or not.
# Unlike is_dir_empty, housekeeping entries that storage backends create on
# fresh mounts (".snapshot", "lost+found") are ignored.
# arguments:
#   $1 - directory
# returns:
#   boolean (exit status)
#########################
is_mounted_dir_empty() {
    local -r path="${1:?missing directory}"
    # Calculate real path in order to avoid issues with symlinks
    local -r dir="$(realpath "$path")"
    # The `find ... -exec false {} +` trick makes `find` exit non-zero as soon
    # as any entry other than .snapshot/lost+found exists, so the branch is
    # taken only when the directory is empty apart from those entries.
    if is_dir_empty "$dir" || find "$dir" -mindepth 1 -maxdepth 1 -not -name ".snapshot" -not -name "lost+found" -exec false {} +; then
        true
    else
        false
    fi
}
########################
# Checks whether a file can be written to or not.
# An existing file must itself be writable; a not-yet-existing file is
# considered writable when its parent directory exists and is writable.
# arguments:
#   $1 - file
# returns:
#   boolean (exit status)
#########################
is_file_writable() {
    local file="${1:?missing file}"
    local dir
    dir="$(dirname "$file")"
    [[ (-f "$file" && -w "$file") || (! -f "$file" && -d "$dir" && -w "$dir") ]]
}
########################
# Relativize a path: print $1 expressed relative to $2 when it lies under $2,
# with a leading "/" stripped and "/" itself rendered as ".".
# arguments:
#   $1 - path
#   $2 - base
# returns:
#   None (prints the relativized path to stdout)
#########################
relativize() {
    local -r path="${1:?missing path}"
    local -r base="${2:?missing base}"
    # Run from the base directory so relative inputs resolve against it; the
    # sed pass normalizes realpath's output ("/" -> ".", drop leading "/").
    pushd "$base" >/dev/null || exit
    realpath -q --no-symlinks --relative-base="$base" "$path" | sed -e 's|^/$|.|' -e 's|^/||'
    popd >/dev/null || exit
}
########################
# Configure permisions and ownership recursively
# Globals:
#   None
# Arguments:
#   $1 - paths (as a space-separated string).
# Flags:
#   -f|--file-mode - mode for files.
#   -d|--dir-mode - mode for directories.
#   -u|--user - user
#   -g|--group - group
# Returns:
#   None
#########################
configure_permissions_ownership() {
    local -r paths="${1:?paths is missing}"
    local dir_mode=""
    local file_mode=""
    local user=""
    local group=""
    # Validate arguments
    shift 1
    while [ "$#" -gt 0 ]; do
        case "$1" in
            -f | --file-mode)
                shift
                file_mode="${1:?missing mode for files}"
                ;;
            -d | --dir-mode)
                shift
                dir_mode="${1:?missing mode for directories}"
                ;;
            -u | --user)
                shift
                user="${1:?missing user}"
                ;;
            -g | --group)
                shift
                group="${1:?missing group}"
                ;;
            *)
                echo "Invalid command line flag $1" >&2
                return 1
                ;;
        esac
        shift
    done
    # Word-split the paths argument and process each entry that exists.
    read -r -a filepaths <<<"$paths"
    for p in "${filepaths[@]}"; do
        if [[ -e "$p" ]]; then
            # Apply modes separately to directories and files (following symlinks).
            if [[ -n $dir_mode ]]; then
                find -L "$p" -type d -exec chmod "$dir_mode" {} \;
            fi
            if [[ -n $file_mode ]]; then
                find -L "$p" -type f -exec chmod "$file_mode" {} \;
            fi
            # Ownership: chown when a user is given, chgrp when only a group is.
            if [[ -n $user ]] && [[ -n $group ]]; then
                chown -LR "$user":"$group" "$p"
            elif [[ -n $user ]] && [[ -z $group ]]; then
                chown -LR "$user" "$p"
            elif [[ -z $user ]] && [[ -n $group ]]; then
                chgrp -LR "$group" "$p"
            fi
        else
            stderr_print "$p does not exist"
        fi
    done
}
|
const bot = require('./telegram')
const coin_service = require('../services/coin_service')

// Main menu attached to every reply; each callback_data value is dispatched
// by the 'callback_query' handler below.
const inline_keyboard = [
    [
        { text: "Convert coins", callback_data: "CONVERT_COIN" },
        { text: "Get coin info", callback_data: "COIN_INFO" },
        { text: "Get coin prices", callback_data: "COIN_PRICE"}
    ]
]

// /start: greet the user and present the main menu.
bot.onText(/\/start/, (message) => {
    bot.sendMessage(message.chat.id, 'Hello, I can do these things', {
        reply_markup: { inline_keyboard }
    })
})

// Route menu button presses to the matching conversation flow.
bot.on('callback_query', query => {
    const { message: { chat, message_id, text } = {} } = query
    switch (query.data) {
        case 'CONVERT_COIN':
            conversion_path(query)
            break;
        case 'COIN_INFO':
            info_path(query)
            break;
        case 'COIN_PRICE':
            price_path(query)
            break;
        default:
            // Unknown callback_data values are deliberately ignored.
    }
})
// CONVERT_COIN flow: ask which amount/coin to convert from, then which coin to
// convert to, call the coin service and reply with the result (or the error).
// Bug fix: the success check used `typeof result.result !== 'Error'`, which is
// always true because `typeof` returns 'object' for Error instances, so service
// errors were formatted as successful conversions. Use `instanceof Error`.
const conversion_path = function (query) {
    bot.sendMessage(query.message.chat.id,`
which coin you want to convert?
you can enter the amount too
you can also use 'USD' for converting to and from us dollars
eg:- 1.12 eth
`, {
        reply_markup: { force_reply: true }
    })
    .then(from_coin => {
        // Wait for the reply naming the source amount/ticker.
        bot.onReplyToMessage(from_coin.chat.id, from_coin.message_id, from_coin_message => {
            const from_coin_text = from_coin_message.text
            bot.sendMessage(from_coin.chat.id, 'To which coin you want to convert to ?', {
                reply_markup: {
                    force_reply: true
                }
            })
            .then(to_coin => {
                // Wait for the reply naming the target ticker, then convert.
                bot.onReplyToMessage(to_coin.chat.id, to_coin.message_id, async function (to_coin_message) {
                    const to_coin_text = to_coin_message.text
                    const result = await coin_service.convert(from_coin_text, to_coin_text)
                    let message_body = ""
                    if (!(result.result instanceof Error)) {
                        message_body = `${result.from.amount} ${result.from.ticker} is equal to ${result.result} ${result.to.ticker}`
                    } else {
                        message_body = result.result.message
                    }
                    // Reply with the outcome and re-show the main menu.
                    bot.sendMessage(to_coin.chat.id, message_body, {
                        reply_markup: { inline_keyboard }
                    })
                })
            })
            .catch(console.log)
        })
    })
    .catch(console.log)
}
// COIN_INFO flow: ask for a coin identifier and reply with detailed market
// information from the coin service.
// Bug fix: `typeof info !== 'Error'` is always true (`typeof` never yields
// 'Error'), so error objects were interpolated into the info template. Use
// `instanceof Error` to detect failures.
const info_path = function (query) {
    bot.sendMessage(query.message.chat.id, `About which crypto you want to know more about`, {
        reply_markup: {
            force_reply: true
        }
    })
    .then((info_coin) => {
        bot.onReplyToMessage(info_coin.chat.id, info_coin.message_id, async function (info_coin_message) {
            const coin = info_coin_message.text
            const info = await coin_service.info(coin)
            let message_body = ""
            if (!(info instanceof Error)) {
                message_body = `
Info for ${info.id} are
Name : ${info.name}
Symbol : ${info.symbol}
Rank : ${info.rank}
Price USD : ${info.price_usd}
price BTC : ${info.price_btc}
Market Cap : ${info.market_cap_usd}
Max Supply : ${info.max_supply}
`
            } else {
                message_body = info.message
            }
            // Reply and re-show the main menu.
            bot.sendMessage(info_coin.chat.id, message_body, {
                reply_markup: { inline_keyboard }
            })
        })
    })
    .catch(console.log);
}
// COIN_PRICE flow: ask for a coin identifier and reply with its USD/BTC price.
// Bug fix: `typeof info !== 'Error'` is always true (`typeof` never yields
// 'Error'), so error objects were interpolated into the price template. Use
// `instanceof Error` to detect failures.
const price_path = function (query) {
    bot.sendMessage(query.message.chat.id, "About which crypto's you want to know more about", {
        reply_markup: {
            force_reply: true
        }
    })
    .then((info_coin) => {
        bot.onReplyToMessage(info_coin.chat.id, info_coin.message_id, async function (info_coin_message) {
            const coin = info_coin_message.text
            const info = await coin_service.info(coin)
            let message_body = ""
            if (!(info instanceof Error)) {
                message_body = `
Info for ${info.id} are
Price USD : ${info.price_usd}
price BTC : ${info.price_btc}
`
            } else {
                message_body = info.message
            }
            // Reply and re-show the main menu.
            bot.sendMessage(info_coin.chat.id, message_body, {
                reply_markup: { inline_keyboard }
            })
        })
    })
    .catch(console.log);
}
module.exports = bot |
<reponame>PrakarshJhajharia/Learn-India-App
import firebase from 'firebase';
require('@firebase/firestore')

// Firebase project configuration.
// NOTE: the apiKey placeholder "<KEY>" was redacted from this dump; supply the
// real key (ideally from environment configuration rather than source).
var firebaseConfig = {
    apiKey: "<KEY>",
    authDomain: "book-santa-app-34b49.firebaseapp.com",
    databaseURL: "https://book-santa-app-34b49.firebaseio.com",
    projectId: "book-santa-app-34b49",
    storageBucket: "book-santa-app-34b49.appspot.com",
    messagingSenderId: "801811624161",
    appId: "1:801811624161:web:53354bc728f492357407a0"
};
// Initialize Firebase
firebase.initializeApp(firebaseConfig);
export default firebase.firestore(); |
class CustomTrainingLoop:
    """Hook container for a hand-rolled training loop.

    Each ``before_*``/``after_*`` method is a no-op extension point intended
    to be overridden (or filled in) by users; only ``after_train_epoch``
    carries state, advancing the completed-epoch counter.
    """

    def __init__(self):
        # Number of training epochs completed so far.
        self.epoch = 0

    def before_train_iter(self):
        """Hook invoked before each training iteration.

        Example uses: reset gradients, update the learning rate.
        """

    def after_train_iter(self):
        """Hook invoked after each training iteration.

        Example uses: log training metrics, update a progress bar.
        """

    def before_train_epoch(self):
        """Hook invoked before each training epoch.

        Example uses: shuffle training data, reset epoch-specific state.
        """

    def after_train_epoch(self):
        """Hook invoked after each training epoch.

        Example uses: evaluate on validation data, save checkpoints.
        Increments the completed-epoch counter; overrides should call
        ``super().after_train_epoch()`` to keep the counter accurate.
        (A dead ``pass`` statement after the increment was removed.)
        """
        self.epoch += 1

    @property
    def property_example(self):
        """Example read-only property; placeholder that returns ``None``."""
        return None
import React from "react";
import {connect} from "react-redux";
import PostListElement from "./PostListElement";
import { FormGroup, ControlLabel, FormControl } from "react-bootstrap";
// Renders a status filter plus the 15 most recent posts from the filtered list.
export class PostList extends React.Component {
    constructor(props) {
        super(props);
        this.filterByStatus = this.filterByStatus.bind(this);
    }
    // NOTE(review): componentWillMount is deprecated in React 16.3+ (renamed
    // UNSAFE_componentWillMount); consider moving this dispatch to
    // componentDidMount — confirm no double-render concerns before changing.
    componentWillMount() {
        this.props.dispatch({type: 'FILTERED_POSTS_FILTER_BY_STATUS', posts: this.props.posts});
    }
    render() {
        let filtered_posts = this.props.filtered_posts;
        // Copy before sorting so the Redux-owned array is not mutated in place.
        let sorted_posts = filtered_posts.slice();
        // Sort newest-first. NOTE(review): spreading created_at.split('-') into
        // the Date constructor treats the middle component as a 0-indexed month
        // (e.g. "2020-05-01" parses as June 1). The mapping is order-preserving,
        // so the sort is consistent, but verify if these Dates are ever shown.
        sorted_posts.sort((a, b) => new Date(...b.created_at.split('-')) - new Date(...a.created_at.split('-')));
        // Show at most the 15 newest posts.
        let limited_posts = sorted_posts.slice(0, 15);
        return (
            <div>
                <FormGroup controlId="formControlsSelect">
                    <ControlLabel>Филтрирай по:</ControlLabel>
                    <FormControl componentClass="select" placeholder="избери..." onChange={this.filterByStatus}>
                        <option value="">всички</option>
                        <option value="activated">активирани</option>
                        <option value="deactivated">неактивирани</option>
                    </FormControl>
                </FormGroup>
                {limited_posts.map((post, index) => {
                    return (
                        <PostListElement key={index} post={post}/>
                    );
                })}
            </div>
        );
    }
    // Dispatch a re-filter of the full post list by the selected status value.
    filterByStatus(e) {
        this.props.dispatch({
            type: 'POSTS_FILTER_BY_STATUS',
            posts: this.props.posts,
            status: e.target.value,
        });
    }
}
// Expose only the store slices PostList consumes: the full post list and
// the currently filtered subset.
function mapStateToProps(state) {
  const { posts, filtered_posts } = state;
  return { posts, filtered_posts };
}
export default connect(mapStateToProps)(PostList);
|
<reponame>mollerse/node-fsharp
'use strict';
var os = require('os');
var path = require('path');
var which = require('which').sync;
var fs = require('fs');
var Duplex = require('stream').Duplex;
var cp = require('child_process');
var util = require('util');
// Interpreter names per platform: Mono ships `fsharpi` on Linux/macOS,
// the Windows F# SDK ships `fsi`.
var nixExecutable = 'fsharpi';
var windowsExecutable = 'fsi';
// Fallback path for F# 3.0 when fsi.exe is not on PATH (32-bit Program Files).
var windowsDefaultInstallLocation = [
  process.env['PROGRAMFILES(X86)'],
  'Microsoft SDKs',
  'F#',
  '3.0',
  'Framework',
  'v4.0',
  'fsi.exe'
].join(path.sep);
// Resolve the path to the F# interactive executable for the current OS.
// Throws when the OS is unsupported or no executable can be found.
var locateExecutable = function () {
  var osType = os.type();
  if (osType === 'Linux' || osType === 'Darwin') {
    return which(nixExecutable);
  }
  if (osType === 'Windows_NT') {
    // Prefer an fsi on PATH; fall back to the default SDK install location.
    try {
      return which(windowsExecutable);
    } catch (err) {
      if (fs.existsSync(windowsDefaultInstallLocation)) {
        return windowsDefaultInstallLocation;
      }
      throw err;
    }
  }
  throw new Error('OS not supported: ' + osType);
};
// Thin wrapper around child_process.spawn so the call site stays stubbable.
var spawnFsharpi = function (binPath, spawnArgs) {
  return cp.spawn(binPath, spawnArgs);
};
// Duplex stream that pipes data through an F# script: writes go to the
// child interpreter's stdin, reads come from its stdout.
// `param` is either the script path (string) or an options object with
// { path, executable?, args? } plus any Duplex options.
var Fsharp = function (param) {
  //Allow not using new
  if (!(this instanceof Fsharp)) {
    return new Fsharp(param);
  }
  if (!param || (typeof param !== 'string' && !param.path)) {
    throw new Error('Path to script (*.fsx file) is required');
  }
  // Normalize to an options object; copy so the caller's object is not mutated.
  var opts = (typeof param === 'string' ? {path: param} : util._extend({}, param));
  opts.executable = opts.executable || locateExecutable();
  opts.args = opts.args || [];
  Duplex.call(this, opts);
  // Spawn the interpreter with the script path followed by user args.
  var fsharpi = spawnFsharpi(opts.executable, [opts.path].concat(opts.args));
  var readable = this._readable = fsharpi.stdout;
  var writable = this._writable = fsharpi.stdin;
  var _this = this;
  // Accumulate stderr so it can be attached to a non-zero-exit error below.
  var err = '';
  fsharpi.stderr.on('data', function (buf) {
    err += buf;
  });
  // Propagate end-of-stream in both directions between this Duplex and
  // the child's stdin.
  writable.once('finish', function () {
    _this.end();
  });
  this.once('finish', function () {
    writable.end();
  });
  // Forward child stdout into this stream, honoring backpressure.
  readable.on('data', function (e) {
    if (!_this.push(e)) {
      readable.pause();
    }
  });
  readable.once('end', function () {
    return _this.push(null);
  });
  // Re-emit child stream errors and drain on this Duplex.
  writable.on('error', function (err) {
    return _this.emit('error', err);
  });
  readable.on('error', function (err) {
    return _this.emit('error', err);
  });
  writable.on('drain', function () {
    return _this.emit('drain');
  });
  // A non-zero exit becomes an error carrying the full command line and stderr.
  fsharpi.on('close', function (code) {
    if (code === 0) { return; }
    return _this.emit('error', new Error('non-zero exit code ' + code + '\n running: ' + opts.executable + ' ' + [opts.path].concat(opts.args).join(' ') + '\n\n ' + err));
  });
};
// Wire up Duplex inheritance BEFORE attaching prototype methods —
// util.inherits replaces Fsharp.prototype, so the order matters here.
util.inherits(Fsharp, Duplex);
// Duplex _write hook: forward incoming chunks to the child's stdin.
Fsharp.prototype._write = function (input, encoding, done) {
  this._writable.write(input, encoding, done);
};
// Duplex _read hook: downstream wants more data — resume the paused stdout.
Fsharp.prototype._read = function () {
  this._readable.resume();
};
module.exports = Fsharp;
<filename>src/training/design/E855_Medium_ExamRoom.java
package training.design;
import org.junit.jupiter.api.Test;
import java.util.*;
import java.util.function.IntFunction;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* 855. 考场就座: https://leetcode-cn.com/problems/exam-room/
*
* 在考场里,一排有 N 个座位,分别编号为 0, 1, 2, ..., N-1。
*
* 当学生进入考场后,他必须坐在能够使他与离他最近的人之间的距离达到最大化的座位上。如果有多个这样的座位,
* 他会坐在编号最小的座位上。(另外,如果考场里没有人,那么学生就坐在 0 号座位上。)
*
* 返回 ExamRoom(int N) 类,它有两个公开的函数:
* - 其中,函数 ExamRoom.seat() 会返回一个 int(整型数据),代表学生坐的位置;
* - 函数 ExamRoom.leave(int p) 代表坐在座位 p 上的学生现在离开了考场。每次调用
* ExamRoom.leave(p) 时都保证有学生坐在座位 p 上。
*
* 例 1:
* 输入:["ExamRoom","seat","seat","seat","seat","leave","seat"], [[10],[],[],[],[],[4],[]]
* 输出:[null,0,9,4,2,null,5]
* 解释:
* ExamRoom(10) -> null
* seat() -> 0,没有人在考场里,那么学生坐在 0 号座位上。
* seat() -> 9,学生最后坐在 9 号座位上。
* seat() -> 4,学生最后坐在 4 号座位上。
* seat() -> 2,学生最后坐在 2 号座位上。
* leave(4) -> null
* seat() -> 5,学生最后坐在 5 号座位上。
*
* 约束:
* - 1 <= N <= 10^9
* - 在所有的测试样例中 ExamRoom.seat() 和 ExamRoom.leave() 最多被调用 10^4 次。
* - 保证在调用 ExamRoom.leave(p) 时有学生正坐在座位 p 上。
*/
public class E855_Medium_ExamRoom {
    // One shared seat/leave scenario, driven through a factory so both
    // implementations (ExamRoom and BetterExamRoom) run identical steps.
    static void test(IntFunction<IExamRoom> factory) {
        IExamRoom examRoom = factory.apply(10);
        assertEquals(0, examRoom.seat());
        assertEquals(9, examRoom.seat());
        assertEquals(4, examRoom.seat());
        assertEquals(2, examRoom.seat());
        examRoom.leave(4);
        assertEquals(5, examRoom.seat());
        // Second scenario: fill the room completely, with interleaved leaves.
        examRoom = factory.apply(10);
        assertEquals(0, examRoom.seat());
        assertEquals(9, examRoom.seat());
        assertEquals(4, examRoom.seat());
        examRoom.leave(0);
        examRoom.leave(4);
        assertEquals(0, examRoom.seat());
        assertEquals(4, examRoom.seat());
        assertEquals(2, examRoom.seat());
        assertEquals(6, examRoom.seat());
        assertEquals(1, examRoom.seat());
        assertEquals(3, examRoom.seat());
        assertEquals(5, examRoom.seat());
        assertEquals(7, examRoom.seat());
        assertEquals(8, examRoom.seat());
        // Leaving and reseating in a full room must return the freed seats.
        examRoom.leave(0);
        examRoom.leave(4);
        assertEquals(0, examRoom.seat());
        assertEquals(4, examRoom.seat());
        examRoom.leave(7);
        assertEquals(7, examRoom.seat());
        examRoom.leave(3);
        assertEquals(3, examRoom.seat());
        examRoom.leave(0);
        examRoom.leave(8);
        assertEquals(0, examRoom.seat());
        assertEquals(8, examRoom.seat());
    }
    @Test
    public void testExamRoom() {
        test(ExamRoom::new);
    }
    @Test
    public void testBetterExamRoom() {
        test(BetterExamRoom::new);
    }
}
/** Contract shared by both exam-room implementations (LeetCode 855). */
interface IExamRoom {
    /** Seats a student maximizing distance to the nearest occupied seat
     *  (smallest index on ties); returns the chosen seat. */
    int seat();
    /** Frees seat {@code p}; a student is guaranteed to be sitting there. */
    void leave(int p);
}
/**
 * LeetCode runtime: 33 ms — faster than 44.26% of submissions.
 * Memory: 39.4 MB — better than 21.31%.
 */
class ExamRoom implements IExamRoom {
    // Occupied seat indices, kept sorted.
    private TreeSet<Integer> seats;
    // Total number of seats in the room.
    private int n;
    public ExamRoom(int n) {
        this.n = n;
        seats = new TreeSet<>();
    }
    /** O(|seats|) scan: pick the seat maximizing the distance to neighbors. */
    @Override
    public int seat() {
        // If nobody is seated, take seat 0.
        if (seats.size() == 0) {
            seats.add(0);
            return 0;
        }
        int result;
        // Distance from the smallest occupied seat to seat 0, and from the
        // largest occupied seat to seat n-1 (the two "edge" candidates).
        int distToStart = seats.first(), distToEnd = n - 1 - seats.last();
        // Scan adjacent occupied pairs and find the largest half-gap.
        Iterator<Integer> iter = seats.iterator();
        int lastSeat = iter.next(), offset = 0, maxCenterDist = 0;
        while (iter.hasNext()) {
            int seat = iter.next(), curDist = (seat - lastSeat) / 2;
            if (curDist > maxCenterDist) {
                offset = lastSeat;
                maxCenterDist = curDist;
            }
            lastSeat = seat;
        }
        // Choose among the best mid-gap seat, seat 0, and seat n-1.
        // Ties go to the smallest seat number — hence "<=" for the start check.
        result = offset + maxCenterDist;
        if (maxCenterDist <= distToStart) {
            result = 0;
            maxCenterDist = distToStart;
        }
        if (maxCenterDist < distToEnd) {
            result = n - 1;
        }
        seats.add(result);
        return result;
    }
    @Override
    public void leave(int p) {
        seats.remove(p);
    }
}
/**
 * Approach from: https://labuladong.gitee.io/algo/5/42/
 *
 * Treat each pair of adjacent students as the endpoints of a line segment.
 * Seating a new student means finding the longest segment and splitting it
 * at its midpoint — the midpoint is the assigned seat. leave(p) removes
 * endpoint p, merging the two adjacent segments into one.
 *
 * LeetCode runtime: 18 ms — faster than 69.18%.
 * Memory: 39.3 MB — better than 25.90%.
 */
class BetterExamRoom implements IExamRoom {
    // Maps endpoint p to the segment whose LEFT endpoint is p.
    private Map<Integer, int[]> starts;
    // Maps endpoint p to the segment whose RIGHT endpoint is p.
    private Map<Integer, int[]> ends;
    // All segments ordered by usable length, shortest first.
    private TreeSet<int[]> lines;
    private int n;
    public BetterExamRoom(int n) {
        this.n = n;
        starts = new HashMap<>();
        ends = new HashMap<>();
        lines = new TreeSet<>((a, b) -> {
            int aDist = distance(a);
            int bDist = distance(b);
            // Equal distances: the segment with the SMALLER seat number
            // must sort as larger so it is picked first.
            if (aDist == bDist)
                return -Integer.compare(a[0], b[0]);
            return Integer.compare(aDist, bDist);
        });
        // Seed with a virtual segment spanning the whole room so the
        // algorithm starts correctly with an empty room.
        int[] initialLine = {-1, n};
        lines.add(initialLine);
    }
    /** Split the longest segment; virtual endpoints -1 / n map to seats 0 / n-1. */
    @Override
    public int seat() {
        int result;
        int[] maxLine = lines.last();
        if (maxLine[0] == -1) {
            result = 0;
        } else if (maxLine[1] == n) {
            result = n - 1;
        } else {
            result = maxLine[0] + (maxLine[1] - maxLine[0]) / 2;
        }
        // Split the chosen segment in two at the new seat.
        removeLine(maxLine);
        addLine(new int[]{maxLine[0], result});
        addLine(new int[]{result, maxLine[1]});
        return result;
    }
    @Override
    public void leave(int p) {
        int[] right = starts.get(p);
        int[] left = ends.get(p);
        // Merge the two segments meeting at p into one.
        removeLine(right);
        removeLine(left);
        addLine(new int[]{left[0], right[1]});
    }
    // Register a segment in the ordered set and both endpoint indexes.
    private void addLine(int[] line) {
        lines.add(line);
        starts.put(line[0], line);
        ends.put(line[1], line);
    }
    // Remove a segment from the ordered set and both endpoint indexes.
    private void removeLine(int[] line) {
        lines.remove(line);
        starts.remove(line[0]);
        ends.remove(line[1]);
    }
    // Usable length of a segment: edge segments count the full distance to
    // the wall; interior segments count the distance from midpoint to endpoint.
    private int distance(int[] line) {
        if (line[0] == -1) {
            return line[1];
        } else if (line[1] == n) {
            return n - line[0] - 1;
        } else {
            // Distance between the midpoint and an endpoint.
            return (line[1] - line[0]) / 2;
        }
    }
}
|
//let { AsyncSeriesHook } = require('tapable');
// Minimal re-implementation of tapable's AsyncSeriesHook: registered taps
// run strictly one after another; each tap receives a `next` continuation.
class AsyncSeriesHook {
  constructor() {
    this.hooks = [];
  }
  // Register a tap. `name` is kept for API parity with tapable but unused.
  tapAsync(name, fn) {
    this.hooks.push(fn);
  }
  // Run all taps in series; the last argument is the final callback.
  // Fix: an error passed by a tap is now forwarded to the final callback
  // (previously `done()` was invoked with no arguments, silently dropping
  // the error — tapable's callAsync passes it through).
  callAsync(...callArgs) {
    const done = callArgs.pop();
    let idx = 0;
    const next = (err) => {
      if (err) return done(err);
      const fn = this.hooks[idx++];
      fn ? fn(...callArgs, next) : done();
    };
    next();
  }
}
// Async series execution demo: three taps of 1s/2s/3s; total cost is the
// sum of the delays because each tap starts only after the previous cb().
let queue = new AsyncSeriesHook(['name']);
console.time('cost');
queue.tapAsync('1', function (name, cb) {
  setTimeout(function () {
    console.log(1, name);
    // cb('')
    cb('Wrong');// passing an error stops the remaining taps from running
  }, 1000);
});
queue.tapAsync('2', function (name, cb) {
  setTimeout(function () {
    console.log(2, name);
    cb();
  }, 2000);
});
queue.tapAsync('3', function (name, cb) {
  setTimeout(function () {
    console.log(3, name);
    cb();
  }, 3000);
});
queue.callAsync('zfpx', () => {
  console.timeEnd('cost');
});
|
# Multiples of 7 from 7 through 98.
list_of_numbers = [7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98]
from app import st
from global_covid_tracker.dataframes import countries_deaths
from global_covid_tracker.plotting import plot_total_deaths, \
plot_deaths_by_country
from global_covid_tracker.content import deaths_content
def deaths():
    """Render the COVID death-data page: a multi-country cumulative chart
    followed by a per-country daily-deaths chart."""
    st.subheader('Death Data')
    st.markdown(deaths_content.introduction)

    # Cumulative deaths, compared across any number of selected countries.
    st.subheader(deaths_content.death_count_data)
    selected_countries = st.multiselect('Countries', countries_deaths,
                                        default=['United States'], key='deaths')
    view_choice = st.radio(
        'How to view deaths?', ['Raw Count', 'Per Million'], key='deaths'
    )
    st.plotly_chart(plot_total_deaths(selected_countries,
                                      view_choice == 'Per Million'))

    # Daily deaths for one selected country.
    st.subheader(deaths_content.death_country_data)
    chosen_country = st.selectbox('Country', countries_deaths, key='deaths')
    st.plotly_chart(plot_deaths_by_country(chosen_country))
|
<filename>frontend/component/three2.tsx<gh_stars>0
import * as THREE from 'three'
import { render } from 'react-dom'
import React, { useRef, useState, useMemo, useEffect } from 'react'
import { Canvas, extend, useThree, useFrame } from '@react-three/fiber'
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer'
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass'
import { UnrealBloomPass } from 'three/examples/jsm/postprocessing/UnrealBloomPass'
// Register the postprocessing classes as JSX elements (<effectComposer/>, etc.).
extend({ EffectComposer, RenderPass, UnrealBloomPass })
// One sphere at (x, y, z) with scale s. Each frame it shifts opposite the
// mouse (parallax) and bobs on z with a per-sphere speed derived from s.
function Sphere({ geometry, x, y, z, s, color }) {
  const ref = useRef()
  useFrame((state) => {
    ref.current.position.x = x - state.mouse.x * 2
    ref.current.position.y = y - state.mouse.y * 2
    ref.current.position.z = z + Math.sin((state.clock.getElapsedTime() * s) / 2)
  })
  // NOTE(review): `color` is also passed to <mesh>, which has no color prop —
  // only the meshStandardMaterial color takes effect; confirm and clean up.
  return (
    <mesh ref={ref} position={[x, y, z]} scale={[s, s, s]} geometry={geometry} color={color} >
      <meshStandardMaterial color={color} roughness={1} />
    </mesh>
  )
}
// Renders `amount` spheres at random positions in a 100-unit cube, all
// sharing one SphereGeometry of radius `startSize`.
function RandomSpheres({color, amount, startSize}) {
  const [geometry] = useState(() => new THREE.SphereGeometry(startSize, 32, 32), [])
  // NOTE(review): the empty dependency array means later changes to `amount`
  // will NOT regenerate the positions — confirm this is intentional.
  const data = useMemo(() => {
    return new Array(amount).fill().map((_, i) => ({
      x: Math.random() * 100 - 50,
      y: Math.random() * 100 - 50,
      z: Math.random() * 100 - 50,
      s: Math.random() + 10,
    }))
  }, [])
  return data.map((props, i) => <Sphere key={i} {...props} geometry={geometry} color={color} />)
}
// Renders its children into a separate scene through an EffectComposer with
// an UnrealBloomPass, so only these children receive the bloom glow.
function Bloom({ children }) {
  const { gl, camera, size } = useThree()
  const [scene, setScene] = useState()
  const composer = useRef()
  // Keep the composer's framebuffer in sync with the canvas size.
  useEffect(() => void scene && composer.current.setSize(size.width, size.height), [size])
  // Priority 1: take over rendering of this scene from the default loop.
  useFrame(() => scene && composer.current.render(), 1)
  return (
    <>
      <scene ref={setScene}>{children}</scene>
      <effectComposer ref={composer} args={[gl]}>
        <renderPass attachArray="passes" scene={scene} camera={camera} />
        <unrealBloomPass attachArray="passes" args={[undefined, 1.5, 1, 0]} />
      </effectComposer>
    </>
  )
}
// Renders its children in a plain (non-bloomed) scene, drawn on top of the
// bloom layer: depth is cleared but color is kept (autoClear off).
function Main({ children }) {
  const scene = useRef()
  const { gl } = useThree()
  const width = window.innerWidth;
  const height = window.innerHeight;
  // NOTE(review): a new PerspectiveCamera is constructed on every render —
  // consider useMemo/useRef so the camera persists across renders.
  const camera = new THREE.PerspectiveCamera(90, width / height, 0.1, 1000);
  // Priority 2: runs after the Bloom pass each frame.
  useFrame(() => {
    gl.autoClear = false
    gl.clearDepth()
    gl.render(scene.current, camera)
  }, 2)
  return <scene ref={scene}>{children}</scene>
}
// Page-level canvas: the same sphere field rendered twice — once plain
// (Main, drawn on top) and once through the bloom composer (Bloom).
export default function Dots() {
  return(
    <Canvas linear camera={{ position: [0, 0, 120] }}>
      <Main>
        <pointLight />
        <ambientLight />
        <RandomSpheres color="rgba(20, 202, 255)" amount={100} startSize={0.5} />
      </Main>
      <Bloom>
        <ambientLight />
        <RandomSpheres color="rgba(20, 202, 255)" amount={100} startSize={0.5} />
      </Bloom>
    </Canvas>
  )
}
|
#!/bin/bash
# Install the custom "dvorak_ep" layout into the system Swedish keymap and
# activate it for the current X session.
set -euo pipefail

# Back up the stock keymap once before overwriting it, so it can be restored.
# (The previous version deleted the file outright before copying — if the
# copy then failed, the original keymap was lost.)
if [ -e /usr/share/X11/xkb/symbols/se ] && [ ! -e /usr/share/X11/xkb/symbols/se.bak ]; then
    sudo cp /usr/share/X11/xkb/symbols/se /usr/share/X11/xkb/symbols/se.bak
fi
sudo cp dvorakep /usr/share/X11/xkb/symbols/se
setxkbmap se dvorak_ep
|
<reponame>MISAKIGA/husky-blog<filename>business/business-oauth2/business-service-oauth2/src/main/java/com/misakiga/husky/business/OAuthBootstrap.java
package com.misakiga.husky.business;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableOAuth2Client;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
/**
 * Single sign-on OAuth authorization server entry point.
 * @author MISAKIGA
 */
@SpringBootApplication(scanBasePackageClasses = {OAuthBootstrap.class},scanBasePackages = "com.misakiga.husky.cloud.feign")
@EnableDiscoveryClient
@EnableCaching
@EnableResourceServer
@EnableFeignClients
public class OAuthBootstrap {
    /** Boots the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(OAuthBootstrap.class,args);
    }
}
|
/*
* eGov SmartCity eGovernance suite aims to improve the internal efficiency,transparency,
* accountability and the service delivery of the government organizations.
*
* Copyright (C) 2017 eGovernments Foundation
*
* The updated version of eGov suite of products as by eGovernments Foundation
* is available at http://www.egovernments.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/ or
* http://www.gnu.org/licenses/gpl.html .
*
* In addition to the terms of the GPL license to be adhered to in using this
* program, the following additional terms are to be complied with:
*
* 1) All versions of this program, verbatim or modified must carry this
* Legal Notice.
* Further, all user interfaces, including but not limited to citizen facing interfaces,
* Urban Local Bodies interfaces, dashboards, mobile applications, of the program and any
* derived works should carry eGovernments Foundation logo on the top right corner.
*
* For the logo, please refer http://egovernments.org/html/logo/egov_logo.png.
* For any further queries on attribution, including queries on brand guidelines,
* please contact <EMAIL>
*
* 2) Any misrepresentation of the origin of the material is prohibited. It
* is required that all modified versions of this material be marked in
* reasonable ways as different from the original version.
*
* 3) This license does not grant any rights to any user of the program
* with regards to rights under trademark law for use of the trade names
* or trademarks of eGovernments Foundation.
*
* In case of any queries, you can reach eGovernments Foundation at <EMAIL>.
*
*/
package org.egov.ptis.domain.service.property;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.egov.infra.admin.master.entity.User;
import org.egov.infra.admin.master.service.CityService;
import org.egov.infra.admin.master.service.UserService;
import org.egov.infra.config.core.ApplicationThreadLocals;
import org.egov.infra.persistence.entity.Address;
import org.egov.infra.persistence.entity.enums.UserType;
import org.egov.infra.reporting.engine.ReportFormat;
import org.egov.infra.reporting.engine.ReportOutput;
import org.egov.infra.reporting.engine.ReportRequest;
import org.egov.infra.reporting.engine.ReportService;
import org.egov.infra.utils.DateUtils;
import org.egov.infstr.services.PersistenceService;
import org.egov.portal.entity.Citizen;
import org.egov.ptis.client.util.PropertyTaxUtil;
import org.egov.ptis.domain.entity.property.AmalgamationOwner;
import org.egov.ptis.domain.entity.property.BasicProperty;
import org.egov.ptis.domain.entity.property.Property;
import org.egov.ptis.domain.entity.property.PropertyImpl;
import org.egov.ptis.domain.entity.property.PropertyOwnerInfo;
import org.egov.ptis.report.bean.PropertyAckNoticeInfo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.BindingResult;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Persistence service for {@link BasicProperty}: creating/updating property
 * owners (as {@link User} records), amalgamation owners, and generating the
 * property-creation acknowledgement notice.
 */
public class PropertyPersistenceService extends PersistenceService<BasicProperty, Long> {
    // HQL used to look up an existing owner by name + mobile + gender.
    private static final String FROM_USER_WHERE_NAME_AND_MOBILE_NUMBER_AND_GENDER = "From User where name = ? and mobileNumber = ? and gender = ? ";
    private static final Logger LOGGER = Logger.getLogger(PropertyPersistenceService.class);
    // Jasper template name for the creation acknowledgement notice.
    private static final String CREATE_ACK_TEMPLATE = "mainCreatePropertyAck";
    @Autowired
    private UserService userService;
    @Autowired
    private PropertyTaxUtil propertyTaxUtil;
    @Autowired
    private ReportService reportService;
    @Autowired
    private CityService cityService;
    public PropertyPersistenceService() {
        super(BasicProperty.class);
    }
    public PropertyPersistenceService(final Class<BasicProperty> type) {
        super(type);
    }
    /**
     * Rebuilds the owner list of {@code basicProperty} from the proxy owners
     * on {@code property}. Reuses an existing User matched by
     * name + mobile + gender, otherwise creates a new Citizen. The
     * correspondence address is attached in both branches.
     */
    public void createOwners(final Property property, final BasicProperty basicProperty, final Address ownerAddress) {
        LOGGER.debug("createOwners for property: " + property + ", basicProperty: " + basicProperty
                + ", ownerAddress: " + ownerAddress);
        int orderNo = 0;
        basicProperty.getPropertyOwnerInfo().clear();
        for (final PropertyOwnerInfo ownerInfo : property.getBasicProperty().getPropertyOwnerInfoProxy()) {
            orderNo++;
            if (ownerInfo != null) {
                User user = null;
                // Aadhaar-based lookup intentionally disabled — kept for reference.
                /*if (StringUtils.isNotBlank(ownerInfo.getOwner().getAadhaarNumber()))
                    user = userService.getUserByAadhaarNumber(ownerInfo.getOwner().getAadhaarNumber());
                else*/
                if (StringUtils.isNotBlank(ownerInfo.getOwner().getMobileNumber())) {
                    user = (User) find(FROM_USER_WHERE_NAME_AND_MOBILE_NUMBER_AND_GENDER, ownerInfo
                            .getOwner().getName(), ownerInfo.getOwner().getMobileNumber(), ownerInfo.getOwner()
                            .getGender());
                }
                if (user == null) {
                    final Citizen newOwner = new Citizen();
                    user = createNewOwner(ownerInfo, newOwner);
                    persistUponPaymentResponse(basicProperty);
                    ownerInfo.setBasicProperty(basicProperty);
                    ownerInfo.setOwner(user);
                    ownerInfo.setOrderNo(orderNo);
                    ownerInfo.getOwner().addAddress(ownerAddress);
                } else {
                    // If existing user, then do not add correspondence address
                    user.setEmailId(ownerInfo.getOwner().getEmailId());
                    user.setGuardian(ownerInfo.getOwner().getGuardian());
                    user.setGuardianRelation(ownerInfo.getOwner().getGuardianRelation());
                    ownerInfo.setOwner(user);
                    ownerInfo.setOrderNo(orderNo);
                    ownerInfo.setBasicProperty(basicProperty);
                    // NOTE(review): the address IS added here despite the comment
                    // above — confirm which behaviour is intended.
                    ownerInfo.getOwner().addAddress(ownerAddress);
                }
            }
            basicProperty.addPropertyOwners(ownerInfo);
        }
    }
    /**
     * Variant of {@link #createOwners} used for appurtenant-tenant properties:
     * builds fresh {@link PropertyOwnerInfo} records (copying ownerType and
     * source) instead of reusing the proxy objects, and prefers an
     * Aadhaar-based lookup over name + mobile + gender.
     */
    public void createOwnersForAppurTenant(final Property property, final BasicProperty basicProperty,
            final Address ownerAddress) {
        int orderNo = 0;
        basicProperty.getPropertyOwnerInfo().clear();
        for (final PropertyOwnerInfo ownerInfo : property.getBasicProperty().getPropertyOwnerInfoProxy()) {
            PropertyOwnerInfo owner = new PropertyOwnerInfo();
            orderNo++;
            if (ownerInfo != null) {
                User user;
                if (StringUtils.isNotBlank(ownerInfo.getOwner().getAadhaarNumber()))
                    user = userService.getUserByAadhaarNumber(ownerInfo.getOwner().getAadhaarNumber());
                else
                    user = (User) find(FROM_USER_WHERE_NAME_AND_MOBILE_NUMBER_AND_GENDER, ownerInfo
                            .getOwner().getName(), ownerInfo.getOwner().getMobileNumber(), ownerInfo.getOwner()
                            .getGender());
                if (user == null) {
                    final Citizen newOwner = new Citizen();
                    user = createNewOwner(ownerInfo, newOwner);
                    persistUponPaymentResponse(basicProperty);
                    owner.setBasicProperty(basicProperty);
                    owner.setOwner(user);
                    owner.setOrderNo(orderNo);
                    owner.setOwnerType(ownerInfo.getOwnerType());
                    owner.setSource(ownerInfo.getSource());
                    LOGGER.debug("createOwners: OwnerAddress: " + ownerAddress);
                    owner.getOwner().addAddress(ownerAddress);
                } else {
                    // If existing user, then do not add correspondence address
                    user.setEmailId(ownerInfo.getOwner().getEmailId());
                    user.setGuardian(ownerInfo.getOwner().getGuardian());
                    user.setGuardianRelation(ownerInfo.getOwner().getGuardianRelation());
                    owner.setOwner(user);
                    owner.setOrderNo(orderNo);
                    owner.setBasicProperty(basicProperty);
                    owner.setOwnerType(ownerInfo.getOwnerType());
                    owner.setSource(ownerInfo.getSource());
                }
            }
            basicProperty.addPropertyOwners(owner);
        }
    }
    /**
     * Copies owner details from {@code ownerInfo} onto a new {@link Citizen}
     * and registers it through {@link UserService}. Username is generated
     * from the owner's mobile number.
     */
    private User createNewOwner(final PropertyOwnerInfo ownerInfo, final Citizen newOwner) {
        newOwner.setAadhaarNumber(StringUtils.isNotBlank(ownerInfo.getOwner().getAadhaarNumber()) ? ownerInfo.getOwner().getAadhaarNumber() : null);
        newOwner.setMobileNumber(ownerInfo.getOwner().getMobileNumber());
        newOwner.setEmailId(ownerInfo.getOwner().getEmailId());
        newOwner.setGender(ownerInfo.getOwner().getGender());
        newOwner.setGuardian(ownerInfo.getOwner().getGuardian());
        newOwner.setGuardianRelation(ownerInfo.getOwner().getGuardianRelation());
        newOwner.setName(ownerInfo.getOwner().getName());
        newOwner.setSalutation(ownerInfo.getOwner().getSalutation());
        newOwner.setPassword("<PASSWORD>");
        newOwner.setUsername(propertyTaxUtil.generateUserName(ownerInfo.getOwner().getMobileNumber()));
        return userService.createUser(newOwner);
    }
    /** Currently a no-op hook: returns the property unchanged. */
    public BasicProperty persistUponPaymentResponse(final BasicProperty basicProperty) {
        return basicProperty;
    }
    /** Persists a new basic property; {@code meesevaParams} is currently unused. */
    public BasicProperty createBasicProperty(final BasicProperty basicProperty, final HashMap meesevaParams) {
        return persist(basicProperty);
    }
    /**
     * Builds the PDF acknowledgement notice for a newly created property.
     * The notice due date is 15 days after the workflow state creation date.
     */
    public ReportOutput propertyAcknowledgement(final PropertyImpl property) {
        final Map<String, Object> reportParams = new HashMap<>();
        final PropertyAckNoticeInfo ackBean = new PropertyAckNoticeInfo();
        ackBean.setOwnerName(property.getBasicProperty().getFullOwnerName());
        ackBean.setOwnerAddress(property.getBasicProperty().getAddress().toString());
        ackBean.setApplicationDate(new SimpleDateFormat("dd/MM/yyyy").format(property.getBasicProperty()
                .getCreatedDate()));
        ackBean.setApplicationNo(property.getApplicationNo());
        ackBean.setApprovedDate(new SimpleDateFormat("dd/MM/yyyy").format(property.getState().getCreatedDate()));
        final Date tempNoticeDate = DateUtils.add(property.getState().getCreatedDate(), Calendar.DAY_OF_MONTH, 15);
        ackBean.setNoticeDueDate(tempNoticeDate);
        reportParams.put("logoPath", cityService.getCityLogoAsBytes());
        reportParams.put("cityName", cityService.getMunicipalityName());
        // Show the logged-in user's name only for business/employee users.
        if (Arrays.asList(UserType.BUSINESS, UserType.EMPLOYEE)
                .contains(userService.getUserById(ApplicationThreadLocals.getUserId()).getType()))
            reportParams.put("loggedInUsername", userService.getUserById(ApplicationThreadLocals.getUserId()).getName());
        final ReportRequest reportInput = new ReportRequest(CREATE_ACK_TEMPLATE, ackBean, reportParams);
        reportInput.setReportFormat(ReportFormat.PDF);
        return reportService.createReport(reportInput);
    }
    /**
     * Updates the door number and each owner's User record. If an owner's
     * Aadhaar number already belongs to a different user, persisting stops
     * for that owner and an error message (including the conflicting
     * assessment number, when found) is returned; empty string on success.
     */
    public String updateOwners(final Property property, final BasicProperty basicProperty, final String doorNumber,
            final BindingResult errors) {
        LOGGER.debug("Update Owner and door number for property: " + property + ", basicProperty: " + basicProperty
                + ", doorNumber: " + doorNumber);
        basicProperty.getAddress().setHouseNoBldgApt(doorNumber);
        final StringBuilder errorMesg = new StringBuilder();
        for (final PropertyOwnerInfo ownerInfo : basicProperty.getPropertyOwnerInfo())
            if (ownerInfo != null) {
                User user = null;
                if (StringUtils.isNotBlank(ownerInfo.getOwner().getAadhaarNumber()))
                    user = userService.getUserByAadhaarNumber(ownerInfo.getOwner().getAadhaarNumber());
                if (user == null || user.getId().equals(ownerInfo.getOwner().getId()))
                    userService.updateUser(ownerInfo.getOwner());
                else {
                    final BasicProperty basicProp = find("select basicProperty from PropertyOwnerInfo where owner = ?",
                            user.getId());
                    errorMesg.append("With entered aadhar number - ").append(ownerInfo.getOwner().getAadhaarNumber())
                            .append(" there is already owner present with owner name: ")
                            .append(user.getName());
                    if (basicProp != null)
                        errorMesg.append(" for assessment number : ").append(basicProp.getUpicNo());
                    break;
                }
            }
        persist(basicProperty);
        LOGGER.debug("Exit from updateOwners");
        return errorMesg.toString();
    }
    /**
     * Update the owners for a property
     * @param property
     * @param basicProp
     * @param ownerAddress
     */
    public void updateOwners(final Property property, final BasicProperty basicProp, final Address ownerAddress) {
        int orderNo = 0;
        basicProp.getPropertyOwnerInfo().clear();
        for (final PropertyOwnerInfo ownerInfo : property.getBasicProperty().getPropertyOwnerInfoProxy()) {
            if (ownerInfo != null) {
                User user;
                if (StringUtils.isNotBlank(ownerInfo.getOwner().getAadhaarNumber()))
                    user = userService.getUserByAadhaarNumber(ownerInfo.getOwner().getAadhaarNumber());
                else
                    user = (User) find(FROM_USER_WHERE_NAME_AND_MOBILE_NUMBER_AND_GENDER, ownerInfo
                            .getOwner().getName(), ownerInfo.getOwner().getMobileNumber(), ownerInfo.getOwner()
                            .getGender());
                if (user == null) {
                    // New owner: orderNo is only advanced for newly created users here.
                    orderNo++;
                    Citizen newOwner = new Citizen();
                    user = createNewOwner(ownerInfo, newOwner);
                    ownerInfo.setBasicProperty(basicProp);
                    ownerInfo.setOwner(user);
                    ownerInfo.setOrderNo(orderNo);
                    LOGGER.debug("createOwners: OwnerAddress: " + ownerAddress);
                    ownerInfo.getOwner().addAddress(ownerAddress);
                } else {
                    // If existing user, then update the address
                    user.setAadhaarNumber(ownerInfo.getOwner().getAadhaarNumber());
                    user.setMobileNumber(ownerInfo.getOwner().getMobileNumber());
                    user.setName(ownerInfo.getOwner().getName());
                    user.setGender(ownerInfo.getOwner().getGender());
                    user.setEmailId(ownerInfo.getOwner().getEmailId());
                    user.setGuardian(ownerInfo.getOwner().getGuardian());
                    user.setGuardianRelation(ownerInfo.getOwner().getGuardianRelation());
                    ownerInfo.setOwner(user);
                    ownerInfo.setBasicProperty(basicProp);
                }
            }
            basicProp.addPropertyOwners(ownerInfo);
        }
    }
    /** Updates an existing basic property; {@code meesevaParams} is currently unused. */
    public BasicProperty updateBasicProperty(final BasicProperty basicProperty, final HashMap<String, String> meesevaParams) {
        return update(basicProperty);
    }
    /**
     * Appends the workflow property's amalgamation owners to the basic
     * property's owner list, skipping users already present, continuing the
     * existing orderNo sequence.
     */
    public void createAmalgamatedOwners(final BasicProperty basicProperty) {
        int orderNo = 0;
        List<Long> parentOwners = new ArrayList<>();
        // Find the highest existing orderNo and collect current owner ids.
        for (PropertyOwnerInfo ownerInfo : basicProperty.getPropertyOwnerInfo()) {
            if (ownerInfo.getOrderNo() != null && orderNo < ownerInfo.getOrderNo())
                orderNo = ownerInfo.getOrderNo();
            parentOwners.add(ownerInfo.getOwner().getId());
        }
        for (AmalgamationOwner ownerInfo : basicProperty.getWFProperty().getAmalgamationOwners()) {
            orderNo++;
            if (ownerInfo != null && !parentOwners.contains(ownerInfo.getOwner().getId())) {
                PropertyOwnerInfo childOwnerInfo = new PropertyOwnerInfo();
                childOwnerInfo.setOwner(ownerInfo.getOwner());
                childOwnerInfo.setOrderNo(orderNo);
                childOwnerInfo.setBasicProperty(basicProperty);
                basicProperty.addPropertyOwners(childOwnerInfo);
            }
        }
    }
}
|
import os
import json
import logging
import logging.config
from flask import Flask
from flask_cors import CORS
def configure_logging_and_cors(config_file_path):
    """Configure logging and return a Flask app with permissive CORS.

    Logging is configured from a JSON ``dictConfig`` file when
    ``config_file_path`` exists, otherwise via ``basicConfig`` at INFO level.

    Args:
        config_file_path: Path to an optional JSON logging-config file.

    Returns:
        A Flask application instance with CORS allowing all origins on all
        routes.
    """
    if os.path.exists(config_file_path):
        with open(config_file_path, 'rt') as f:
            logging.config.dictConfig(json.load(f))
    else:
        logging.basicConfig(
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=logging.INFO)

    app = Flask(__name__)
    # Fix: CORS was previously applied twice (a plain ``CORS(app)`` followed
    # by this resource-scoped call); a single explicit call suffices.
    # Also removed the unused ``logger`` and ``cors`` locals.
    CORS(app, resources={r"/*": {"origins": "*"}})
    return app
package com.ice.restring.activity;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import com.ice.restring.R;
import com.ice.restring.Restring;
/** Minimal activity used to exercise Restring's context wrapping in tests. */
public class TestActivity extends AppCompatActivity {
    @Override
    protected void attachBaseContext(Context newBase) {
        // Route all string lookups through Restring's wrapped context.
        super.attachBaseContext(Restring.wrapContext(newBase));
    }
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // An AppCompat theme is required before inflating AppCompat views.
        setTheme(R.style.Theme_AppCompat);
        setContentView(R.layout.test_layout);
    }
}
|
<reponame>nortal/spring-mvc-component-web<gh_stars>0
package com.nortal.spring.cw.core.web.component.list.header;
/**
 * Default initialization of a list-header element and path assignment.
 *
 * @author margush
 *
 */
public class DefaultListHeaderBuilder implements ListHeaderBuilder {
    private static final long serialVersionUID = 1L;
    // Shared default build strategy instance.
    public static final ListHeaderBuildStrategy listHeaderBuildStrategy = new DefaultListHeaderBuildStrategy();
    /** Initializes the header component, then applies subclass customization.
     *  The {@code buildStrategy} and {@code cellNumber} arguments are unused here. */
    public void init(final ListHeaderBuildStrategy buildStrategy, ListHeader header, final int cellNumber) {
        header.initComponent();
        customizeHeader(header);
    }
    // = Extension points for modifying ListHeader =
    /**
     * Called after ListHeader is initiated
     *
     * @param header
     */
    public void customizeHeader(ListHeader header) {
        // By default we do nothing
    }
}
|
# Install Node dependencies, then run the asset build (Sass + JS bundling).
npm install
node compile-sass-and-js.js
// Catalog product with pricing metadata and a quantity/discount total helper.
public class Product
{
    public DateTimeOffset DateAdded { get; set; }
    public string Image { get; set; }
    public decimal Weight { get; set; }
    public decimal Price { get; set; }

    // Total for `quantity` units after applying a fractional discount
    // (e.g. 0.25m means 25% off the subtotal).
    public decimal CalculateTotalPrice(int quantity, decimal discountPercentage)
    {
        decimal subtotal = Price * quantity;
        return subtotal - discountPercentage * subtotal;
    }
}
<reponame>camplight/hylo-evo
import { getComments } from './getComments'
import orm from 'store/models'
import extractModelsFromAction from 'store/reducers/ModelExtractor/extractModelsFromAction'
describe('getComments', () => {
  it("returns an empty array if post doesn't exist", () => {
    const session = orm.session(orm.getEmptyState())
    expect(getComments(session.state, { postId: '1' })).toEqual([])
  })
  // NOTE(review): despite the title, this case asserts comment text and
  // attachment URLs — "returns comments with attachments" would be clearer.
  it('returns images', () => {
    const session = orm.session(orm.getEmptyState())
    // Two posts, each with two comments carrying one attachment apiece;
    // only post 1's comments should be returned below.
    const setupActions = [
      {
        payload: {
          data: {
            post: {
              id: 1,
              comments: {
                items: [
                  {
                    id: 11,
                    text: 'eleven',
                    attachments: [
                      {
                        id: 111,
                        url: 'foo.png'
                      }
                    ]
                  },
                  {
                    id: 12,
                    text: 'twelve',
                    attachments: [
                      {
                        id: 121,
                        url: 'bar.png'
                      }
                    ]
                  }
                ]
              }
            }
          }
        },
        meta: { extractModel: 'Post' }
      },
      {
        payload: {
          data: {
            post: {
              id: 2,
              comments: {
                items: [
                  {
                    id: 21,
                    text: 'twentyone',
                    attachments: [
                      {
                        id: 211,
                        url: 'foo2.png'
                      }
                    ]
                  },
                  {
                    id: 22,
                    text: 'twentytwo',
                    attachments: [
                      {
                        id: 221,
                        url: 'bar2.png'
                      }
                    ]
                  }
                ]
              }
            }
          }
        },
        meta: { extractModel: 'Post' }
      }
    ]
    // Load both posts into the ORM session as the reducer would.
    extractModelsFromAction(setupActions[0], session)
    extractModelsFromAction(setupActions[1], session)
    const state = {
      orm: session.state
    }
    const comments = getComments(state, { postId: 1 })
    expect(comments.length).toEqual(2)
    expect(comments.map(c => c.text)).toEqual(['eleven', 'twelve'])
    expect(comments.map(c => c.attachments.map(a => a.url))).toEqual([['foo.png'], ['bar.png']])
  })
  it('returns comments for post, ordered by id', () => {
    const session = orm.session(orm.getEmptyState())
    // Comments are deliberately created out of order to verify sorting.
    const { Post, Comment } = session;
    [
      { model: Comment, attrs: { id: '4', post: '1' } },
      { model: Comment, attrs: { id: '5', post: '2' } },
      { model: Comment, attrs: { id: '1', post: '1' } },
      { model: Comment, attrs: { id: '3', post: '2' } },
      { model: Comment, attrs: { id: '2', post: '1' } },
      { model: Post, attrs: { id: '1' } },
      { model: Post, attrs: { id: '2' } }
    ].forEach(({ model, attrs }) => model.create(attrs))
    expect(getComments({ orm: session.state }, { postId: '1' }).map(c => c.id))
      .toEqual(['1', '2', '4'])
  })
})
|
import threading
import time
class JobScheduler:
    """Runs queued tasks concurrently, one worker thread per task."""

    def __init__(self):
        # (taskId, duration) pairs in submission order.
        self.tasks = []
        # Guards self.tasks against concurrent mutation (the original
        # created this lock but never used it).
        self.lock = threading.Lock()

    def addTask(self, taskId, duration):
        """Queue a task identified by ``taskId`` taking ``duration`` seconds."""
        with self.lock:
            self.tasks.append((taskId, duration))

    def executeTask(self, taskId, duration):
        """Simulate the work for one task, then report its completion."""
        time.sleep(duration)
        print(f"Task {taskId} completed")

    def execute(self):
        """Run every queued task concurrently and wait for all to finish.

        Bug fix: the original started each thread and immediately joined it,
        which ran the tasks strictly sequentially and defeated the purpose
        of using threads. Start all workers first, then join them all.
        """
        with self.lock:
            pending = list(self.tasks)
        workers = []
        for taskId, duration in pending:
            thread = threading.Thread(target=self.executeTask, args=(taskId, duration))
            thread.start()
            workers.append(thread)
        for thread in workers:
            thread.join()
<reponame>fabriziofranco/Laboratorio7.2
import numpy as np
def score1(q, doc):
    """Unnormalised similarity: the inner product of query and document."""
    return np.sum(q * doc)
def score2(q, doc):
    """Cosine similarity: inner product scaled by both vector norms."""
    inner = np.dot(q, doc)
    norm_product = np.linalg.norm(q) * np.linalg.norm(doc)
    return inner / norm_product
def retrieval(collection, query, func_score):
    """Score every document against the query and rank the results.

    Returns a list of ``(doc_number, score)`` tuples where ``doc_number``
    is 1-indexed, sorted by score in ascending order (i.e. the least
    similar document comes first — this mirrors the original contract).
    """
    scored = [(position + 1, func_score(query, document))
              for position, document in enumerate(collection)]
    return sorted(scored, key=lambda pair: pair[1])
##################### main ##############################
# Toy document collection: two raw term-frequency vectors.
collection = [
    np.array([15,5, 20,25]),
    np.array([30,0,22,0])
]
# Raw term frequencies of the query.
query = np.array([115, 10, 2, 0])
# Apply sublinear (log10) term-frequency scaling to the query and to
# every document before scoring.
query = np.log10(1 + query)
for i in range(len(collection)):
    collection[i] = np.log10(1 + collection[i])
### aplicar score 1
# Rank the collection against the query with the dot-product score and
# print the (doc_number, score) ranking.
result = retrieval(collection, query, score1)
print(result)
<reponame>ryanseipp/aoc<filename>1/go/aocday1_test.go
package aocday1
import (
"testing"
)
// TestCountLargerMeasurements checks the sample depth series yields 7
// measurements larger than their predecessor.
func TestCountLargerMeasurements(t *testing.T) {
	sample := []int32{199, 200, 208, 210, 200, 207, 240, 269, 260, 263}
	if got := countLargerMeasurements(sample); got != 7 {
		t.Fatalf("%d != 7", got)
	}
}
// TestCountThreeLarger checks the sample depth series yields 5 increasing
// three-measurement sliding-window sums.
func TestCountThreeLarger(t *testing.T) {
	sample := []int32{199, 200, 208, 210, 200, 207, 240, 269, 260, 263}
	if got := countThreeLarger(sample); got != 5 {
		t.Fatalf("%d != 5", got)
	}
}
// BenchmarkCountLargerMeasurements times countLargerMeasurements against the
// full puzzle-input depth series (inlined below so the benchmark is
// self-contained and does no file I/O).
func BenchmarkCountLargerMeasurements(b *testing.B) {
depths := []int32{143, 147, 150, 166, 180, 187, 188, 185, 199, 198, 199, 197, 218, 221, 238, 239, 233, 234,
217, 212, 215, 216, 215, 217, 220, 241, 245, 232, 230, 234, 206, 251, 272, 273, 278, 279,
285, 292, 288, 286, 289, 290, 294, 301, 298, 300, 297, 305, 306, 310, 340, 343, 352, 356,
362, 376, 380, 383, 382, 385, 372, 391, 384, 385, 398, 430, 426, 422, 423, 433, 419, 421,
431, 424, 442, 450, 453, 449, 487, 491, 462, 464, 466, 467, 468, 466, 482, 484, 501, 500,
501, 497, 498, 497, 508, 516, 521, 540, 541, 536, 537, 536, 542, 549, 535, 541, 548, 549,
551, 570, 572, 606, 605, 601, 608, 609, 611, 605, 631, 618, 635, 636, 635, 636, 637, 638,
676, 673, 668, 674, 683, 684, 683, 680, 687, 688, 698, 700, 706, 696, 713, 707, 708, 719,
721, 728, 719, 723, 715, 712, 722, 721, 709, 680, 681, 680, 681, 690, 686, 687, 682, 683,
680, 693, 708, 718, 730, 743, 726, 753, 747, 748, 751, 772, 801, 805, 797, 789, 790, 797,
812, 806, 813, 815, 813, 815, 822, 835, 843, 854, 858, 859, 860, 869, 872, 870, 864, 876,
880, 882, 865, 869, 876, 882, 886, 885, 898, 900, 909, 913, 902, 910, 914, 916, 920, 898,
899, 931, 936, 937, 938, 941, 950, 939, 942, 935, 932, 941, 945, 944, 957, 959, 967, 975,
990, 1011, 1008, 1011, 1015, 1017, 1018, 1011, 1010, 1035, 1036, 1040, 1041, 1046, 1048,
1027, 1047, 1069, 1079, 1074, 1075, 1080, 1079, 1083, 1071, 1077, 1080, 1081, 1082, 1083,
1085, 1087, 1089, 1095, 1096, 1099, 1110, 1112, 1119, 1122, 1124, 1130, 1133, 1143, 1139,
1141, 1142, 1144, 1176, 1175, 1159, 1165, 1172, 1182, 1203, 1209, 1193, 1194, 1193, 1191,
1196, 1201, 1202, 1205, 1216, 1219, 1220, 1213, 1212, 1220, 1226, 1233, 1234, 1236, 1239,
1240, 1242, 1237, 1261, 1259, 1257, 1245, 1251, 1252, 1272, 1276, 1277, 1279, 1284, 1285,
1286, 1285, 1315, 1316, 1331, 1333, 1335, 1347, 1344, 1343, 1342, 1340, 1354, 1359, 1367,
1368, 1378, 1380, 1358, 1370, 1371, 1387, 1388, 1402, 1404, 1406, 1401, 1403, 1405, 1407,
1408, 1428, 1432, 1433, 1434, 1435, 1446, 1460, 1461, 1460, 1468, 1471, 1472, 1474, 1465,
1473, 1496, 1507, 1510, 1506, 1502, 1509, 1512, 1515, 1516, 1524, 1526, 1527, 1528, 1527,
1541, 1544, 1545, 1543, 1548, 1551, 1544, 1545, 1550, 1554, 1556, 1557, 1558, 1575, 1578,
1581, 1587, 1586, 1590, 1584, 1571, 1576, 1574, 1575, 1585, 1601, 1604, 1612, 1613, 1600,
1587, 1591, 1596, 1595, 1601, 1608, 1614, 1615, 1614, 1634, 1636, 1637, 1639, 1642, 1629,
1617, 1618, 1617, 1612, 1613, 1614, 1615, 1617, 1608, 1609, 1611, 1608, 1634, 1636, 1641,
1637, 1657, 1665, 1668, 1679, 1680, 1695, 1697, 1698, 1669, 1670, 1669, 1673, 1672, 1678,
1670, 1671, 1673, 1689, 1683, 1685, 1691, 1692, 1669, 1670, 1679, 1672, 1684, 1704, 1700,
1692, 1714, 1715, 1716, 1712, 1714, 1716, 1714, 1718, 1719, 1713, 1717, 1710, 1711, 1721,
1745, 1752, 1767, 1762, 1787, 1785, 1786, 1790, 1796, 1814, 1843, 1859, 1871, 1876, 1890,
1891, 1892, 1894, 1912, 1914, 1939, 1955, 1956, 1960, 1964, 1967, 1970, 1971, 1972, 1969,
1970, 1984, 1996, 2017, 2018, 2020, 2005, 2006, 2005, 2007, 2005, 2006, 2027, 2031, 2032,
2012, 2014, 2013, 2016, 2014, 2018, 2040, 2042, 2061, 2064, 2063, 2080, 2086, 2094, 2063,
2071, 2092, 2077, 2078, 2079, 2080, 2099, 2100, 2103, 2118, 2119, 2124, 2128, 2135, 2136,
2135, 2136, 2137, 2135, 2163, 2168, 2162, 2164, 2160, 2161, 2162, 2165, 2169, 2168, 2185,
2191, 2197, 2212, 2224, 2228, 2229, 2230, 2243, 2250, 2251, 2252, 2255, 2256, 2247, 2253,
2262, 2296, 2317, 2324, 2327, 2299, 2300, 2290, 2291, 2303, 2292, 2293, 2294, 2297, 2298,
2308, 2309, 2312, 2337, 2340, 2349, 2354, 2345, 2341, 2340, 2341, 2344, 2345, 2341, 2346,
2352, 2357, 2340, 2339, 2336, 2339, 2351, 2356, 2358, 2364, 2366, 2367, 2370, 2376, 2377,
2375, 2378, 2391, 2394, 2389, 2391, 2388, 2389, 2407, 2420, 2417, 2416, 2424, 2443, 2455,
2457, 2446, 2449, 2458, 2466, 2479, 2486, 2478, 2491, 2503, 2507, 2513, 2518, 2500, 2502,
2504, 2514, 2516, 2517, 2518, 2529, 2532, 2543, 2544, 2543, 2545, 2553, 2548, 2559, 2563,
2573, 2587, 2584, 2581, 2579, 2582, 2577, 2588, 2589, 2614, 2617, 2620, 2622, 2615, 2619,
2618, 2610, 2616, 2615, 2618, 2616, 2627, 2630, 2641, 2659, 2662, 2663, 2661, 2669, 2662,
2687, 2690, 2691, 2693, 2696, 2728, 2746, 2743, 2746, 2745, 2746, 2742, 2749, 2756, 2750,
2738, 2743, 2744, 2746, 2750, 2753, 2762, 2770, 2767, 2766, 2778, 2780, 2781, 2776, 2763,
2762, 2763, 2766, 2762, 2753, 2761, 2768, 2772, 2746, 2762, 2767, 2774, 2795, 2806, 2809,
2816, 2830, 2821, 2837, 2836, 2838, 2842, 2831, 2815, 2816, 2818, 2817, 2822, 2817, 2823,
2834, 2825, 2818, 2821, 2847, 2846, 2845, 2847, 2848, 2849, 2863, 2872, 2866, 2874, 2875,
2885, 2886, 2892, 2898, 2897, 2917, 2922, 2916, 2921, 2922, 2921, 2940, 2945, 2935, 2950,
2946, 2949, 2951, 2948, 2940, 2941, 2940, 2939, 2955, 2943, 2941, 2942, 2945, 2961, 2962,
2965, 2975, 2970, 2971, 2960, 2961, 2956, 2960, 2961, 2967, 2966, 2979, 2976, 2983, 3012,
3024, 3021, 3041, 3040, 3041, 3042, 3048, 3077, 3088, 3091, 3109, 3107, 3116, 3111, 3115,
3121, 3095, 3097, 3077, 3059, 3065, 3068, 3061, 3062, 3061, 3063, 3068, 3070, 3073, 3057,
3075, 3082, 3089, 3086, 3090, 3092, 3106, 3112, 3118, 3120, 3119, 3121, 3119, 3126, 3133,
3135, 3163, 3166, 3167, 3181, 3185, 3191, 3197, 3198, 3199, 3194, 3210, 3216, 3224, 3225,
3235, 3237, 3245, 3253, 3259, 3248, 3256, 3273, 3265, 3248, 3249, 3258, 3257, 3256, 3255,
3250, 3252, 3260, 3261, 3267, 3268, 3269, 3284, 3294, 3334, 3337, 3340, 3307, 3316, 3320,
3328, 3330, 3350, 3357, 3363, 3364, 3361, 3396, 3397, 3410, 3411, 3433, 3452, 3453, 3465,
3467, 3469, 3470, 3467, 3480, 3494, 3517, 3519, 3527, 3542, 3546, 3547, 3544, 3570, 3571,
3596, 3604, 3603, 3587, 3588, 3591, 3589, 3590, 3591, 3600, 3598, 3599, 3600, 3599, 3600,
3609, 3608, 3610, 3611, 3632, 3633, 3632, 3633, 3635, 3642, 3643, 3656, 3652, 3653, 3657,
3676, 3677, 3687, 3690, 3693, 3694, 3697, 3702, 3703, 3722, 3729, 3730, 3731, 3737, 3744,
3747, 3751, 3757, 3758, 3761, 3769, 3781, 3796, 3798, 3797, 3798, 3812, 3848, 3849, 3850,
3854, 3861, 3859, 3863, 3867, 3870, 3882, 3884, 3876, 3879, 3909, 3908, 3910, 3909, 3908,
3886, 3888, 3889, 3891, 3920, 3921, 3922, 3913, 3955, 3956, 3974, 3975, 3976, 3988, 3992,
3999, 4009, 4010, 4019, 4023, 4024, 4035, 4045, 4044, 4046, 4072, 4074, 4093, 4101, 4105,
4111, 4141, 4146, 4147, 4160, 4162, 4161, 4162, 4156, 4158, 4160, 4168, 4169, 4168, 4169,
4192, 4191, 4222, 4239, 4227, 4228, 4226, 4228, 4227, 4228, 4221, 4222, 4223, 4248, 4249,
4258, 4249, 4257, 4260, 4258, 4257, 4259, 4260, 4267, 4284, 4285, 4286, 4283, 4280, 4281,
4280, 4279, 4282, 4289, 4290, 4295, 4300, 4289, 4290, 4291, 4300, 4310, 4314, 4319, 4320,
4321, 4324, 4330, 4326, 4340, 4326, 4327, 4332, 4333, 4334, 4339, 4361, 4369, 4370, 4374,
4372, 4382, 4398, 4399, 4395, 4396, 4397, 4400, 4404, 4401, 4405, 4415, 4427, 4433, 4446,
4447, 4455, 4456, 4457, 4458, 4467, 4471, 4463, 4470, 4460, 4458, 4459, 4457, 4468, 4470,
4448, 4449, 4453, 4474, 4470, 4475, 4474, 4473, 4472, 4474, 4476, 4478, 4479, 4483, 4484,
4485, 4481, 4483, 4492, 4505, 4513, 4529, 4530, 4531, 4503, 4504, 4505, 4499, 4500, 4516,
4517, 4513, 4497, 4493, 4494, 4524, 4537, 4543, 4544, 4551, 4552, 4555, 4565, 4567, 4566,
4567, 4566, 4590, 4591, 4596, 4597, 4588, 4572, 4575, 4572, 4574, 4575, 4576, 4584, 4572,
4578, 4579, 4593, 4600, 4604, 4584, 4586, 4583, 4599, 4601, 4603, 4625, 4624, 4625, 4627,
4623, 4624, 4620, 4618, 4617, 4616, 4617, 4612, 4623, 4634, 4635, 4623, 4628, 4629, 4631,
4633, 4654, 4659, 4669, 4674, 4677, 4678, 4694, 4699, 4701, 4713, 4714, 4722, 4724, 4725,
4761, 4785, 4791, 4783, 4790, 4791, 4792, 4809, 4835, 4840, 4845, 4852, 4851, 4852, 4846,
4848, 4849, 4854, 4856, 4862, 4856, 4875, 4876, 4878, 4892, 4893, 4903, 4891, 4909, 4923,
4938, 4939, 4944, 4953, 4954, 4975, 4977, 4978, 4984, 4987, 4989, 4987, 4995, 4994, 4996,
5000, 5008, 5018, 5024, 5027, 5028, 5031, 5035, 5051, 5059, 5061, 5065, 5069, 5090, 5110,
5111, 5113, 5115, 5129, 5128, 5139, 5140, 5141, 5142, 5149, 5148, 5147, 5153, 5157, 5178,
5189, 5209, 5217, 5211, 5215, 5221, 5243, 5244, 5275, 5287, 5290, 5300, 5301, 5297, 5300,
5289, 5311, 5314, 5303, 5304, 5306, 5307, 5316, 5329, 5330, 5333, 5334, 5335, 5343, 5359,
5380, 5382, 5383, 5392, 5403, 5409, 5410, 5420, 5427, 5438, 5448, 5455, 5458, 5489, 5490,
5491, 5492, 5494, 5497, 5522, 5523, 5525, 5536, 5537, 5542, 5522, 5529, 5534, 5535, 5545,
5546, 5544, 5538, 5547, 5551, 5550, 5551, 5552, 5553, 5565, 5578, 5589, 5590, 5589, 5592,
5601, 5614, 5615, 5614, 5615, 5616, 5626, 5627, 5629, 5647, 5649, 5650, 5645, 5646, 5647,
5672, 5690, 5692, 5697, 5701, 5697, 5702, 5705, 5718, 5720, 5725, 5747, 5749, 5751, 5756,
5762, 5764, 5771, 5772, 5771, 5772, 5809, 5822, 5823, 5840, 5841, 5839, 5838, 5839, 5840,
5838, 5852, 5853, 5859, 5865, 5879, 5885, 5899, 5907, 5910, 5936, 5943, 5957, 5953, 5958,
5968, 5979, 5971, 5966, 5967, 5971, 5991, 5993, 5994, 5993, 5996, 5993, 6004, 6010, 6017,
6018, 6017, 6038, 6036, 6035, 6036, 6039, 6042, 6037, 6059, 6062, 6067, 6068, 6084, 6077,
6080, 6081, 6079, 6093, 6095, 6098, 6111, 6107, 6114, 6105, 6104, 6111, 6112, 6120, 6126,
6140, 6143, 6142, 6148, 6152, 6159, 6150, 6152, 6168, 6169, 6167, 6161, 6160, 6159, 6160,
6168, 6169, 6188, 6200, 6220, 6221, 6219, 6221, 6220, 6232, 6233, 6234, 6242, 6247, 6246,
6247, 6253, 6258, 6256, 6249, 6228, 6225, 6223, 6227, 6228, 6259, 6265, 6268, 6270, 6271,
6275, 6270, 6263, 6265, 6279, 6323, 6365, 6374, 6397, 6415, 6416, 6419, 6403, 6405, 6410,
6417, 6424, 6425, 6422, 6432, 6442, 6451, 6459, 6458, 6463, 6485, 6486, 6509, 6508, 6532,
6535, 6551, 6562, 6563, 6572, 6571, 6590, 6628, 6629, 6627, 6625, 6620, 6619, 6625, 6616,
6615, 6646, 6648, 6649, 6651, 6653, 6645, 6651, 6652, 6665, 6660, 6664, 6665, 6684, 6685,
6694, 6695, 6696, 6693, 6694, 6703, 6702, 6696, 6693, 6694, 6691, 6725, 6728, 6715, 6722,
6728, 6729, 6730, 6742, 6753, 6754, 6752, 6753, 6761, 6771, 6773, 6778, 6779, 6795, 6819,
6797, 6799, 6798, 6807, 6809, 6837, 6861, 6864, 6865, 6868, 6869, 6867, 6861, 6871, 6874,
6872, 6873, 6875, 6867, 6877, 6876, 6898, 6900, 6902, 6904, 6905, 6915, 6908, 6927, 6928,
6935, 6942, 6943, 6945, 6948, 6943, 6947, 6990, 6993, 7013, 7014, 6997, 6976, 6972, 6975,
6976, 6982, 6991, 7006, 7009, 7015, 7019, 7018, 7008, 7011, 7018, 7023, 7027, 7044, 7054,
7060, 7058, 7057, 7061, 7038, 7036, 7040, 7041, 7039, 7042, 7039, 7037, 7049, 7056, 7057,
7072, 7095, 7101, 7102, 7104, 7103, 7106, 7107, 7108, 7112, 7117, 7119, 7117, 7132, 7130,
7137, 7143, 7135, 7138, 7152, 7156, 7180, 7181, 7182, 7179, 7180, 7174, 7172, 7162, 7177,
7180, 7187, 7153, 7159, 7145, 7139, 7140, 7137, 7156, 7160, 7161, 7162, 7163, 7170, 7163,
7171, 7167, 7166, 7167, 7169, 7170, 7178, 7179, 7166, 7177, 7176, 7203, 7194, 7187, 7191,
7202, 7198, 7199, 7205, 7206, 7207, 7208, 7214, 7215, 7214, 7215, 7217, 7208, 7205, 7214,
7215, 7216, 7212, 7214, 7222, 7226, 7231, 7229, 7239, 7238, 7239, 7250, 7252, 7255, 7277,
7279, 7284, 7290, 7293, 7294, 7316, 7325, 7326, 7327, 7328, 7326, 7325, 7329, 7335, 7340,
7344, 7337, 7338, 7348, 7366, 7376, 7377, 7382, 7387, 7401, 7392, 7396, 7400, 7406, 7405,
7410, 7425, 7426, 7432, 7449, 7450, 7444, 7450, 7455, 7456, 7471, 7472, 7494, 7514, 7515,
7512, 7515, 7517, 7514, 7538, 7539, 7540, 7556, 7557, 7553, 7549, 7546, 7566, 7567, 7564,
7565, 7559, 7560, 7563, 7571, 7570, 7572, 7580, 7587, 7595, 7597, 7614, 7622, 7621, 7627,
7656, 7652, 7659, 7658, 7657, 7660, 7665, 7664, 7666, 7669, 7678, 7680, 7682, 7683, 7669,
7692, 7695, 7699, 7705, 7717, 7720, 7730, 7733, 7748, 7759, 7747, 7748, 7759, 7778, 7777,
7778, 7780, 7781, 7780, 7788, 7787, 7788, 7814, 7810, 7787, 7788, 7802, 7803, 7813, 7815,
7818, 7823, 7830, 7852, 7868, 7874, 7879, 7873, 7854, 7871, 7877, 7876, 7883, 7885, 7887,
7886, 7889, 7893, 7900, 7909, 7910, 7906, 7910, 7913, 7918, 7919, 7941, 7947, 7948, 7949,
7951, 7952, 7977, 7976, 7979, 8010, 8011, 8014, 8015, 8031, 8054, 8056, 8059, 8085, 8087,
8107, 8109, 8115, 8116, 8117, 8115, 8116, 8119, 8127, 8145, 8147}
// The slice is built once outside the timed loop, so only the counting
// work itself is measured b.N times.
for i := 0; i < b.N; i++ {
countLargerMeasurements(depths)
}
}
// BenchmarkCountThreeLarger times countThreeLarger (the three-measurement
// sliding-window variant) against the same inlined puzzle-input series used
// by BenchmarkCountLargerMeasurements.
func BenchmarkCountThreeLarger(b *testing.B) {
depths := []int32{143, 147, 150, 166, 180, 187, 188, 185, 199, 198, 199, 197, 218, 221, 238, 239, 233, 234,
217, 212, 215, 216, 215, 217, 220, 241, 245, 232, 230, 234, 206, 251, 272, 273, 278, 279,
285, 292, 288, 286, 289, 290, 294, 301, 298, 300, 297, 305, 306, 310, 340, 343, 352, 356,
362, 376, 380, 383, 382, 385, 372, 391, 384, 385, 398, 430, 426, 422, 423, 433, 419, 421,
431, 424, 442, 450, 453, 449, 487, 491, 462, 464, 466, 467, 468, 466, 482, 484, 501, 500,
501, 497, 498, 497, 508, 516, 521, 540, 541, 536, 537, 536, 542, 549, 535, 541, 548, 549,
551, 570, 572, 606, 605, 601, 608, 609, 611, 605, 631, 618, 635, 636, 635, 636, 637, 638,
676, 673, 668, 674, 683, 684, 683, 680, 687, 688, 698, 700, 706, 696, 713, 707, 708, 719,
721, 728, 719, 723, 715, 712, 722, 721, 709, 680, 681, 680, 681, 690, 686, 687, 682, 683,
680, 693, 708, 718, 730, 743, 726, 753, 747, 748, 751, 772, 801, 805, 797, 789, 790, 797,
812, 806, 813, 815, 813, 815, 822, 835, 843, 854, 858, 859, 860, 869, 872, 870, 864, 876,
880, 882, 865, 869, 876, 882, 886, 885, 898, 900, 909, 913, 902, 910, 914, 916, 920, 898,
899, 931, 936, 937, 938, 941, 950, 939, 942, 935, 932, 941, 945, 944, 957, 959, 967, 975,
990, 1011, 1008, 1011, 1015, 1017, 1018, 1011, 1010, 1035, 1036, 1040, 1041, 1046, 1048,
1027, 1047, 1069, 1079, 1074, 1075, 1080, 1079, 1083, 1071, 1077, 1080, 1081, 1082, 1083,
1085, 1087, 1089, 1095, 1096, 1099, 1110, 1112, 1119, 1122, 1124, 1130, 1133, 1143, 1139,
1141, 1142, 1144, 1176, 1175, 1159, 1165, 1172, 1182, 1203, 1209, 1193, 1194, 1193, 1191,
1196, 1201, 1202, 1205, 1216, 1219, 1220, 1213, 1212, 1220, 1226, 1233, 1234, 1236, 1239,
1240, 1242, 1237, 1261, 1259, 1257, 1245, 1251, 1252, 1272, 1276, 1277, 1279, 1284, 1285,
1286, 1285, 1315, 1316, 1331, 1333, 1335, 1347, 1344, 1343, 1342, 1340, 1354, 1359, 1367,
1368, 1378, 1380, 1358, 1370, 1371, 1387, 1388, 1402, 1404, 1406, 1401, 1403, 1405, 1407,
1408, 1428, 1432, 1433, 1434, 1435, 1446, 1460, 1461, 1460, 1468, 1471, 1472, 1474, 1465,
1473, 1496, 1507, 1510, 1506, 1502, 1509, 1512, 1515, 1516, 1524, 1526, 1527, 1528, 1527,
1541, 1544, 1545, 1543, 1548, 1551, 1544, 1545, 1550, 1554, 1556, 1557, 1558, 1575, 1578,
1581, 1587, 1586, 1590, 1584, 1571, 1576, 1574, 1575, 1585, 1601, 1604, 1612, 1613, 1600,
1587, 1591, 1596, 1595, 1601, 1608, 1614, 1615, 1614, 1634, 1636, 1637, 1639, 1642, 1629,
1617, 1618, 1617, 1612, 1613, 1614, 1615, 1617, 1608, 1609, 1611, 1608, 1634, 1636, 1641,
1637, 1657, 1665, 1668, 1679, 1680, 1695, 1697, 1698, 1669, 1670, 1669, 1673, 1672, 1678,
1670, 1671, 1673, 1689, 1683, 1685, 1691, 1692, 1669, 1670, 1679, 1672, 1684, 1704, 1700,
1692, 1714, 1715, 1716, 1712, 1714, 1716, 1714, 1718, 1719, 1713, 1717, 1710, 1711, 1721,
1745, 1752, 1767, 1762, 1787, 1785, 1786, 1790, 1796, 1814, 1843, 1859, 1871, 1876, 1890,
1891, 1892, 1894, 1912, 1914, 1939, 1955, 1956, 1960, 1964, 1967, 1970, 1971, 1972, 1969,
1970, 1984, 1996, 2017, 2018, 2020, 2005, 2006, 2005, 2007, 2005, 2006, 2027, 2031, 2032,
2012, 2014, 2013, 2016, 2014, 2018, 2040, 2042, 2061, 2064, 2063, 2080, 2086, 2094, 2063,
2071, 2092, 2077, 2078, 2079, 2080, 2099, 2100, 2103, 2118, 2119, 2124, 2128, 2135, 2136,
2135, 2136, 2137, 2135, 2163, 2168, 2162, 2164, 2160, 2161, 2162, 2165, 2169, 2168, 2185,
2191, 2197, 2212, 2224, 2228, 2229, 2230, 2243, 2250, 2251, 2252, 2255, 2256, 2247, 2253,
2262, 2296, 2317, 2324, 2327, 2299, 2300, 2290, 2291, 2303, 2292, 2293, 2294, 2297, 2298,
2308, 2309, 2312, 2337, 2340, 2349, 2354, 2345, 2341, 2340, 2341, 2344, 2345, 2341, 2346,
2352, 2357, 2340, 2339, 2336, 2339, 2351, 2356, 2358, 2364, 2366, 2367, 2370, 2376, 2377,
2375, 2378, 2391, 2394, 2389, 2391, 2388, 2389, 2407, 2420, 2417, 2416, 2424, 2443, 2455,
2457, 2446, 2449, 2458, 2466, 2479, 2486, 2478, 2491, 2503, 2507, 2513, 2518, 2500, 2502,
2504, 2514, 2516, 2517, 2518, 2529, 2532, 2543, 2544, 2543, 2545, 2553, 2548, 2559, 2563,
2573, 2587, 2584, 2581, 2579, 2582, 2577, 2588, 2589, 2614, 2617, 2620, 2622, 2615, 2619,
2618, 2610, 2616, 2615, 2618, 2616, 2627, 2630, 2641, 2659, 2662, 2663, 2661, 2669, 2662,
2687, 2690, 2691, 2693, 2696, 2728, 2746, 2743, 2746, 2745, 2746, 2742, 2749, 2756, 2750,
2738, 2743, 2744, 2746, 2750, 2753, 2762, 2770, 2767, 2766, 2778, 2780, 2781, 2776, 2763,
2762, 2763, 2766, 2762, 2753, 2761, 2768, 2772, 2746, 2762, 2767, 2774, 2795, 2806, 2809,
2816, 2830, 2821, 2837, 2836, 2838, 2842, 2831, 2815, 2816, 2818, 2817, 2822, 2817, 2823,
2834, 2825, 2818, 2821, 2847, 2846, 2845, 2847, 2848, 2849, 2863, 2872, 2866, 2874, 2875,
2885, 2886, 2892, 2898, 2897, 2917, 2922, 2916, 2921, 2922, 2921, 2940, 2945, 2935, 2950,
2946, 2949, 2951, 2948, 2940, 2941, 2940, 2939, 2955, 2943, 2941, 2942, 2945, 2961, 2962,
2965, 2975, 2970, 2971, 2960, 2961, 2956, 2960, 2961, 2967, 2966, 2979, 2976, 2983, 3012,
3024, 3021, 3041, 3040, 3041, 3042, 3048, 3077, 3088, 3091, 3109, 3107, 3116, 3111, 3115,
3121, 3095, 3097, 3077, 3059, 3065, 3068, 3061, 3062, 3061, 3063, 3068, 3070, 3073, 3057,
3075, 3082, 3089, 3086, 3090, 3092, 3106, 3112, 3118, 3120, 3119, 3121, 3119, 3126, 3133,
3135, 3163, 3166, 3167, 3181, 3185, 3191, 3197, 3198, 3199, 3194, 3210, 3216, 3224, 3225,
3235, 3237, 3245, 3253, 3259, 3248, 3256, 3273, 3265, 3248, 3249, 3258, 3257, 3256, 3255,
3250, 3252, 3260, 3261, 3267, 3268, 3269, 3284, 3294, 3334, 3337, 3340, 3307, 3316, 3320,
3328, 3330, 3350, 3357, 3363, 3364, 3361, 3396, 3397, 3410, 3411, 3433, 3452, 3453, 3465,
3467, 3469, 3470, 3467, 3480, 3494, 3517, 3519, 3527, 3542, 3546, 3547, 3544, 3570, 3571,
3596, 3604, 3603, 3587, 3588, 3591, 3589, 3590, 3591, 3600, 3598, 3599, 3600, 3599, 3600,
3609, 3608, 3610, 3611, 3632, 3633, 3632, 3633, 3635, 3642, 3643, 3656, 3652, 3653, 3657,
3676, 3677, 3687, 3690, 3693, 3694, 3697, 3702, 3703, 3722, 3729, 3730, 3731, 3737, 3744,
3747, 3751, 3757, 3758, 3761, 3769, 3781, 3796, 3798, 3797, 3798, 3812, 3848, 3849, 3850,
3854, 3861, 3859, 3863, 3867, 3870, 3882, 3884, 3876, 3879, 3909, 3908, 3910, 3909, 3908,
3886, 3888, 3889, 3891, 3920, 3921, 3922, 3913, 3955, 3956, 3974, 3975, 3976, 3988, 3992,
3999, 4009, 4010, 4019, 4023, 4024, 4035, 4045, 4044, 4046, 4072, 4074, 4093, 4101, 4105,
4111, 4141, 4146, 4147, 4160, 4162, 4161, 4162, 4156, 4158, 4160, 4168, 4169, 4168, 4169,
4192, 4191, 4222, 4239, 4227, 4228, 4226, 4228, 4227, 4228, 4221, 4222, 4223, 4248, 4249,
4258, 4249, 4257, 4260, 4258, 4257, 4259, 4260, 4267, 4284, 4285, 4286, 4283, 4280, 4281,
4280, 4279, 4282, 4289, 4290, 4295, 4300, 4289, 4290, 4291, 4300, 4310, 4314, 4319, 4320,
4321, 4324, 4330, 4326, 4340, 4326, 4327, 4332, 4333, 4334, 4339, 4361, 4369, 4370, 4374,
4372, 4382, 4398, 4399, 4395, 4396, 4397, 4400, 4404, 4401, 4405, 4415, 4427, 4433, 4446,
4447, 4455, 4456, 4457, 4458, 4467, 4471, 4463, 4470, 4460, 4458, 4459, 4457, 4468, 4470,
4448, 4449, 4453, 4474, 4470, 4475, 4474, 4473, 4472, 4474, 4476, 4478, 4479, 4483, 4484,
4485, 4481, 4483, 4492, 4505, 4513, 4529, 4530, 4531, 4503, 4504, 4505, 4499, 4500, 4516,
4517, 4513, 4497, 4493, 4494, 4524, 4537, 4543, 4544, 4551, 4552, 4555, 4565, 4567, 4566,
4567, 4566, 4590, 4591, 4596, 4597, 4588, 4572, 4575, 4572, 4574, 4575, 4576, 4584, 4572,
4578, 4579, 4593, 4600, 4604, 4584, 4586, 4583, 4599, 4601, 4603, 4625, 4624, 4625, 4627,
4623, 4624, 4620, 4618, 4617, 4616, 4617, 4612, 4623, 4634, 4635, 4623, 4628, 4629, 4631,
4633, 4654, 4659, 4669, 4674, 4677, 4678, 4694, 4699, 4701, 4713, 4714, 4722, 4724, 4725,
4761, 4785, 4791, 4783, 4790, 4791, 4792, 4809, 4835, 4840, 4845, 4852, 4851, 4852, 4846,
4848, 4849, 4854, 4856, 4862, 4856, 4875, 4876, 4878, 4892, 4893, 4903, 4891, 4909, 4923,
4938, 4939, 4944, 4953, 4954, 4975, 4977, 4978, 4984, 4987, 4989, 4987, 4995, 4994, 4996,
5000, 5008, 5018, 5024, 5027, 5028, 5031, 5035, 5051, 5059, 5061, 5065, 5069, 5090, 5110,
5111, 5113, 5115, 5129, 5128, 5139, 5140, 5141, 5142, 5149, 5148, 5147, 5153, 5157, 5178,
5189, 5209, 5217, 5211, 5215, 5221, 5243, 5244, 5275, 5287, 5290, 5300, 5301, 5297, 5300,
5289, 5311, 5314, 5303, 5304, 5306, 5307, 5316, 5329, 5330, 5333, 5334, 5335, 5343, 5359,
5380, 5382, 5383, 5392, 5403, 5409, 5410, 5420, 5427, 5438, 5448, 5455, 5458, 5489, 5490,
5491, 5492, 5494, 5497, 5522, 5523, 5525, 5536, 5537, 5542, 5522, 5529, 5534, 5535, 5545,
5546, 5544, 5538, 5547, 5551, 5550, 5551, 5552, 5553, 5565, 5578, 5589, 5590, 5589, 5592,
5601, 5614, 5615, 5614, 5615, 5616, 5626, 5627, 5629, 5647, 5649, 5650, 5645, 5646, 5647,
5672, 5690, 5692, 5697, 5701, 5697, 5702, 5705, 5718, 5720, 5725, 5747, 5749, 5751, 5756,
5762, 5764, 5771, 5772, 5771, 5772, 5809, 5822, 5823, 5840, 5841, 5839, 5838, 5839, 5840,
5838, 5852, 5853, 5859, 5865, 5879, 5885, 5899, 5907, 5910, 5936, 5943, 5957, 5953, 5958,
5968, 5979, 5971, 5966, 5967, 5971, 5991, 5993, 5994, 5993, 5996, 5993, 6004, 6010, 6017,
6018, 6017, 6038, 6036, 6035, 6036, 6039, 6042, 6037, 6059, 6062, 6067, 6068, 6084, 6077,
6080, 6081, 6079, 6093, 6095, 6098, 6111, 6107, 6114, 6105, 6104, 6111, 6112, 6120, 6126,
6140, 6143, 6142, 6148, 6152, 6159, 6150, 6152, 6168, 6169, 6167, 6161, 6160, 6159, 6160,
6168, 6169, 6188, 6200, 6220, 6221, 6219, 6221, 6220, 6232, 6233, 6234, 6242, 6247, 6246,
6247, 6253, 6258, 6256, 6249, 6228, 6225, 6223, 6227, 6228, 6259, 6265, 6268, 6270, 6271,
6275, 6270, 6263, 6265, 6279, 6323, 6365, 6374, 6397, 6415, 6416, 6419, 6403, 6405, 6410,
6417, 6424, 6425, 6422, 6432, 6442, 6451, 6459, 6458, 6463, 6485, 6486, 6509, 6508, 6532,
6535, 6551, 6562, 6563, 6572, 6571, 6590, 6628, 6629, 6627, 6625, 6620, 6619, 6625, 6616,
6615, 6646, 6648, 6649, 6651, 6653, 6645, 6651, 6652, 6665, 6660, 6664, 6665, 6684, 6685,
6694, 6695, 6696, 6693, 6694, 6703, 6702, 6696, 6693, 6694, 6691, 6725, 6728, 6715, 6722,
6728, 6729, 6730, 6742, 6753, 6754, 6752, 6753, 6761, 6771, 6773, 6778, 6779, 6795, 6819,
6797, 6799, 6798, 6807, 6809, 6837, 6861, 6864, 6865, 6868, 6869, 6867, 6861, 6871, 6874,
6872, 6873, 6875, 6867, 6877, 6876, 6898, 6900, 6902, 6904, 6905, 6915, 6908, 6927, 6928,
6935, 6942, 6943, 6945, 6948, 6943, 6947, 6990, 6993, 7013, 7014, 6997, 6976, 6972, 6975,
6976, 6982, 6991, 7006, 7009, 7015, 7019, 7018, 7008, 7011, 7018, 7023, 7027, 7044, 7054,
7060, 7058, 7057, 7061, 7038, 7036, 7040, 7041, 7039, 7042, 7039, 7037, 7049, 7056, 7057,
7072, 7095, 7101, 7102, 7104, 7103, 7106, 7107, 7108, 7112, 7117, 7119, 7117, 7132, 7130,
7137, 7143, 7135, 7138, 7152, 7156, 7180, 7181, 7182, 7179, 7180, 7174, 7172, 7162, 7177,
7180, 7187, 7153, 7159, 7145, 7139, 7140, 7137, 7156, 7160, 7161, 7162, 7163, 7170, 7163,
7171, 7167, 7166, 7167, 7169, 7170, 7178, 7179, 7166, 7177, 7176, 7203, 7194, 7187, 7191,
7202, 7198, 7199, 7205, 7206, 7207, 7208, 7214, 7215, 7214, 7215, 7217, 7208, 7205, 7214,
7215, 7216, 7212, 7214, 7222, 7226, 7231, 7229, 7239, 7238, 7239, 7250, 7252, 7255, 7277,
7279, 7284, 7290, 7293, 7294, 7316, 7325, 7326, 7327, 7328, 7326, 7325, 7329, 7335, 7340,
7344, 7337, 7338, 7348, 7366, 7376, 7377, 7382, 7387, 7401, 7392, 7396, 7400, 7406, 7405,
7410, 7425, 7426, 7432, 7449, 7450, 7444, 7450, 7455, 7456, 7471, 7472, 7494, 7514, 7515,
7512, 7515, 7517, 7514, 7538, 7539, 7540, 7556, 7557, 7553, 7549, 7546, 7566, 7567, 7564,
7565, 7559, 7560, 7563, 7571, 7570, 7572, 7580, 7587, 7595, 7597, 7614, 7622, 7621, 7627,
7656, 7652, 7659, 7658, 7657, 7660, 7665, 7664, 7666, 7669, 7678, 7680, 7682, 7683, 7669,
7692, 7695, 7699, 7705, 7717, 7720, 7730, 7733, 7748, 7759, 7747, 7748, 7759, 7778, 7777,
7778, 7780, 7781, 7780, 7788, 7787, 7788, 7814, 7810, 7787, 7788, 7802, 7803, 7813, 7815,
7818, 7823, 7830, 7852, 7868, 7874, 7879, 7873, 7854, 7871, 7877, 7876, 7883, 7885, 7887,
7886, 7889, 7893, 7900, 7909, 7910, 7906, 7910, 7913, 7918, 7919, 7941, 7947, 7948, 7949,
7951, 7952, 7977, 7976, 7979, 8010, 8011, 8014, 8015, 8031, 8054, 8056, 8059, 8085, 8087,
8107, 8109, 8115, 8116, 8117, 8115, 8116, 8119, 8127, 8145, 8147}
// The slice is built once outside the timed loop, so only the windowed
// counting work itself is measured b.N times.
for i := 0; i < b.N; i++ {
countThreeLarger(depths)
}
}
|
<gh_stars>1-10
// lib文件
// 包含书籍中所依赖的库 以及自定义的全局函数
/* lib/cuon-utils.js */
/* lib/webgl-utils.js */
/* lib/webgl-debug.js */
/* cuon-matrix.js */
/**
 * Invoke `cb` once the DOM is ready.
 *
 * Fix: the original only registered a DOMContentLoaded listener, so `cb`
 * was never invoked when this code ran after the document had already
 * finished loading (the event has fired and will not fire again). Check
 * readyState and call back immediately in that case.
 *
 * @param {Function} cb callback to run when the DOM is available
 */
function ready (cb) {
  if (document.readyState === 'loading') {
    document.addEventListener('DOMContentLoaded', cb);
  } else {
    // DOM already parsed — run the callback right away.
    cb();
  }
}
/* lib/cuon-utils.js */
// cuon-utils.js (c) 2012 kanda and matsuda
/**
* Create a program object and make current
* @param gl GL context
* @param vshader a vertex shader program (string)
* @param fshader a fragment shader program (string)
* @return true, if the program object was created and successfully made current
*/
/**
 * Create a program object from the given shader sources and make it the
 * current program; on success the program is also stashed on `gl.program`.
 * @param gl GL context
 * @param vshader a vertex shader program (string)
 * @param fshader a fragment shader program (string)
 * @return true, if the program object was created and successfully made current
 */
function initShaders(gl, vshader, fshader) {
  var prog = createProgram(gl, vshader, fshader);
  if (prog) {
    gl.useProgram(prog);
    gl.program = prog;
    return true;
  }
  console.log('Failed to create program');
  return false;
}
/**
* Create the linked program object
* @param gl GL context
* @param vshader a vertex shader program (string)
* @param fshader a fragment shader program (string)
* @return created program object, or null if the creation has failed
*/
/**
 * Build and link a program object from vertex/fragment shader sources.
 * @param gl GL context
 * @param vshader a vertex shader program (string)
 * @param fshader a fragment shader program (string)
 * @return created program object, or null if the creation has failed
 */
function createProgram(gl, vshader, fshader) {
  // Compile both shader stages first; bail out if either fails.
  var vs = loadShader(gl, gl.VERTEX_SHADER, vshader);
  var fs = loadShader(gl, gl.FRAGMENT_SHADER, fshader);
  if (!vs || !fs) {
    return null;
  }

  var prog = gl.createProgram();
  if (!prog) {
    return null;
  }

  // Attach both stages and link them into a program.
  gl.attachShader(prog, vs);
  gl.attachShader(prog, fs);
  gl.linkProgram(prog);

  // Verify the link; on failure, clean up every GL object created above.
  if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) {
    var error = gl.getProgramInfoLog(prog);
    console.log('Failed to link program: ' + error);
    gl.deleteProgram(prog);
    gl.deleteShader(fs);
    gl.deleteShader(vs);
    return null;
  }
  return prog;
}
/**
* Create a shader object
* @param gl GL context
* @param type the type of the shader object to be created
* @param source shader program (string)
* @return created shader object, or null if the creation has failed.
*/
/**
 * Create and compile a shader object.
 * @param gl GL context
 * @param type the type of the shader object to be created
 * @param source shader program (string)
 * @return created shader object, or null if the creation has failed.
 */
function loadShader(gl, type, source) {
  var shader = gl.createShader(type);
  if (shader == null) {
    console.log('unable to create shader');
    return null;
  }

  // Upload the source and compile it.
  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  // On compile failure, log the driver's info log and free the shader.
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    var error = gl.getShaderInfoLog(shader);
    console.log('Failed to compile shader: ' + error);
    gl.deleteShader(shader);
    return null;
  }
  return shader;
}
/**
* Initialize and get the rendering for WebGL
* @param canvas <cavnas> element
* @param opt_debug flag to initialize the context for debugging
* @return the rendering context for WebGL
*/
/**
 * Initialize and get the rendering context for WebGL.
 * @param canvas <canvas> element
 * @param opt_debug flag to initialize the context for debugging
 * @return the rendering context for WebGL, or null on failure
 */
function getWebGLContext(canvas, opt_debug) {
  var gl = WebGLUtils.setupWebGL(canvas);
  if (!gl) return null;
  // The debug wrapper is on by default; pass opt_debug === false (or any
  // falsy second argument) to skip it.
  var wantDebug = arguments.length < 2 || opt_debug;
  return wantDebug ? WebGLDebugUtils.makeDebugContext(gl) : gl;
}
/* lib/webgl-utils.js */
/*
* Copyright 2010, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @fileoverview This file contains functions every webgl program will need
* a version of one way or another.
*
* Instead of setting up a context manually it is recommended to
* use. This will check for success or failure. On failure it
* will attempt to present an approriate message to the user.
*
* gl = WebGLUtils.setupWebGL(canvas);
*
* For animated WebGL apps use of setTimeout or setInterval are
* discouraged. It is recommended you structure your rendering
* loop like this.
*
* function render() {
* window.requestAnimationFrame(render, canvas);
*
* // do rendering
* ...
* }
* render();
*
* This will call your rendering function up to the refresh rate
* of your display but will stop rendering if your app is not
* visible.
*/
/**
 * WebGLUtils — helpers for creating a WebGL context with user-friendly
 * error reporting when creation fails.
 */
WebGLUtils = function() {
  /**
   * Creates the HTML for a failure message.
   * @param {string} msg The message to display.
   * @return {string} The html.
   */
  var makeFailHTML = function(msg) {
    // NOTE: an older table-based layout that used to follow this return
    // was unreachable dead code and has been removed.
    return '' +
      '<div style="margin: auto; width:500px;z-index:10000;margin-top:20em;text-align:center;">' + msg + '</div>';
  };
  /**
   * Message for getting a webgl browser.
   * @type {string}
   */
  var GET_A_WEBGL_BROWSER = '' +
    'This page requires a browser that supports WebGL.<br/>' +
    '<a href="http://get.webgl.org">Click here to upgrade your browser.</a>';
  /**
   * Message for needing better hardware.
   * @type {string}
   */
  var OTHER_PROBLEM = '' +
    "It doesn't appear your computer can support WebGL.<br/>" +
    '<a href="http://get.webgl.org">Click here for more information.</a>';
  /**
   * Creates a webgl context. If creation fails it replaces the contents
   * of the document body with an error message containing the correct
   * links for WebGL.
   * @param {Element} canvas The canvas element to create a context from.
   * @param {WebGLContextCreationAttributes} opt_attribs Any creation
   *     attributes you want to pass in.
   * @param {function(msg)} opt_onError A function to call if there is an
   *     error during creation.
   * @return {WebGLRenderingContext} The created context, or null.
   */
  var setupWebGL = function(canvas, opt_attribs, opt_onError) {
    function handleCreationError(msg) {
      // The message goes into <body> rather than the canvas parent so it
      // is visible even for detached/off-screen canvases.
      var container = document.getElementsByTagName("body")[0];
      if (container) {
        var str = window.WebGLRenderingContext ?
            OTHER_PROBLEM :
            GET_A_WEBGL_BROWSER;
        if (msg) {
          str += "<br/><br/>Status: " + msg;
        }
        container.innerHTML = makeFailHTML(str);
      }
    }
    opt_onError = opt_onError || handleCreationError;
    if (canvas.addEventListener) {
      // Surface a more specific status message when the browser reports
      // why context creation failed.
      canvas.addEventListener("webglcontextcreationerror", function(event) {
        opt_onError(event.statusMessage);
      }, false);
    }
    var context = create3DContext(canvas, opt_attribs);
    if (!context) {
      // The original code had an if/else whose two branches made the same
      // opt_onError("") call; a single call preserves the behavior.
      opt_onError("");
    }
    return context;
  };
  /**
   * Creates a webgl context, trying the known context names in order.
   * @param {!Canvas} canvas The canvas tag to get a context from.
   * @param {Object} opt_attribs Optional context creation attributes.
   * @return {WebGLRenderingContext} The created context, or null.
   */
  var create3DContext = function(canvas, opt_attribs) {
    var names = ["webgl", "experimental-webgl", "webkit-3d", "moz-webgl"];
    var context = null;
    for (var ii = 0; ii < names.length; ++ii) {
      try {
        context = canvas.getContext(names[ii], opt_attribs);
      } catch(e) {}
      if (context) {
        break;
      }
    }
    return context;
  };
  return {
    create3DContext: create3DContext,
    setupWebGL: setupWebGL
  };
}();
/**
* Provides requestAnimationFrame in a cross browser
* way.
*/
/**
 * Cross-browser requestAnimationFrame polyfill. Picks the first available
 * vendor-prefixed implementation and falls back to a 60fps setTimeout.
 */
if (!window.requestAnimationFrame) {
  window.requestAnimationFrame = (function() {
    // Same lookup order as the original || chain: webkit, moz, o, ms.
    var vendors = ['webkit', 'moz', 'o', 'ms'];
    var impl = null;
    for (var i = 0; i < vendors.length && !impl; ++i) {
      impl = window[vendors[i] + 'RequestAnimationFrame'];
    }
    return impl || function(/* function FrameRequestCallback */ callback, /* DOMElement Element */ element) {
      window.setTimeout(callback, 1000 / 60);
    };
  })();
}
/** * ERRATA: 'cancelRequestAnimationFrame' renamed to 'cancelAnimationFrame' to reflect an update to the W3C Animation-Timing Spec.
*
* Cancels an animation frame request.
* Checks for cross-browser support, falls back to clearTimeout.
* @param {number} Animation frame request. */
/**
 * Cross-browser cancelAnimationFrame polyfill ('cancelRequestAnimationFrame'
 * was renamed to 'cancelAnimationFrame' in the W3C Animation-Timing spec).
 * Checks the known vendor-prefixed names in order, then falls back to
 * clearTimeout.
 */
if (!window.cancelAnimationFrame) {
  window.cancelAnimationFrame = (function() {
    // Same precedence order as the original || chain.
    var names = [
      'cancelRequestAnimationFrame',
      'webkitCancelAnimationFrame', 'webkitCancelRequestAnimationFrame',
      'mozCancelAnimationFrame', 'mozCancelRequestAnimationFrame',
      'msCancelAnimationFrame', 'msCancelRequestAnimationFrame',
      'oCancelAnimationFrame', 'oCancelRequestAnimationFrame'
    ];
    for (var i = 0; i < names.length; ++i) {
      if (window[names[i]]) {
        return window[names[i]];
      }
    }
    return window.clearTimeout;
  })();
}
/* lib/webgl-debug.js */
//Copyright (c) 2009 The Chromium Authors. All rights reserved.
//Use of this source code is governed by a BSD-style license that can be
//found in the LICENSE file.
// Various functions for helping debug WebGL apps.
WebGLDebugUtils = function() {
/**
* Wrapped logging function.
* @param {string} msg Message to log.
*/
/**
 * Wrapped logging function; a no-op when no console is available.
 * @param {string} msg Message to log.
 */
var log = function(msg) {
  var console_ = window.console;
  if (console_ && console_.log) {
    console_.log(msg);
  }
};
/**
* Which arguements are enums.
* @type {!Object.<number, string>}
*/
var glValidEnumContexts = {
// For each wrapped GL function name, maps argument index -> true when
// that argument is a GLenum and should be pretty-printed by name.
// Generic setters and getters
'enable': { 0:true },
'disable': { 0:true },
'getParameter': { 0:true },
// Rendering
'drawArrays': { 0:true },
'drawElements': { 0:true, 2:true },
// Shaders
'createShader': { 0:true },
'getShaderParameter': { 1:true },
'getProgramParameter': { 1:true },
// Vertex attributes
'getVertexAttrib': { 1:true },
'vertexAttribPointer': { 2:true },
// Textures
'bindTexture': { 0:true },
'activeTexture': { 0:true },
'getTexParameter': { 0:true, 1:true },
'texParameterf': { 0:true, 1:true },
'texParameteri': { 0:true, 1:true, 2:true },
'texImage2D': { 0:true, 2:true, 6:true, 7:true },
'texSubImage2D': { 0:true, 6:true, 7:true },
'copyTexImage2D': { 0:true, 2:true },
'copyTexSubImage2D': { 0:true },
'generateMipmap': { 0:true },
// Buffer objects
'bindBuffer': { 0:true },
'bufferData': { 0:true, 2:true },
'bufferSubData': { 0:true },
'getBufferParameter': { 0:true, 1:true },
// Renderbuffers and framebuffers
'pixelStorei': { 0:true, 1:true },
'readPixels': { 4:true, 5:true },
'bindRenderbuffer': { 0:true },
'bindFramebuffer': { 0:true },
'checkFramebufferStatus': { 0:true },
'framebufferRenderbuffer': { 0:true, 1:true, 2:true },
'framebufferTexture2D': { 0:true, 1:true, 2:true },
'getFramebufferAttachmentParameter': { 0:true, 1:true, 2:true },
'getRenderbufferParameter': { 0:true, 1:true },
'renderbufferStorage': { 0:true, 1:true },
// Frame buffer operations (clear, blend, depth test, stencil)
'clear': { 0:true },
'depthFunc': { 0:true },
'blendFunc': { 0:true, 1:true },
'blendFuncSeparate': { 0:true, 1:true, 2:true, 3:true },
'blendEquation': { 0:true },
'blendEquationSeparate': { 0:true, 1:true },
'stencilFunc': { 0:true },
'stencilFuncSeparate': { 0:true, 1:true },
'stencilMaskSeparate': { 0:true },
'stencilOp': { 0:true, 1:true, 2:true },
'stencilOpSeparate': { 0:true, 1:true, 2:true, 3:true },
// Culling
'cullFace': { 0:true },
'frontFace': { 0:true },
};
/**
* Map of numbers to names.
* @type {Object}
*/
var glEnums = null;
/**
 * Initializes this module. Safe to call more than once.
 * @param {!WebGLRenderingContext} ctx A WebGL context. If you have more
 *     than one context it doesn't matter which one you pass in, it is
 *     only used to pull out constants.
 */
function init(ctx) {
  if (glEnums !== null) {
    return;
  }
  glEnums = {};
  // Walk every property (including inherited ones) and record the
  // numeric constants so they can be mapped back to their names.
  for (var name in ctx) {
    if (typeof ctx[name] === 'number') {
      glEnums[ctx[name]] = name;
    }
  }
}
/**
 * Checks that the utils have been initialized.
 */
function checkInit() {
  if (glEnums === null) {
    throw 'WebGLDebugUtils.init(ctx) not called';
  }
}
/**
 * Returns true or false if value matches any WebGL enum.
 * @param {*} value Value to check if it might be an enum.
 * @return {boolean} True if value matches one of the WebGL defined enums.
 */
function mightBeEnum(value) {
  checkInit();
  return glEnums[value] !== undefined;
}
/**
 * Gets a string version of a WebGL enum.
 * @param {number} value Value to return an enum for.
 * @return {string} The string version of the enum, or a placeholder
 *     containing the hex value when the enum is unknown.
 */
function glEnumToString(value) {
  checkInit();
  var name = glEnums[value];
  if (name !== undefined) {
    return name;
  }
  return "*UNKNOWN WebGL ENUM (0x" + value.toString(16) + ")";
}
/**
 * Returns the string version of a WebGL argument.
 * Attempts to convert enum arguments to strings.
 * @param {string} functionName the name of the WebGL function.
 * @param {number} argumentIndex the index of the argument.
 * @param {*} value The value of the argument.
 * @return {string} The value as a string.
 */
function glFunctionArgToString(functionName, argumentIndex, value) {
  var enumArgs = glValidEnumContexts[functionName];
  if (enumArgs && enumArgs[argumentIndex]) {
    return glEnumToString(value);
  }
  return value.toString();
}
/**
* Given a WebGL context returns a wrapped context that calls
* gl.getError after every command and calls a function if the
* result is not gl.NO_ERROR.
*
* @param {!WebGLRenderingContext} ctx The webgl context to
* wrap.
* @param {!function(err, funcName, args): void} opt_onErrorFunc
* The function to call when gl.getError returns an
* error. If not specified the default function calls
* console.log with a message.
*/
// Given a WebGL context, returns a wrapped context that calls
// ctx.getError after every command and invokes opt_onErrorFunc (default:
// log a formatted message) whenever the result is not NO_ERROR.
function makeDebugContext(ctx, opt_onErrorFunc) {
init(ctx);
opt_onErrorFunc = opt_onErrorFunc || function(err, functionName, args) {
// apparently we can't do args.join(",");
// (`args` is an `arguments` object, not a real Array)
var argStr = "";
for (var ii = 0; ii < args.length; ++ii) {
argStr += ((ii == 0) ? '' : ', ') +
glFunctionArgToString(functionName, ii, args[ii]);
}
log("WebGL error "+ glEnumToString(err) + " in "+ functionName +
"(" + argStr + ")");
};
// Holds booleans for each GL error so after we get the error ourselves
// we can still return it to the client app.
var glErrorShadow = { };
// Makes a function that calls a WebGL function and then calls getError.
function makeErrorWrapper(ctx, functionName) {
return function() {
var result = ctx[functionName].apply(ctx, arguments);
var err = ctx.getError();
if (err != 0) {
// Remember the error so the app's own getError() still sees it.
glErrorShadow[err] = true;
opt_onErrorFunc(err, functionName, arguments);
}
return result;
};
}
// Make an object that has a copy of every property of the WebGL context
// but wraps all functions.
var wrapper = {};
for (var propertyName in ctx) {
if (typeof ctx[propertyName] == 'function') {
wrapper[propertyName] = makeErrorWrapper(ctx, propertyName);
} else {
// Non-function properties (constants, canvas, ...) are copied as-is.
wrapper[propertyName] = ctx[propertyName];
}
}
// Override the getError function with one that returns our saved results.
wrapper.getError = function() {
// Report each shadowed error once, in object iteration order.
for (var err in glErrorShadow) {
if (glErrorShadow[err]) {
glErrorShadow[err] = false;
return err;
}
}
return ctx.NO_ERROR;
};
return wrapper;
}
// Resets a WebGL context to its freshly-created default state: vertex
// attributes, texture units, bindings, and all fixed-function settings.
// The call order below is deliberate; do not reorder.
function resetToInitialState(ctx) {
var numAttribs = ctx.getParameter(ctx.MAX_VERTEX_ATTRIBS);
// A throw-away buffer is bound so vertexAttribPointer is legal.
var tmp = ctx.createBuffer();
ctx.bindBuffer(ctx.ARRAY_BUFFER, tmp);
for (var ii = 0; ii < numAttribs; ++ii) {
ctx.disableVertexAttribArray(ii);
ctx.vertexAttribPointer(ii, 4, ctx.FLOAT, false, 0, 0);
ctx.vertexAttrib1f(ii, 0);
}
ctx.deleteBuffer(tmp);
// Unbind every texture unit (both 2D and cube-map targets).
var numTextureUnits = ctx.getParameter(ctx.MAX_TEXTURE_IMAGE_UNITS);
for (var ii = 0; ii < numTextureUnits; ++ii) {
ctx.activeTexture(ctx.TEXTURE0 + ii);
ctx.bindTexture(ctx.TEXTURE_CUBE_MAP, null);
ctx.bindTexture(ctx.TEXTURE_2D, null);
}
ctx.activeTexture(ctx.TEXTURE0);
ctx.useProgram(null);
ctx.bindBuffer(ctx.ARRAY_BUFFER, null);
ctx.bindBuffer(ctx.ELEMENT_ARRAY_BUFFER, null);
ctx.bindFramebuffer(ctx.FRAMEBUFFER, null);
ctx.bindRenderbuffer(ctx.RENDERBUFFER, null);
ctx.disable(ctx.BLEND);
ctx.disable(ctx.CULL_FACE);
ctx.disable(ctx.DEPTH_TEST);
ctx.disable(ctx.DITHER);
ctx.disable(ctx.SCISSOR_TEST);
ctx.blendColor(0, 0, 0, 0);
ctx.blendEquation(ctx.FUNC_ADD);
ctx.blendFunc(ctx.ONE, ctx.ZERO);
ctx.clearColor(0, 0, 0, 0);
ctx.clearDepth(1);
ctx.clearStencil(-1);
ctx.colorMask(true, true, true, true);
ctx.cullFace(ctx.BACK);
ctx.depthFunc(ctx.LESS);
ctx.depthMask(true);
ctx.depthRange(0, 1);
ctx.frontFace(ctx.CCW);
ctx.hint(ctx.GENERATE_MIPMAP_HINT, ctx.DONT_CARE);
ctx.lineWidth(1);
ctx.pixelStorei(ctx.PACK_ALIGNMENT, 4);
ctx.pixelStorei(ctx.UNPACK_ALIGNMENT, 4);
ctx.pixelStorei(ctx.UNPACK_FLIP_Y_WEBGL, false);
ctx.pixelStorei(ctx.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
// TODO: Delete this IF.
if (ctx.UNPACK_COLORSPACE_CONVERSION_WEBGL) {
ctx.pixelStorei(ctx.UNPACK_COLORSPACE_CONVERSION_WEBGL, ctx.BROWSER_DEFAULT_WEBGL);
}
ctx.polygonOffset(0, 0);
ctx.sampleCoverage(1, false);
ctx.scissor(0, 0, ctx.canvas.width, ctx.canvas.height);
ctx.stencilFunc(ctx.ALWAYS, 0, 0xFFFFFFFF);
ctx.stencilMask(0xFFFFFFFF);
ctx.stencilOp(ctx.KEEP, ctx.KEEP, ctx.KEEP);
ctx.viewport(0, 0, ctx.canvas.clientWidth, ctx.canvas.clientHeight);
ctx.clear(ctx.COLOR_BUFFER_BIT | ctx.DEPTH_BUFFER_BIT | ctx.STENCIL_BUFFER_BIT);
// TODO: This should NOT be needed but Firefox fails with 'hint'
while(ctx.getError());
}
// Given a WebGL context, returns a wrapped context that can simulate the
// "context lost" / "context restored" lifecycle for testing purposes.
// See the exported-object documentation for the extra methods it adds.
function makeLostContextSimulatingContext(ctx) {
  var wrapper_ = {};
  var contextId_ = 1;
  var contextLost_ = false;
  var resourceId_ = 0;
  var resourceDb_ = [];
  var onLost_ = undefined;
  var onRestored_ = undefined;
  var nextOnRestored_ = undefined;
  // Holds booleans for each GL error so can simulate errors.
  var glErrorShadow_ = { };
  // True when obj is one of the WebGL resource object types.
  function isWebGLObject(obj) {
    return (obj instanceof WebGLBuffer ||
            obj instanceof WebGLFramebuffer ||
            obj instanceof WebGLProgram ||
            obj instanceof WebGLRenderbuffer ||
            obj instanceof WebGLShader ||
            obj instanceof WebGLTexture);
  }
  // Returns false when any argument is a resource created before the most
  // recent simulated context loss (i.e. the resource is stale).
  function checkResources(args) {
    for (var ii = 0; ii < args.length; ++ii) {
      var arg = args[ii];
      if (isWebGLObject(arg)) {
        return arg.__webglDebugContextLostId__ == contextId_;
      }
    }
    return true;
  }
  // Empties the simulated error shadow.
  function clearErrors() {
    var k = Object.keys(glErrorShadow_);
    for (var ii = 0; ii < k.length; ++ii) {
      // BUGFIX: was `delete glErrorShdow_[k]` — a misspelled variable name
      // (ReferenceError at runtime) that also used the whole key array
      // instead of the current key.
      delete glErrorShadow_[k[ii]];
    }
  }
  // Makes a function that simulates WebGL when out of context.
  function makeLostContextWrapper(ctx, functionName) {
    var f = ctx[functionName];
    return function() {
      // Only call the functions if the context is not lost.
      if (!contextLost_) {
        if (!checkResources(arguments)) {
          glErrorShadow_[ctx.INVALID_OPERATION] = true;
          return;
        }
        var result = f.apply(ctx, arguments);
        return result;
      }
    };
  }
  for (var propertyName in ctx) {
    if (typeof ctx[propertyName] == 'function') {
      wrapper_[propertyName] = makeLostContextWrapper(ctx, propertyName);
    } else {
      wrapper_[propertyName] = ctx[propertyName];
    }
  }
  function makeWebGLContextEvent(statusMessage) {
    return {statusMessage: statusMessage};
  }
  // Deletes every resource created through the wrapper since the last
  // context loss.
  function freeResources() {
    for (var ii = 0; ii < resourceDb_.length; ++ii) {
      var resource = resourceDb_[ii];
      // BUGFIX: the type names below were corrupted (e.g.
      // "WebctxFramebuffer"), making every branch after the first throw
      // a ReferenceError.
      if (resource instanceof WebGLBuffer) {
        ctx.deleteBuffer(resource);
      } else if (resource instanceof WebGLFramebuffer) {
        ctx.deleteFramebuffer(resource);
      } else if (resource instanceof WebGLProgram) {
        ctx.deleteProgram(resource);
      } else if (resource instanceof WebGLRenderbuffer) {
        ctx.deleteRenderbuffer(resource);
      } else if (resource instanceof WebGLShader) {
        ctx.deleteShader(resource);
      } else if (resource instanceof WebGLTexture) {
        ctx.deleteTexture(resource);
      }
    }
  }
  // Simulates a lost context event; notifies the lost listener async.
  wrapper_.loseContext = function() {
    if (!contextLost_) {
      contextLost_ = true;
      ++contextId_;
      while (ctx.getError());
      clearErrors();
      glErrorShadow_[ctx.CONTEXT_LOST_WEBGL] = true;
      setTimeout(function() {
        if (onLost_) {
          onLost_(makeWebGLContextEvent("context lost"));
        }
      }, 0);
    }
  };
  // Simulates the context being restored; requires a restore listener.
  wrapper_.restoreContext = function() {
    if (contextLost_) {
      if (onRestored_) {
        setTimeout(function() {
          freeResources();
          resetToInitialState(ctx);
          contextLost_ = false;
          if (onRestored_) {
            var callback = onRestored_;
            onRestored_ = nextOnRestored_;
            nextOnRestored_ = undefined;
            callback(makeWebGLContextEvent("context restored"));
          }
        }, 0);
      } else {
        throw "You can not restore the context without a listener";
      }
    }
  };
  // Wrap a few functions specially.
  wrapper_.getError = function() {
    if (!contextLost_) {
      var err;
      while (err = ctx.getError()) {
        glErrorShadow_[err] = true;
      }
    }
    // Drain the shadow one error per call, like the real getError.
    for (var err in glErrorShadow_) {
      if (glErrorShadow_[err]) {
        delete glErrorShadow_[err];
        return err;
      }
    }
    return ctx.NO_ERROR;
  };
  // Creation functions return null when the context is lost; successful
  // results are tagged with the current context id and tracked so they
  // can be freed on restore.
  var creationFunctions = [
    "createBuffer",
    "createFramebuffer",
    "createProgram",
    "createRenderbuffer",
    "createShader",
    "createTexture"
  ];
  for (var ii = 0; ii < creationFunctions.length; ++ii) {
    var functionName = creationFunctions[ii];
    wrapper_[functionName] = function(f) {
      return function() {
        if (contextLost_) {
          return null;
        }
        var obj = f.apply(ctx, arguments);
        obj.__webglDebugContextLostId__ = contextId_;
        resourceDb_.push(obj);
        return obj;
      };
    }(ctx[functionName]);
  }
  // Getter-style functions return null while the context is lost.
  var functionsThatShouldReturnNull = [
    "getActiveAttrib",
    "getActiveUniform",
    "getBufferParameter",
    "getContextAttributes",
    "getAttachedShaders",
    "getFramebufferAttachmentParameter",
    "getParameter",
    "getProgramParameter",
    "getProgramInfoLog",
    "getRenderbufferParameter",
    "getShaderParameter",
    "getShaderInfoLog",
    "getShaderSource",
    "getTexParameter",
    "getUniform",
    "getUniformLocation",
    "getVertexAttrib"
  ];
  for (var ii = 0; ii < functionsThatShouldReturnNull.length; ++ii) {
    var functionName = functionsThatShouldReturnNull[ii];
    wrapper_[functionName] = function(f) {
      return function() {
        if (contextLost_) {
          return null;
        }
        return f.apply(ctx, arguments);
      };
    }(wrapper_[functionName]);
  }
  // is* predicates return false while the context is lost.
  var isFunctions = [
    "isBuffer",
    "isEnabled",
    "isFramebuffer",
    "isProgram",
    "isRenderbuffer",
    "isShader",
    "isTexture"
  ];
  for (var ii = 0; ii < isFunctions.length; ++ii) {
    var functionName = isFunctions[ii];
    wrapper_[functionName] = function(f) {
      return function() {
        if (contextLost_) {
          return false;
        }
        return f.apply(ctx, arguments);
      };
    }(wrapper_[functionName]);
  }
  wrapper_.checkFramebufferStatus = function(f) {
    return function() {
      if (contextLost_) {
        return ctx.FRAMEBUFFER_UNSUPPORTED;
      }
      return f.apply(ctx, arguments);
    };
  }(wrapper_.checkFramebufferStatus);
  wrapper_.getAttribLocation = function(f) {
    return function() {
      if (contextLost_) {
        return -1;
      }
      return f.apply(ctx, arguments);
    };
  }(wrapper_.getAttribLocation);
  wrapper_.getVertexAttribOffset = function(f) {
    return function() {
      if (contextLost_) {
        return 0;
      }
      return f.apply(ctx, arguments);
    };
  }(wrapper_.getVertexAttribOffset);
  wrapper_.isContextLost = function() {
    return contextLost_;
  };
  // Accepts either a function or an EventListener-style object with a
  // handleEvent method.
  function wrapEvent(listener) {
    if (typeof(listener) == "function") {
      return listener;
    } else {
      return function(info) {
        listener.handleEvent(info);
      };
    }
  }
  wrapper_.registerOnContextLostListener = function(listener) {
    onLost_ = wrapEvent(listener);
  };
  wrapper_.registerOnContextRestoredListener = function(listener) {
    if (contextLost_) {
      // Queue the listener for after the in-flight restore completes.
      nextOnRestored_ = wrapEvent(listener);
    } else {
      onRestored_ = wrapEvent(listener);
    }
  };
  return wrapper_;
}
return {
/**
 * Initializes this module. Safe to call more than once.
 * @param {!WebGLRenderingContext} ctx A WebGL context. If
 * you have more than one context it doesn't matter which one
 * you pass in, it is only used to pull out constants.
 */
'init': init,
/**
 * Returns true or false if value matches any WebGL enum
 * @param {*} value Value to check if it might be an enum.
 * @return {boolean} True if value matches one of the WebGL defined enums
 */
'mightBeEnum': mightBeEnum,
/**
 * Gets a string version of a WebGL enum.
 *
 * Example:
 *   WebGLDebugUtil.init(ctx);
 *   var str = WebGLDebugUtil.glEnumToString(ctx.getError());
 *
 * @param {number} value Value to return an enum for
 * @return {string} The string version of the enum.
 */
'glEnumToString': glEnumToString,
/**
 * Converts the argument of a WebGL function to a string.
 * Attempts to convert enum arguments to strings.
 *
 * Example:
 *   WebGLDebugUtil.init(ctx);
 *   var str = WebGLDebugUtil.glFunctionArgToString('bindTexture', 0, gl.TEXTURE_2D);
 *
 * would return 'TEXTURE_2D'
 *
 * @param {string} functionName the name of the WebGL function.
 * @param {number} argumentIndex the index of the argument.
 * @param {*} value The value of the argument.
 * @return {string} The value as a string.
 */
'glFunctionArgToString': glFunctionArgToString,
/**
 * Given a WebGL context returns a wrapped context that calls
 * gl.getError after every command and calls a function if the
 * result is not NO_ERROR.
 *
 * You can supply your own function if you want. For example, if you'd like
 * an exception thrown on any GL error you could do this
 *
 *    function throwOnGLError(err, funcName, args) {
 *      throw WebGLDebugUtils.glEnumToString(err) + " was caused by call to" +
 *            funcName;
 *    };
 *
 *    ctx = WebGLDebugUtils.makeDebugContext(
 *        canvas.getContext("webgl"), throwOnGLError);
 *
 * @param {!WebGLRenderingContext} ctx The webgl context to wrap.
 * @param {!function(err, funcName, args): void} opt_onErrorFunc The function
 *     to call when gl.getError returns an error. If not specified the default
 *     function calls console.log with a message.
 */
'makeDebugContext': makeDebugContext,
/**
 * Given a WebGL context returns a wrapped context that adds 4
 * functions.
 *
 * ctx.loseContext:
 *   simulates a lost context event.
 *
 * ctx.restoreContext:
 *   simulates the context being restored.
 *
 * ctx.registerOnContextLostListener(listener):
 *   lets you register a listener for context lost. Use instead
 *   of addEventListener('webglcontextlostevent', listener);
 *
 * ctx.registerOnContextRestoredListener(listener):
 *   lets you register a listener for context restored. Use
 *   instead of addEventListener('webglcontextrestored',
 *   listener);
 *
 * @param {!WebGLRenderingContext} ctx The webgl context to wrap.
 */
'makeLostContextSimulatingContext': makeLostContextSimulatingContext,
/**
 * Resets a context to the initial state.
 * @param {!WebGLRenderingContext} ctx The webgl context to
 *     reset.
 */
'resetToInitialState': resetToInitialState
};
}();
// cuon-matrix.js (c) 2012 kanda and matsuda
/**
* This is a class treating 4x4 matrix.
* This class contains the function that is equivalent to OpenGL matrix stack.
* The matrix after conversion is calculated by multiplying a conversion matrix from the right.
* The matrix is replaced by the calculated result.
*/
/**
* Constructor of Matrix4
* If opt_src is specified, new matrix is initialized by opt_src.
* Otherwise, new matrix is initialized by identity matrix.
* @param opt_src source matrix(option)
*/
/**
 * Constructor of Matrix4.
 * If opt_src is specified, the new matrix is initialized as a copy of
 * opt_src; otherwise it is initialized to the identity matrix.
 * @param opt_src source matrix (optional)
 */
var Matrix4 = function(opt_src) {
  if (opt_src && typeof opt_src === 'object' && opt_src.hasOwnProperty('elements')) {
    // The Float32Array constructor makes an independent copy of the
    // source's 16 elements.
    this.elements = new Float32Array(opt_src.elements);
  } else {
    this.elements = new Float32Array([1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1]);
  }
};
/**
* Set the identity matrix.
* @return this
*/
Matrix4.prototype.setIdentity = function() {
  var e = this.elements;
  // Indices 0, 5, 10, 15 (multiples of 5) form the diagonal.
  for (var i = 0; i < 16; ++i) {
    e[i] = (i % 5 === 0) ? 1 : 0;
  }
  return this;
};
/**
* Copy matrix.
* @param src source matrix
* @return this
*/
Matrix4.prototype.set = function(src) {
  var i, s, d;
  s = src.elements;
  d = this.elements;
  if (s === d) {
    // Copying a matrix onto itself is a no-op, but we still return this
    // so the call stays chainable (the original returned undefined here,
    // which broke chains like m.set(m).translate(...)).
    return this;
  }
  for (i = 0; i < 16; ++i) {
    d[i] = s[i];
  }
  return this;
};
/**
* Multiply the matrix from the right.
* @param other The multiply matrix
* @return this
*/
Matrix4.prototype.concat = function(other) {
var i, e, a, b, ai0, ai1, ai2, ai3;
// Calculate e = a * b
e = this.elements;
a = this.elements;
// If e equals b, copy b to temporary matrix.
// (The product is written into `e` in place, so when `other` is this
// same matrix its elements must be snapshot first.)
b = other.elements;
if (e === b) {
b = new Float32Array(16);
for (i = 0; i < 16; ++i) {
b[i] = e[i];
}
}
// Column-major storage: row i of `a` is a[i], a[i+4], a[i+8], a[i+12].
for (i = 0; i < 4; i++) {
ai0=a[i]; ai1=a[i+4]; ai2=a[i+8]; ai3=a[i+12];
e[i] = ai0 * b[0] + ai1 * b[1] + ai2 * b[2] + ai3 * b[3];
e[i+4] = ai0 * b[4] + ai1 * b[5] + ai2 * b[6] + ai3 * b[7];
e[i+8] = ai0 * b[8] + ai1 * b[9] + ai2 * b[10] + ai3 * b[11];
e[i+12] = ai0 * b[12] + ai1 * b[13] + ai2 * b[14] + ai3 * b[15];
}
return this;
};
// `multiply` is an alias for `concat`.
Matrix4.prototype.multiply = Matrix4.prototype.concat;
/**
* Multiply the three-dimensional vector.
* @param pos The multiply vector
* @return The result of multiplication(Float32Array)
*/
Matrix4.prototype.multiplyVector3 = function(pos) {
  var e = this.elements;
  var p = pos.elements;
  var out = new Vector3();
  var r = out.elements;
  // w is implicitly 1, so the translation column (e[12..14]) is added
  // unscaled.
  for (var i = 0; i < 3; ++i) {
    r[i] = p[0] * e[i] + p[1] * e[i + 4] + p[2] * e[i + 8] + e[i + 12];
  }
  return out;
};
/**
* Multiply the four-dimensional vector.
* @param pos The multiply vector
* @return The result of multiplication(Float32Array)
*/
Matrix4.prototype.multiplyVector4 = function(pos) {
  var e = this.elements;
  var p = pos.elements;
  var out = new Vector4();
  var r = out.elements;
  // Full homogeneous transform: each output component is the dot
  // product of `pos` with one row of the (column-major) matrix.
  for (var i = 0; i < 4; ++i) {
    r[i] = p[0] * e[i] + p[1] * e[i + 4] + p[2] * e[i + 8] + p[3] * e[i + 12];
  }
  return out;
};
/**
* Transpose the matrix.
* @return this
*/
Matrix4.prototype.transpose = function() {
  var e = this.elements;
  // Swap each above-diagonal element with its mirrored counterpart.
  for (var row = 0; row < 4; ++row) {
    for (var col = row + 1; col < 4; ++col) {
      var upper = row * 4 + col;
      var lower = col * 4 + row;
      var t = e[upper];
      e[upper] = e[lower];
      e[lower] = t;
    }
  }
  return this;
};
/**
* Calculate the inverse matrix of specified matrix, and set to this.
* @param other The source matrix
* @return this
*/
Matrix4.prototype.setInverseOf = function(other) {
var i, s, d, inv, det;
s = other.elements;
d = this.elements;
// The inverse is computed into a scratch array first, so `other` may
// alias `this` (see invert()).
inv = new Float32Array(16);
// inv[] is the adjugate of s (transposed cofactor matrix), computed
// by explicit cofactor expansion, laid out column-major like s.
inv[0] = s[5]*s[10]*s[15] - s[5] *s[11]*s[14] - s[9] *s[6]*s[15]
+ s[9]*s[7] *s[14] + s[13]*s[6] *s[11] - s[13]*s[7]*s[10];
inv[4] = - s[4]*s[10]*s[15] + s[4] *s[11]*s[14] + s[8] *s[6]*s[15]
- s[8]*s[7] *s[14] - s[12]*s[6] *s[11] + s[12]*s[7]*s[10];
inv[8] = s[4]*s[9] *s[15] - s[4] *s[11]*s[13] - s[8] *s[5]*s[15]
+ s[8]*s[7] *s[13] + s[12]*s[5] *s[11] - s[12]*s[7]*s[9];
inv[12] = - s[4]*s[9] *s[14] + s[4] *s[10]*s[13] + s[8] *s[5]*s[14]
- s[8]*s[6] *s[13] - s[12]*s[5] *s[10] + s[12]*s[6]*s[9];
inv[1] = - s[1]*s[10]*s[15] + s[1] *s[11]*s[14] + s[9] *s[2]*s[15]
- s[9]*s[3] *s[14] - s[13]*s[2] *s[11] + s[13]*s[3]*s[10];
inv[5] = s[0]*s[10]*s[15] - s[0] *s[11]*s[14] - s[8] *s[2]*s[15]
+ s[8]*s[3] *s[14] + s[12]*s[2] *s[11] - s[12]*s[3]*s[10];
inv[9] = - s[0]*s[9] *s[15] + s[0] *s[11]*s[13] + s[8] *s[1]*s[15]
- s[8]*s[3] *s[13] - s[12]*s[1] *s[11] + s[12]*s[3]*s[9];
inv[13] = s[0]*s[9] *s[14] - s[0] *s[10]*s[13] - s[8] *s[1]*s[14]
+ s[8]*s[2] *s[13] + s[12]*s[1] *s[10] - s[12]*s[2]*s[9];
inv[2] = s[1]*s[6]*s[15] - s[1] *s[7]*s[14] - s[5] *s[2]*s[15]
+ s[5]*s[3]*s[14] + s[13]*s[2]*s[7] - s[13]*s[3]*s[6];
inv[6] = - s[0]*s[6]*s[15] + s[0] *s[7]*s[14] + s[4] *s[2]*s[15]
- s[4]*s[3]*s[14] - s[12]*s[2]*s[7] + s[12]*s[3]*s[6];
inv[10] = s[0]*s[5]*s[15] - s[0] *s[7]*s[13] - s[4] *s[1]*s[15]
+ s[4]*s[3]*s[13] + s[12]*s[1]*s[7] - s[12]*s[3]*s[5];
inv[14] = - s[0]*s[5]*s[14] + s[0] *s[6]*s[13] + s[4] *s[1]*s[14]
- s[4]*s[2]*s[13] - s[12]*s[1]*s[6] + s[12]*s[2]*s[5];
inv[3] = - s[1]*s[6]*s[11] + s[1]*s[7]*s[10] + s[5]*s[2]*s[11]
- s[5]*s[3]*s[10] - s[9]*s[2]*s[7] + s[9]*s[3]*s[6];
inv[7] = s[0]*s[6]*s[11] - s[0]*s[7]*s[10] - s[4]*s[2]*s[11]
+ s[4]*s[3]*s[10] + s[8]*s[2]*s[7] - s[8]*s[3]*s[6];
inv[11] = - s[0]*s[5]*s[11] + s[0]*s[7]*s[9] + s[4]*s[1]*s[11]
- s[4]*s[3]*s[9] - s[8]*s[1]*s[7] + s[8]*s[3]*s[5];
inv[15] = s[0]*s[5]*s[10] - s[0]*s[6]*s[9] - s[4]*s[1]*s[10]
+ s[4]*s[2]*s[9] + s[8]*s[1]*s[6] - s[8]*s[2]*s[5];
// Determinant via expansion along the first row of s.
det = s[0]*inv[0] + s[1]*inv[4] + s[2]*inv[8] + s[3]*inv[12];
if (det === 0) {
// Singular matrix: this is left unchanged and no exception is thrown.
return this;
}
det = 1 / det;
for (i = 0; i < 16; i++) {
d[i] = inv[i] * det;
}
return this;
};
/**
* Calculate the inverse matrix of this, and set to this.
* @return this
*/
Matrix4.prototype.invert = function() {
// Safe in place: setInverseOf computes into a temporary array before
// writing into the destination elements.
return this.setInverseOf(this);
};
/**
* Set the orthographic projection matrix.
* @param left The coordinate of the left of clipping plane.
* @param right The coordinate of the right of clipping plane.
* @param bottom The coordinate of the bottom of clipping plane.
* @param top The coordinate of the top top clipping plane.
* @param near The distances to the nearer depth clipping plane. This value is minus if the plane is to be behind the viewer.
* @param far The distances to the farther depth clipping plane. This value is minus if the plane is to be behind the viewer.
* @return this
*/
Matrix4.prototype.setOrtho = function(left, right, bottom, top, near, far) {
  if (left === right || bottom === top || near === far) {
    throw 'null frustum';
  }
  // Reciprocals of the view-box dimensions.
  var rw = 1 / (right - left);
  var rh = 1 / (top - bottom);
  var rd = 1 / (far - near);
  var e = this.elements;
  // Column-major layout; each source line below is one matrix row.
  e[0] = 2 * rw;  e[4] = 0;       e[8]  = 0;        e[12] = -(right + left) * rw;
  e[1] = 0;       e[5] = 2 * rh;  e[9]  = 0;        e[13] = -(top + bottom) * rh;
  e[2] = 0;       e[6] = 0;       e[10] = -2 * rd;  e[14] = -(far + near) * rd;
  e[3] = 0;       e[7] = 0;       e[11] = 0;        e[15] = 1;
  return this;
};
/**
 * Multiply the orthographic projection matrix from the right.
 * (Same parameters as setOrtho.)
 * @return this
 */
Matrix4.prototype.ortho = function(left, right, bottom, top, near, far) {
  var proj = new Matrix4();
  proj.setOrtho(left, right, bottom, top, near, far);
  return this.concat(proj);
};
/**
* Set the perspective projection matrix.
* @param left The coordinate of the left of clipping plane.
* @param right The coordinate of the right of clipping plane.
* @param bottom The coordinate of the bottom of clipping plane.
* @param top The coordinate of the top top clipping plane.
* @param near The distances to the nearer depth clipping plane. This value must be plus value.
* @param far The distances to the farther depth clipping plane. This value must be plus value.
* @return this
*/
Matrix4.prototype.setFrustum = function(left, right, bottom, top, near, far) {
  if (left === right || top === bottom || near === far) {
    throw 'null frustum';
  }
  if (near <= 0) {
    throw 'near <= 0';
  }
  if (far <= 0) {
    throw 'far <= 0';
  }
  // Reciprocals of the frustum dimensions.
  var rw = 1 / (right - left);
  var rh = 1 / (top - bottom);
  var rd = 1 / (far - near);
  var e = this.elements;
  // Column-major layout; each source line below is one matrix row.
  e[0] = 2 * near * rw;  e[4] = 0;              e[8]  = (right + left) * rw;  e[12] = 0;
  e[1] = 0;              e[5] = 2 * near * rh;  e[9]  = (top + bottom) * rh;  e[13] = 0;
  e[2] = 0;              e[6] = 0;              e[10] = -(far + near) * rd;   e[14] = -2 * near * far * rd;
  e[3] = 0;              e[7] = 0;              e[11] = -1;                   e[15] = 0;
  return this;
};
/**
 * Multiply the perspective projection matrix from the right.
 * (Same parameters as setFrustum; near and far must be positive.)
 * @return this
 */
Matrix4.prototype.frustum = function(left, right, bottom, top, near, far) {
  var proj = new Matrix4();
  proj.setFrustum(left, right, bottom, top, near, far);
  return this.concat(proj);
};
/**
* Set the perspective projection matrix by fovy and aspect.
* @param fovy The angle between the upper and lower sides of the frustum.
* @param aspect The aspect ratio of the frustum. (width/height)
* @param near The distances to the nearer depth clipping plane. This value must be plus value.
* @param far The distances to the farther depth clipping plane. This value must be plus value.
* @return this
*/
Matrix4.prototype.setPerspective = function(fovy, aspect, near, far) {
  if (near === far || aspect === 0) {
    throw 'null frustum';
  }
  if (near <= 0) {
    throw 'near <= 0';
  }
  if (far <= 0) {
    throw 'far <= 0';
  }
  // Half of the vertical field of view, converted to radians.
  var halfFovy = Math.PI * fovy / 180 / 2;
  var s = Math.sin(halfFovy);
  if (s === 0) {
    throw 'null frustum';
  }
  var rd = 1 / (far - near);
  var ct = Math.cos(halfFovy) / s; // cotangent of the half angle
  var e = this.elements;
  // Column-major layout; each source line below is one matrix row.
  e[0] = ct / aspect;  e[4] = 0;   e[8]  = 0;                   e[12] = 0;
  e[1] = 0;            e[5] = ct;  e[9]  = 0;                   e[13] = 0;
  e[2] = 0;            e[6] = 0;   e[10] = -(far + near) * rd;  e[14] = -2 * near * far * rd;
  e[3] = 0;            e[7] = 0;   e[11] = -1;                  e[15] = 0;
  return this;
};
/**
 * Multiply the perspective projection matrix from the right.
 * (Same parameters as setPerspective; near and far must be positive.)
 * @return this
 */
Matrix4.prototype.perspective = function(fovy, aspect, near, far) {
  var proj = new Matrix4();
  proj.setPerspective(fovy, aspect, near, far);
  return this.concat(proj);
};
/**
* Set the matrix for scaling.
* @param x The scale factor along the X axis
* @param y The scale factor along the Y axis
* @param z The scale factor along the Z axis
* @return this
*/
Matrix4.prototype.setScale = function(x, y, z) {
  // Overwrite this matrix with a pure scaling matrix: zero everything,
  // then set the diagonal to (x, y, z, 1).
  var e = this.elements;
  for (var i = 0; i < 16; i++) {
    e[i] = 0;
  }
  e[0] = x;
  e[5] = y;
  e[10] = z;
  e[15] = 1;
  return this;
};
/**
* Multiply the matrix for scaling from the right.
* @param x The scale factor along the X axis
* @param y The scale factor along the Y axis
* @param z The scale factor along the Z axis
* @return this
*/
Matrix4.prototype.scale = function(x, y, z) {
  // Right-multiply by a scale matrix in place: column 0 scales by x,
  // column 1 by y, column 2 by z (column-major storage).
  var e = this.elements;
  for (var row = 0; row < 4; row++) {
    e[row] *= x;
    e[4 + row] *= y;
    e[8 + row] *= z;
  }
  return this;
};
/**
* Set the matrix for translation.
* @param x The X value of a translation.
* @param y The Y value of a translation.
* @param z The Z value of a translation.
* @return this
*/
Matrix4.prototype.setTranslate = function(x, y, z) {
  // Reset to identity, then place the translation in the fourth column.
  var e = this.elements;
  for (var i = 0; i < 16; i++) {
    e[i] = (i % 5 === 0) ? 1 : 0;  // 1 on the diagonal (indices 0, 5, 10, 15)
  }
  e[12] = x;
  e[13] = y;
  e[14] = z;
  return this;
};
/**
* Multiply the matrix for translation from the right.
* @param x The X value of a translation.
* @param y The Y value of a translation.
* @param z The Z value of a translation.
* @return this
*/
Matrix4.prototype.translate = function(x, y, z) {
  // Right-multiply by a translation in place: add M * (x, y, z, 0)
  // into the fourth column.
  var e = this.elements;
  for (var row = 0; row < 4; row++) {
    e[12 + row] += e[row] * x + e[4 + row] * y + e[8 + row] * z;
  }
  return this;
};
/**
* Set the matrix for rotation.
* The vector of rotation axis may not be normalized.
* @param angle The angle of rotation (degrees)
* @param x The X coordinate of vector of rotation axis.
* @param y The Y coordinate of vector of rotation axis.
* @param z The Z coordinate of vector of rotation axis.
* @return this
*/
Matrix4.prototype.setRotate = function(angle, x, y, z) {
  var e, s, c, len, rlen, nc, xy, yz, zx, xs, ys, zs;
  angle = Math.PI * angle / 180; // degrees -> radians
  e = this.elements;
  s = Math.sin(angle);
  c = Math.cos(angle);
  if (0 !== x && 0 === y && 0 === z) {
    // Rotation around X axis
    if (x < 0) {
      s = -s; // a negative axis component flips the rotation direction
    }
    e[0] = 1; e[4] = 0; e[ 8] = 0; e[12] = 0;
    e[1] = 0; e[5] = c; e[ 9] =-s; e[13] = 0;
    e[2] = 0; e[6] = s; e[10] = c; e[14] = 0;
    e[3] = 0; e[7] = 0; e[11] = 0; e[15] = 1;
  } else if (0 === x && 0 !== y && 0 === z) {
    // Rotation around Y axis
    if (y < 0) {
      s = -s;
    }
    e[0] = c; e[4] = 0; e[ 8] = s; e[12] = 0;
    e[1] = 0; e[5] = 1; e[ 9] = 0; e[13] = 0;
    e[2] =-s; e[6] = 0; e[10] = c; e[14] = 0;
    e[3] = 0; e[7] = 0; e[11] = 0; e[15] = 1;
  } else if (0 === x && 0 === y && 0 !== z) {
    // Rotation around Z axis
    if (z < 0) {
      s = -s;
    }
    e[0] = c; e[4] =-s; e[ 8] = 0; e[12] = 0;
    e[1] = s; e[5] = c; e[ 9] = 0; e[13] = 0;
    e[2] = 0; e[6] = 0; e[10] = 1; e[14] = 0;
    e[3] = 0; e[7] = 0; e[11] = 0; e[15] = 1;
  } else {
    // Rotation around an arbitrary axis (standard axis-angle matrix).
    len = Math.sqrt(x*x + y*y + z*z);
    if (len !== 1) {
      // Normalize the axis vector first.
      rlen = 1 / len;
      x *= rlen;
      y *= rlen;
      z *= rlen;
    }
    nc = 1 - c;
    xy = x * y;
    yz = y * z;
    zx = z * x;
    xs = x * s;
    ys = y * s;
    zs = z * s;
    e[ 0] = x*x*nc + c;
    e[ 1] = xy *nc + zs;
    e[ 2] = zx *nc - ys;
    e[ 3] = 0;
    e[ 4] = xy *nc - zs;
    e[ 5] = y*y*nc + c;
    e[ 6] = yz *nc + xs;
    e[ 7] = 0;
    e[ 8] = zx *nc + ys;
    e[ 9] = yz *nc - xs;
    e[10] = z*z*nc + c;
    e[11] = 0;
    e[12] = 0;
    e[13] = 0;
    e[14] = 0;
    e[15] = 1;
  }
  return this;
};
/**
* Multiply the matrix for rotation from the right.
* The vector of rotation axis may not be normalized.
* @param angle The angle of rotation (degrees)
* @param x The X coordinate of vector of rotation axis.
* @param y The Y coordinate of vector of rotation axis.
* @param z The Z coordinate of vector of rotation axis.
* @return this
*/
Matrix4.prototype.rotate = function(angle, x, y, z) {
  // Fold a fresh rotation matrix into this one from the right.
  var rotation = new Matrix4().setRotate(angle, x, y, z);
  return this.concat(rotation);
};
/**
* Set the viewing matrix.
* @param eyeX, eyeY, eyeZ The position of the eye point.
* @param centerX, centerY, centerZ The position of the reference point.
* @param upX, upY, upZ The direction of the up vector.
* @return this
*/
Matrix4.prototype.setLookAt = function(eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ) {
  var e, fx, fy, fz, rlf, sx, sy, sz, rls, ux, uy, uz;
  // f: forward vector from the eye toward the reference point.
  fx = centerX - eyeX;
  fy = centerY - eyeY;
  fz = centerZ - eyeZ;
  // Normalize f.
  rlf = 1 / Math.sqrt(fx*fx + fy*fy + fz*fz);
  fx *= rlf;
  fy *= rlf;
  fz *= rlf;
  // s = f x up: the camera's side (right) axis.
  sx = fy * upZ - fz * upY;
  sy = fz * upX - fx * upZ;
  sz = fx * upY - fy * upX;
  // Normalize s.
  rls = 1 / Math.sqrt(sx*sx + sy*sy + sz*sz);
  sx *= rls;
  sy *= rls;
  sz *= rls;
  // u = s x f: the corrected up axis (unit length by construction).
  ux = sy * fz - sz * fy;
  uy = sz * fx - sx * fz;
  uz = sx * fy - sy * fx;
  // Write the rotation part: rows s, u, -f in column-major storage.
  e = this.elements;
  e[0] = sx;
  e[1] = ux;
  e[2] = -fx;
  e[3] = 0;
  e[4] = sy;
  e[5] = uy;
  e[6] = -fy;
  e[7] = 0;
  e[8] = sz;
  e[9] = uz;
  e[10] = -fz;
  e[11] = 0;
  e[12] = 0;
  e[13] = 0;
  e[14] = 0;
  e[15] = 1;
  // Finally translate so the eye ends up at the origin.
  return this.translate(-eyeX, -eyeY, -eyeZ);
};
/**
* Multiply the viewing matrix from the right.
* @param eyeX, eyeY, eyeZ The position of the eye point.
* @param centerX, centerY, centerZ The position of the reference point.
* @param upX, upY, upZ The direction of the up vector.
* @return this
*/
Matrix4.prototype.lookAt = function(eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ) {
  // Build a fresh viewing matrix and multiply it in from the right.
  var view = new Matrix4().setLookAt(eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
  return this.concat(view);
};
/**
* Multiply the matrix for project vertex to plane from the right.
* @param plane The array[A, B, C, D] of the equation of plane "Ax + By + Cz + D = 0".
* @param light The array which stored coordinates of the light. if light[3]=0, treated as parallel light.
* @return this
*/
Matrix4.prototype.dropShadow = function(plane, light) {
  // Shadow matrix: element (row, col) is dot*I - light[row]*plane[col],
  // where dot = plane . light. Built with loops instead of 16 literals.
  var shadow = new Matrix4();
  var e = shadow.elements;
  var dot = plane[0] * light[0] + plane[1] * light[1] + plane[2] * light[2] + plane[3] * light[3];
  for (var col = 0; col < 4; col++) {
    for (var row = 0; row < 4; row++) {
      e[col * 4 + row] = -light[row] * plane[col];
    }
    e[col * 4 + col] += dot;  // add dot only on the diagonal
  }
  return this.concat(shadow);
};
/**
* Multiply the matrix for project vertex to plane from the right.(Projected by parallel light.)
* @param normX, normY, normZ The normal vector of the plane.(Not necessary to be normalized.)
* @param planeX, planeY, planeZ The coordinate of arbitrary points on a plane.
* @param lightX, lightY, lightZ The vector of the direction of light.(Not necessary to be normalized.)
* @return this
*/
Matrix4.prototype.dropShadowDirectionally = function(normX, normY, normZ, planeX, planeY, planeZ, lightX, lightY, lightZ) {
  // Plane equation n.p + D = 0, so D = -(n . pointOnPlane).
  // A light w-component of 0 marks a directional (parallel) light.
  var planeOffset = planeX * normX + planeY * normY + planeZ * normZ;
  return this.dropShadow([normX, normY, normZ, -planeOffset], [lightX, lightY, lightZ, 0]);
};
/**
* Constructor of Vector3
* If opt_src is specified, new vector is initialized by opt_src.
* @param opt_src source vector(option)
*/
var Vector3 = function(opt_src) {
  // Start from the zero vector; copy the first three components when an
  // array-like source is supplied.
  var elements = new Float32Array(3);
  if (opt_src && typeof opt_src === 'object') {
    elements[0] = opt_src[0];
    elements[1] = opt_src[1];
    elements[2] = opt_src[2];
  }
  this.elements = elements;
};
/**
* Normalize.
* @return this
*/
Vector3.prototype.normalize = function() {
  // Scale this vector to unit length in place.
  // A zero-length (or NaN-length) vector is reset to (0, 0, 0).
  var v = this.elements;
  var x = v[0], y = v[1], z = v[2];
  var len = Math.sqrt(x * x + y * y + z * z);
  if (!len) {
    v[0] = 0; v[1] = 0; v[2] = 0;
    return this;
  }
  if (len == 1) {
    return this;  // already unit length, nothing to do
  }
  var inv = 1 / len;
  v[0] = x * inv;
  v[1] = y * inv;
  v[2] = z * inv;
  return this;
};
/**
* Constructor of Vector4
* If opt_src is specified, new vector is initialized by opt_src.
* @param opt_src source vector(option)
*/
var Vector4 = function(opt_src) {
  // Zero-initialised 4-component vector, optionally copied from opt_src.
  var elements = new Float32Array(4);
  if (opt_src && typeof opt_src === 'object') {
    for (var i = 0; i < 4; i++) {
      elements[i] = opt_src[i];
    }
  }
  this.elements = elements;
};
|
<reponame>vampire-studios/Obsidian
package io.github.vampirestudios.obsidian.mixins.client;
import io.github.vampirestudios.obsidian.api.fabric.TridentInterface;
import net.minecraft.client.render.entity.model.DrownedEntityModel;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Redirect;
@Mixin(DrownedEntityModel.class)
public class DrownedEntityModelMixin {
@Redirect(method = "animateModel", at = @At(value = "INVOKE", target = "Lnet/minecraft/item/ItemStack;isOf(Lnet/minecraft/item/Item;)Z"))
public boolean ob_getItem(ItemStack itemStack, Item item) {
return itemStack.getItem() instanceof TridentInterface;
}
} |
def pascalTriangle(n):
    """Print the first n rows of Pascal's triangle, one list per line."""
    current = [1]
    for _ in range(n):
        print(current)
        # Each new row is the pairwise sum of the previous row shifted
        # against itself (zero-padded on each side).
        current = [left + right for left, right in zip([0] + current, current + [0])]


n = 5
pascalTriangle(n)
# [1]
# [1, 1]
# [1, 2, 1]
# [1, 3, 3, 1]
# [1, 4, 6, 4, 1] |
<reponame>Shevaitverma/Mini_projects
import React from 'react';
import ReactDOM from 'react-dom';
import './index.css';
import Card from './Card';
import Data from './Data';
ReactDOM.render(
<>
<h1 className='Heading_style'> List of top best Anime</h1>
{Data.map((value)=>{
return(
<Card
C_name ={value.C_name}
img_scr={value.img_scr}
title={value.title}
links={value.links}
/>
)
})}
</>,
document.getElementById('root')
);
|
<!-- Static demo table: three rows of two columns each. -->
<table>
  <tr>
    <td>Row 1, Col 1</td>
    <td>Row 1, Col 2</td>
  </tr>
  <tr>
    <td>Row 2, Col 1</td>
    <td>Row 2, Col 2</td>
  </tr>
  <tr>
    <td>Row 3, Col 1</td>
    <td>Row 3, Col 2</td>
  </tr>
</table>
import { RegisteredErc20 } from "./core"
// Registered ERC-20 tokens available on the local devchain.
export const erc20Devchain: RegisteredErc20[] = [
  {
    name: "<NAME>", // NOTE(review): placeholder scrubbed from the original source — restore the real token name
    symbol: "tCELO",
    address: "0xAA86dDA78E9434acA114b6676Fc742A18d15a1CC",
    decimals: 18,
  },
]
// Watch the filesystem root recursively and log every change event.
// NOTE(review): watching "/" observes the entire filesystem — confirm this
// broad scope is intended rather than a project directory.
const watcher = Deno.watchFs("/");
for await (const event of watcher) {
  console.log(">>>> event", event);
  // { kind: "create", paths: [ "/foo.txt" ] }
}
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "biod/ec_command.h"
using testing::_;
using testing::InvokeWithoutArgs;
using testing::Return;
namespace biod {
namespace {

// File descriptor value passed to Run(); the ioctl is mocked, so any value works.
constexpr int kDummyFd = 0;
// Return value ioctl() uses to signal failure.
constexpr int kIoctlFailureRetVal = -1;

// Test double that replaces EcCommand's ioctl() with a gmock method so
// command behavior can be exercised without real EC hardware.
template <typename O, typename I>
class MockEcCommand : public EcCommand<O, I> {
 public:
  using EcCommand<O, I>::EcCommand;
  ~MockEcCommand() override = default;

  using Data = typename EcCommand<O, I>::Data;
  MOCK_METHOD(int, ioctl, (int fd, uint32_t request, Data* data));
};

// Concrete mock for the EC_CMD_FP_MODE command used by all tests below.
class MockFpModeCommand : public MockEcCommand<struct ec_params_fp_mode,
                                               struct ec_response_fp_mode> {
 public:
  MockFpModeCommand() : MockEcCommand(EC_CMD_FP_MODE, 0, {.mode = 1}) {}
};

// ioctl behavior for EC commands:
//   returns sizeof(EC response) (>=0) on success, -1 on failure
//   cmd.result is error code from EC (EC_RES_SUCCESS, etc)

TEST(EcCommand, Run_Success) {
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl).WillOnce(Return(mock.RespSize()));
  EXPECT_TRUE(mock.Run(kDummyFd));
}

TEST(EcCommand, Run_IoctlFailure) {
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl).WillOnce(Return(kIoctlFailureRetVal));
  EXPECT_FALSE(mock.Run(kDummyFd));
}

TEST(EcCommand, Run_CommandFailure) {
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl)
      .WillOnce([](int, uint32_t, MockFpModeCommand::Data* data) {
        // Test the case where the ioctl itself succeeds, the but the EC
        // command did not. In this case, "result" will be set, but the
        // response size will not match the command's response size.
        data->cmd.result = EC_RES_ACCESS_DENIED;
        return 0;
      });
  EXPECT_FALSE(mock.Run(kDummyFd));
}

// Req()/Resp() must be callable on const command objects.
TEST(EcCommand, ConstReq) {
  const MockFpModeCommand mock;
  EXPECT_TRUE(mock.Req());
}

TEST(EcCommand, ConstResp) {
  const MockFpModeCommand mock;
  EXPECT_TRUE(mock.Resp());
}

TEST(EcCommand, Run_CheckResult_Success) {
  constexpr int kExpectedResult = 42;
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl)
      .WillOnce([](int, uint32_t, MockFpModeCommand::Data* data) {
        data->cmd.result = kExpectedResult;
        return data->cmd.insize;
      });
  EXPECT_TRUE(mock.Run(kDummyFd));
  EXPECT_EQ(mock.Result(), kExpectedResult);
}

TEST(EcCommand, Run_CheckResult_Failure) {
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl)
      .WillOnce([](int, uint32_t, MockFpModeCommand::Data* data) {
        // Note that it's not expected that the result would be set by the
        // kernel driver in this case, but we want to be defensive against
        // the behavior in case there is an instance where it does.
        data->cmd.result = EC_RES_ERROR;
        return kIoctlFailureRetVal;
      });
  EXPECT_FALSE(mock.Run(kDummyFd));
  EXPECT_EQ(mock.Result(), kEcCommandUninitializedResult);
}

TEST(EcCommand, RunWithMultipleAttempts_Success) {
  constexpr int kNumAttempts = 2;
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl)
      .Times(kNumAttempts)
      // First ioctl() fails
      .WillOnce(InvokeWithoutArgs([]() {
        errno = ETIMEDOUT;
        return kIoctlFailureRetVal;
      }))
      // Second ioctl() succeeds
      .WillOnce(Return(mock.RespSize()));
  EXPECT_TRUE(mock.RunWithMultipleAttempts(kDummyFd, kNumAttempts));
}

TEST(EcCommand, RunWithMultipleAttempts_Timeout_Failure) {
  constexpr int kNumAttempts = 2;
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl)
      .Times(kNumAttempts)
      // All calls to ioctl() timeout
      .WillRepeatedly(InvokeWithoutArgs([]() {
        errno = ETIMEDOUT;
        return kIoctlFailureRetVal;
      }));
  EXPECT_FALSE(mock.RunWithMultipleAttempts(kDummyFd, kNumAttempts));
}

TEST(EcCommand, RunWithMultipleAttempts_ErrorNotTimeout_Failure) {
  constexpr int kNumAttempts = 2;
  MockFpModeCommand mock;
  EXPECT_CALL(mock, ioctl)
      // Errors other than timeout should cause immediate failure even when
      // attempting retries.
      .Times(1)
      .WillOnce(InvokeWithoutArgs([]() {
        errno = EINVAL;
        return kIoctlFailureRetVal;
      }));
  EXPECT_FALSE(mock.RunWithMultipleAttempts(kDummyFd, kNumAttempts));
}

}  // namespace
}  // namespace biod
|
-- Names of employees with more than a decade of tenure.
SELECT name
FROM employees
WHERE years_in_company > 10;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/**
 * Interactive demo: reads an age and a height from standard input and prints
 * a (toy) guess based on two hard-coded thresholds.
 *
 * <p>Fix: {@link java.io.BufferedReader#readLine()} throws the checked
 * {@link IOException}, so {@code main} must declare it — the original code
 * did not compile.
 */
public class DecisionTree {
    public static void main(String[] args) throws IOException {
        int age;
        float height;
        String gender;
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        System.out.print("Please enter the person's age: ");
        age = Integer.parseInt(reader.readLine());
        System.out.print("Please enter the person's height: ");
        height = Float.parseFloat(reader.readLine());
        // Toy decision rule: both thresholds must agree, otherwise "Unspecified".
        if (age < 18 && height < 5.0) {
            gender = "Female";
        } else if (age >= 18 && height >= 5.0) {
            gender = "Male";
        } else {
            gender = "Unspecified";
        }
        System.out.println("The person's gender is: " + gender);
    }
}
#!/bin/bash
set -euo pipefail

# Root of the git checkout; stylesheet paths are resolved relative to it.
TOP_DIR=$(git rev-parse --show-toplevel)

# Base xsltproc invocation. XSLTPROC and XSLTPROC_PARAMS are expected from the
# caller's environment — with `set -u` above, the script aborts if either is
# unset (TODO confirm callers always export them).
XSLTPROC_CMD="${XSLTPROC} --nomkdir --nonet --xinclude ${XSLTPROC_PARAMS}"

# Apply the XSLT passes in sequence; each stage reads the previous stage's
# output from stdin ("-").
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/identity.xsl "$@" | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/convert-pb_type-attributes.xsl - | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/convert-port-tag.xsl - | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/convert-prefix-port.xsl - | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/pack-patterns.xsl - | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/remove-duplicate-models.xsl - | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/attribute-fixes.xsl - | \
${XSLTPROC_CMD} ${TOP_DIR}/common/xml/sort-tags.xsl - | \
cat
|
var should = require('should');

// TODO(review): this suite is an empty shell — validatePlant() has no test
// cases yet; add them before relying on green CI for this module.
describe('validatePlant()', function() {
});
|
import { Injectable } from '@angular/core';
import { Router } from '@angular/router';
import { NgModule } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Http, ResponseContentType } from '@angular/http';
import {environment} from '../environments/environment';
import { NoteAPIResponse } from './response.model';
@Injectable({
  providedIn: 'root'
})
/**
 * HTTP client for the family-notes and virtual-pizza backend endpoints.
 * All URLs are built from the environment's backend base URL.
 */
export class NotesService{
  // Currently never read or written in this class — candidate for removal
  // (verify no template/external access first).
  private servicesList: any = [];
  // Base URL of the backend API, from the active environment config.
  private serverAddress = environment.backendURL;

  // NOTE(review): `http` (legacy @angular/http) is injected but unused in this
  // class — confirm before removing, since removal changes the DI signature.
  constructor(private httpClient: HttpClient, private router: Router, private http: Http) { }

  /** Fetch all notes belonging to the family with the given id. */
  getFamilyNotes(familyId: Number) {
    return this.httpClient.get<NoteAPIResponse>(this.serverAddress + 'api/family/notes/id/' + familyId);
  }

  /** Create a new note; resolves to whether the backend accepted it. */
  addNote(note: any) {
    return this.httpClient.post<{noteAdded: boolean}>(this.serverAddress + 'api/family/notes/new', note);
  }

  /** Update an existing note. */
  editNote(note: any) {
    return this.httpClient.post<{noteUpdated: boolean}>(this.serverAddress + 'api/family/notes/edit', note);
  }

  /** Delete a note. */
  deleteNote(note: any) {
    return this.httpClient.post<{noteDeleted: boolean}>(this.serverAddress + 'api/family/notes/delete', note);
  }

  /** Fetch the virtual-pizza transaction history. */
  getVPizzaTransactionHistory():any{
    return this.httpClient.get<NoteAPIResponse>(this.serverAddress + 'api/VPizza/history');
  }

  /** Mark a pizza as refilled on the backend. */
  markPizzaRefilled(pizza: any) {
    return this.httpClient.post<{refilled: boolean}>(this.serverAddress + 'api/VPizza/refill', pizza);
  }
}
|
<gh_stars>0
module Gingerr
  # Builds Signal records (with their Endpoint and optional error payload)
  # from request parameters, accumulating validation messages instead of
  # raising.
  class SignalCreator
    # Validation/failure messages collected during #create.
    attr_reader :errors

    def initialize
      @errors = []
    end

    # Creates a signal for +app+ from +params+ inside a transaction.
    # Returns the signal (the transaction block's value); on failure the
    # returned record is unsaved and the problems are appended to #errors.
    # When :type is missing, returns an unsaved signal built on +app+.
    def create(app, params)
      if params[:type]
        signal_params = { pid: params[:pid], type: Gingerr::Signal.class_for_type(params[:type]) }
        Signal.transaction do
          endpoint = Endpoint.from_params(params.slice(:ip, :hostname, :login))
          @errors += endpoint.errors.full_messages unless endpoint.persisted?
          signal = app.signals.create(signal_params.merge(endpoint: endpoint))
          @errors += signal.errors.full_messages unless signal.persisted?
          # Error-type signals carry an extra error record built from params.
          if signal.respond_to?(:error?) && signal.error?
            error = signal.create_error(params[:error])
            @errors += error.errors.full_messages unless error.persisted?
          end
          signal
        end
      else
        @errors << 'Missing parameter \'type\''
        app.signals.build
      end
    end
  end
end
|
<gh_stars>0
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.auth.jwt;
import static io.stargate.auth.jwt.AuthnJwtService.CLAIMS_FIELD;
import static io.stargate.auth.jwt.AuthnJwtService.STARGATE_PREFIX;
import io.stargate.auth.AuthenticationSubject;
import io.stargate.auth.AuthorizationService;
import io.stargate.auth.Scope;
import io.stargate.auth.SourceAPI;
import io.stargate.auth.TypedKeyValue;
import io.stargate.auth.UnauthorizedException;
import io.stargate.auth.entity.ResourceKind;
import io.stargate.db.datastore.ResultSet;
import io.stargate.db.schema.Column;
import io.stargate.db.schema.Column.ColumnType;
import io.stargate.db.schema.Column.Type;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.regex.Pattern;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link AuthorizationService} backed purely by claims embedded in a JWT.
 * Row-level checks compare "stargate_"-prefixed claim values against the
 * corresponding column values; every operation that has no key values to
 * compare is a deliberate no-op and is deferred to the permissions of the
 * role the JWT maps to.
 */
public class AuthzJwtService implements AuthorizationService {

  private static final Logger log = LoggerFactory.getLogger(AuthzJwtService.class);
  // Splits a JWT into its three dot-separated segments (header.payload.signature).
  private final Pattern tokenPattern = Pattern.compile("\\.");

  /**
   * Using the provided JWT and the claims it contains will perform pre-authorization where
   * possible, executes the query provided, and then authorizes the response of the query.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public ResultSet authorizedDataRead(
      Callable<ResultSet> action,
      AuthenticationSubject authenticationSubject,
      String keyspace,
      String table,
      List<TypedKeyValue> typedKeyValues,
      SourceAPI
          sourceAPI) // this isn’t supported but if you want to use it you’ll need something other
      // than a JWT
      throws Exception {
    JSONObject stargateClaims = extractClaimsFromJWT(authenticationSubject.token());
    // Reject before executing if the key values already contradict the claims.
    preCheckDataReadWrite(stargateClaims, typedKeyValues);

    ResultSet result = action.call();
    if (result == null) {
      return null;
    }
    // Post-filter: hide any row whose columns do not match the claims.
    return result.withRowInspector(row -> hasCorrectClaims(stargateClaims, row));
  }

  /**
   * Returns true when every column of {@code row} that has a matching
   * "stargate_"-prefixed claim equals that claim's value (columns without a
   * matching claim are ignored). A null row is allowed through.
   */
  static boolean hasCorrectClaims(JSONObject stargateClaims, io.stargate.db.datastore.Row row) {
    if (row == null) {
      return true;
    }

    for (Column col : row.columns()) {
      if (stargateClaims.has(STARGATE_PREFIX + col.name())) {
        String stargateClaimValue;
        try {
          stargateClaimValue = stargateClaims.getString(STARGATE_PREFIX + col.name());
        } catch (JSONException e) {
          // Non-string claim value: fail closed and hide the row.
          log.warn("Unable to get stargate claim for " + STARGATE_PREFIX + col.name());
          return false;
        }

        String columnValue = row.getString(col.name());
        if (!stargateClaimValue.equals(columnValue)) {
          return false;
        }
      }
    }

    return true;
  }

  /**
   * Authorization for data resource access without keys is not provided by JWTs so all
   * authorization will be deferred to the underlying permissions assigned to the role the JWT maps
   * to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeDataRead(
      AuthenticationSubject authenticationSubject,
      String keyspace,
      String table,
      SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for data resource access without keys is not provided by JWTs so all
   * authorization will be deferred to the underlying permissions assigned to the role the JWT maps
   * to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeDataWrite(
      AuthenticationSubject authenticationSubject,
      String keyspace,
      String table,
      Scope scope,
      SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /** {@inheritDoc} */
  @Override
  public void authorizeDataWrite(
      AuthenticationSubject authenticationSubject,
      String keyspace,
      String table,
      List<TypedKeyValue> typedKeyValues,
      Scope scope,
      SourceAPI sourceAPI)
      throws UnauthorizedException {
    JSONObject stargateClaims = extractClaimsFromJWT(authenticationSubject.token());
    preCheckDataReadWrite(stargateClaims, typedKeyValues);
    // Just return. No value in doing a post check since we can't roll back anyway.
  }

  /**
   * Authorization for schema resource access is not provided by JWTs so all authorization will be
   * deferred to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeSchemaRead(
      AuthenticationSubject authenticationSubject,
      List<String> keyspaceNames,
      List<String> tableNames,
      SourceAPI sourceAPI,
      ResourceKind resource)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for schema resource access is not provided by JWTs so all authorization will be
   * deferred to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeSchemaWrite(
      AuthenticationSubject authenticationSubject,
      String keyspace,
      String table,
      Scope scope,
      SourceAPI sourceAPI,
      ResourceKind resource)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for role management is not provided by JWTs so all authorization will be deferred
   * to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeRoleManagement(
      AuthenticationSubject authenticationSubject, String role, Scope scope, SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for role management is not provided by JWTs so all authorization will be deferred
   * to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeRoleManagement(
      AuthenticationSubject authenticationSubject,
      String role,
      String grantee,
      Scope scope,
      SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for role management is not provided by JWTs so all authorization will be deferred
   * to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizeRoleRead(
      AuthenticationSubject authenticationSubject, String role, SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for permission management is not provided by JWTs so all authorization will be
   * deferred to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizePermissionManagement(
      AuthenticationSubject authenticationSubject,
      String resource,
      String grantee,
      Scope scope,
      SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Authorization for permission management is not provided by JWTs so all authorization will be
   * deferred to the underlying permissions assigned to the role the JWT maps to.
   *
   * <p>{@inheritDoc}
   */
  @Override
  public void authorizePermissionRead(
      AuthenticationSubject authenticationSubject, String role, SourceAPI sourceAPI)
      throws UnauthorizedException {
    // Cannot perform authorization with a JWT token so just return
  }

  /**
   * Decodes the JWT payload (segment 2 of 3) and returns its custom-claims
   * object.
   *
   * @throws IllegalArgumentException if the token does not have 3 segments
   * @throws JSONException if the payload is not JSON or lacks the claims field
   */
  private JSONObject extractClaimsFromJWT(String token) throws JSONException {
    // Grab the custom claims from the JWT. It's safe to work with the JWT as a plain Base64 encoded
    // json object here since by this point we've already authenticated the request.
    String[] parts = tokenPattern.split(token);
    if (parts.length < 3) {
      throw new IllegalArgumentException(
          "Valid JWT should contain 3 parts but provided only contains " + parts.length);
    }
    String decodedPayload =
        new String(
            Base64.getUrlDecoder().decode(parts[1]),
            // Per RFC-7519, JWTs are encoded from the UTF-8 representation of the JSON payload:
            StandardCharsets.UTF_8);
    JSONObject payload = new JSONObject(decodedPayload);

    return payload.getJSONObject(CLAIMS_FIELD);
  }

  /**
   * Rejects the operation when any key column that has a matching
   * "stargate_"-prefixed claim does not equal the claim's value. Only text
   * columns may participate in claim-based authorization.
   */
  private void preCheckDataReadWrite(JSONObject stargateClaims, List<TypedKeyValue> typedKeyValues)
      throws JSONException, UnauthorizedException {
    for (TypedKeyValue typedKeyValue : typedKeyValues) {
      // If one of the columns exist as a field in the JWT claims and the values do not match then
      // the request is not allowed.
      if (stargateClaims.has(STARGATE_PREFIX + typedKeyValue.getName())) {
        ColumnType targetCellType = typedKeyValue.getType();
        if (!targetCellType.equals(Type.Text)) {
          throw new IllegalArgumentException(
              "Column must be of type text to be used for authorization");
        }

        String stargateClaimValue =
            stargateClaims.getString(STARGATE_PREFIX + typedKeyValue.getName());
        String columnValue = (String) typedKeyValue.getValue();
        if (!stargateClaimValue.equals(columnValue)) {
          throw new UnauthorizedException("Not allowed to access this resource");
        }
      }
    }
  }
}
|
import os
import platform
import shlex

shortcut_path = '/path/to/shortcut'  # Replace with the actual path to the shortcut file

# Set the shortcut's permissions for the current operating system.
# The path is quoted (shlex.quote on POSIX, double quotes on Windows) so a
# path containing spaces or shell metacharacters can neither break the
# command nor inject into the shell -- the original unquoted string
# concatenation was unsafe.
if platform.system() == 'Windows':
    os.system('icacls "' + shortcut_path + '" /grant Everyone:F')  # Full control for everyone
elif platform.system() == 'Darwin':
    os.system('chmod 755 ' + shlex.quote(shortcut_path))  # rwxr-xr-x on macOS
elif platform.system() == 'Linux':
    os.system('chmod +x ' + shlex.quote(shortcut_path))  # Execute permission for all users

# Print the success message along with the path of the shortcut file.
print('HOPS successfully installed.')
print('The shortcut has been saved here:\n\n{0}\n\n'
      'You can freely move this file to your preferred location.'.format(shortcut_path))
<gh_stars>0
/* FCE Ultra - NES/Famicom Emulator
*
* Copyright notice for this file:
* Copyright (C) 2005 CaH4e3
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* Family Study Box by <NAME>
*/
#include "mapinc.h"
/* 3 KiB of extra work RAM mapped at $4400-$4FFF (see ASWRAM/BSWRAM below). */
static uint8 SWRAM[3072];
/* 32 KiB of PRG work RAM, allocated in Mapper186_Init, banked at $6000. */
static uint8 *WRAM = NULL;
/* The four mapper registers at $4200-$4203. */
static uint8 regs[4];

/* Savestate descriptors for the registers and extra work RAM. */
static SFORMAT StateRegs[] =
{
	{ regs, 4, "DREG" },
	{ SWRAM, 3072, "SWRM" },
	{ 0 }
};
/* Apply the current register state to the PRG banking. */
static void Sync(void) {
	setprg8r(0x10, 0x6000, regs[0] >> 6);  /* WRAM page at $6000 selected by bits 6-7 of reg 0 */
	setprg16(0x8000, regs[1]);             /* switchable 16K PRG bank at $8000 from reg 1 */
	setprg16(0xc000, 0);                   /* first 16K bank fixed at $C000 */
}
/* Register writes in the $4200-$43FF range; A & 3 selects the register.
   NOTE(review): the (A & 0x4203) guard only skips addresses where none of
   those bits are set -- presumably intended to gate $4200-$4203; confirm
   against hardware documentation before changing. */
static DECLFW(M186Write) {
	if (A & 0x4203) regs[A & 3] = V;
	Sync();
}
/* Status reads at $4200-$4203: fixed values, with $4202 reporting 0x40.
   Any other address in the handled range reads back 0xFF. */
static DECLFR(M186Read) {
	switch (A) {
	case 0x4200: return 0x00; break;
	case 0x4201: return 0x00; break;
	case 0x4202: return 0x40; break;
	case 0x4203: return 0x00; break;
	}
	return 0xFF;
}
/* Read handler for the extra work RAM window at $4400-$4FFF. */
static DECLFR(ASWRAM) {
	return(SWRAM[A - 0x4400]);
}
/* Write handler for the extra work RAM window at $4400-$4FFF. */
static DECLFW(BSWRAM) {
	SWRAM[A - 0x4400] = V;
}
/* Power-on: install CPU read/write handlers and reset the banking state. */
static void M186Power(void) {
	setchr8(0);
	SetReadHandler(0x6000, 0xFFFF, CartBR);
	SetWriteHandler(0x6000, 0xFFFF, CartBW);
	SetReadHandler(0x4200, 0x43FF, M186Read);
	SetWriteHandler(0x4200, 0x43FF, M186Write);
	SetReadHandler(0x4400, 0x4FFF, ASWRAM);
	SetWriteHandler(0x4400, 0x4FFF, BSWRAM);
	/* NOTE(review): copies regs[3] -- which is never written before this
	   point -- into the other registers; `= regs[3] = 0` may have been
	   intended. Confirm against upstream FCEUX before changing behavior. */
	regs[0] = regs[1] = regs[2] = regs[3];
	Sync();
}
/* Shutdown: release the heap-allocated work RAM. */
static void M186Close(void) {
	if (WRAM)
		FCEU_gfree(WRAM);
	WRAM = NULL;
}
/* Savestate restore hook: re-apply banking from the loaded registers. */
static void M186Restore(int version) {
	Sync();
}
/* Mapper 186 (Family Study Box) entry point: wires up the power/close/restore
   hooks, allocates 32 KiB of WRAM as cart PRG page 0x10, and registers the
   savestate data. */
void Mapper186_Init(CartInfo *info) {
	info->Power = M186Power;
	info->Close = M186Close;
	GameStateRestore = M186Restore;

	WRAM = (uint8*)FCEU_gmalloc(32768);
	SetupCartPRGMapping(0x10, WRAM, 32768, 1);
	AddExState(WRAM, 32768, 0, "WRAM");
	AddExState(StateRegs, ~0, 0, 0);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.