text stringlengths 1 1.05M |
|---|
<reponame>caglar/Arcade-Universe<filename>arcade_universe/perlin.py<gh_stars>10-100
from __future__ import division
import Image, ImageDraw
import numpy as np
#This is a perlin noise generating script
class PerlinNoiseGenerator(object):
    """Generate 2-D Perlin-style turbulence noise grids and images.

    Parameters
    ----------
    w : int
        Width of the noise grid / image in pixels.
    h : int
        Height of the noise grid / image in pixels.
    size : int
        Initial octave scale for the turbulence (halved each octave).
    rnd : int
        Seed for the random number generator, so output is reproducible.
    """

    def __init__(self, w, h, size=64, rnd=12312):
        self.w = w
        self.h = h
        self.rnd = rnd
        self.size = size
        self.rng = np.random.RandomState(self.rnd)

    def generate_noise(self):
        """Return a (w, h) array of base noise values in [0, 1]."""
        # Vectorized draw replaces the per-pixel Python loop.  randint's
        # upper bound is exclusive, so 32769 keeps the original inclusive
        # [0, 32768] range of the deprecated random_integers call.
        return self.rng.randint(0, 32769, size=(self.w, self.h)) / 32768

    def smoothnoise(self, x, y, noise):
        """Sample ``noise`` at fractional (x, y) using bilinear interpolation.

        Integer coordinates wrap around the grid edges (toroidal topology).
        """
        x_i = int(x)
        y_i = int(y)
        fract_x = x - x_i
        fract_y = y - y_i
        # Wrap the integer coordinates into the grid.
        x1 = (x_i + self.w) % self.w
        y1 = (y_i + self.h) % self.h
        # Neighbour cell one step "back" on each axis, also wrapped.
        x2 = (x1 + self.w - 1) % self.w
        y2 = (y1 + self.h - 1) % self.h
        # Blend the four surrounding samples by their fractional weights.
        val = 0.0
        val += fract_x * fract_y * noise[x1][y1]
        val += fract_x * (1 - fract_y) * noise[x1][y2]
        val += (1 - fract_x) * fract_y * noise[x2][y1]
        val += (1 - fract_x) * (1 - fract_y) * noise[x2][y2]
        return val

    def turbulence(self, x, y, noise, size=0):
        """Sum smoothed noise octaves at (x, y) and return a scaled int.

        ``size=0`` (the default) means "use the generator's configured
        octave scale".
        """
        val = 0.0
        if size == 0:
            size = self.size
        init_size = size
        # Add progressively finer octaves, each weighted by its scale.
        while size >= 1:
            val += self.smoothnoise(x / size, y / size, noise) * size
            size /= 2.0
        return int(128 * val / init_size)

    def get_background_noise(self):
        """Return a (w, h) array of turbulence values for use as a background."""
        data = np.zeros((self.w, self.h))
        noise = self.generate_noise()
        # range() replaces the Python-2-only xrange().
        for x in range(self.w):
            for y in range(self.h):
                data[x][y] = self.turbulence(x, y, noise)
        return data

    def gen_img(self):
        """Render the turbulence as a grayscale RGB image saved to out.png."""
        # Local import so this method works on Python 3 (Pillow) as well as
        # with the legacy top-level Python 2 "import Image" style.
        try:
            from PIL import Image, ImageDraw
        except ImportError:
            import Image, ImageDraw
        img = Image.new("RGB", (self.w, self.h), "#FFFFFF")
        draw = ImageDraw.Draw(img)
        noise = self.generate_noise()
        for x in range(self.w):
            for y in range(self.h):
                r = g = b = self.turbulence(x, y, noise)
                draw.point((x, y), fill=(r, g, b))
        img.save("out.png", "PNG")
# Demo entry point: render a 32x32 turbulence image to out.png.
if __name__=="__main__":
    perl = PerlinNoiseGenerator(32, 32, size=32)
    perl.gen_img()
|
<reponame>tdm1223/Algorithm
// https://www.hackerrank.com/challenges/ctci-ice-cream-parlor
// Print the 1-based indices (smaller first) of two distinct items in `cost`
// whose prices sum to exactly `money`.  Single pass, O(n) expected time.
void whatFlavors(const vector<int>& cost, int money)
{
    // Maps an already-seen price to the index where it was first seen.
    unordered_map<int, int> seen;
    for (int i = 0; i < static_cast<int>(cost.size()); i++)
    {
        auto iter = seen.find(money - cost[i]);
        if (iter != seen.end())
        {
            // The partner was seen earlier, so its index is the smaller one.
            cout << iter->second + 1 << " " << i + 1 << endl;
            return;
        }
        seen[cost[i]] = i;
    }
}
|
echo Installing bash_it...
# Shallow clone is enough: we only need the working tree, not history.
git clone --depth=1 https://github.com/Bash-it/bash-it.git ~/.bash_it
~/.bash_it/install.sh
# Switch the theme that bash-it wrote into .bashrc to our preferred one.
sed -i "s/BASH_IT_THEME='.*'/BASH_IT_THEME='bakke'/g" ~/.bashrc
# append our custom bashrc to bash_it
# (guarded so re-running this installer does not duplicate the line)
grep -qxF "source ~/.myprof/bashrc" ~/.bashrc || echo "source ~/.myprof/bashrc" >> ~/.bashrc
echo Installing tmux config...
# TODO: use new file & source execution for custom overrides support
ln -sf ~/.myprof/tmux.conf ~/.tmux.conf
echo Installing vim config...
# Guarded append: keep .vimrc from accumulating duplicates on re-runs.
grep -qxF "so ~/.myprof/vimrc" ~/.vimrc 2>/dev/null || echo "so ~/.myprof/vimrc" >> ~/.vimrc
echo Done.
|
#!/bin/bash
# Start a single-node Consul server in the background and seed its ACLs
# from the test resources.
BASEDIR=`dirname $0`/../../..
# -bootstrap-expect 1: single-server cluster; UI/config come from test resources.
${BASEDIR}/consul agent -server -bootstrap-expect 1 -advertise 127.0.0.1 -data-dir /tmp/consul -ui-dir ${BASEDIR}/src/test/resources/consul_ui -config-dir ${BASEDIR}/src/test/resources/consul_config &
# wait for consul to elect a leader before sending acl
sleep 5
# Create the anonymous and discovery-client ACLs using the master token.
curl -X PUT -d @`dirname $0`/../../test/resources/consul_acl/consul_anonymous_acl.json http://localhost:8500/v1/acl/create?token=2ee647bd-bd69-4118-9f34-b9a6e9e60746
curl -X PUT -d @`dirname $0`/../../test/resources/consul_acl/consul_discovery_client_acl.json http://localhost:8500/v1/acl/create?token=2ee647bd-bd69-4118-9f34-b9a6e9e60746
|
package fr.iv.calories.entity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity mapping the {@code food} table: a food item with a type label,
 * a name and its energy content per 100 grams.
 */
@Entity
@Table(name = "food")
public class Food {

    // Auto-incremented primary key (database IDENTITY column).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private int id;

    // Category label stored in the "type" column.
    @Column(name = "type")
    private String type;

    // Display name stored in the "name" column.
    @Column(name = "name")
    private String name;

    // Kilocalories per 100 grams of this food.
    @Column(name = "kcal_per_hundred_grams")
    private int kcalPerHundredGm;

    /** No-arg constructor required by JPA. */
    public Food() {
    }

    /** Convenience constructor; the id is generated by the database. */
    public Food(String type, String name, int kcalPerHundredGm) {
        this.type = type;
        this.name = name;
        this.kcalPerHundredGm = kcalPerHundredGm;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getKcalPerHundredGm() {
        return kcalPerHundredGm;
    }

    public void setKcalPerHundredGm(int kcalPerHundredGm) {
        this.kcalPerHundredGm = kcalPerHundredGm;
    }
}
|
def matrix_sum(matrix):
    """Return the sum of every element in a 2-D iterable of numbers.

    Works for ragged rows and returns 0 for an empty matrix.  Uses the
    builtin ``sum`` instead of a manual accumulator (the original also
    shadowed the builtin name ``sum``).
    """
    return sum(sum(row) for row in matrix)
package desarrollomobile.tiendadeclases.tiendadeclases.Services;
import android.util.Log;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
public class MyFirebaseMessagingService extends FirebaseMessagingService {

    /**
     * Called by FCM when a message is received; logs the sender and, when
     * present, the data payload.
     */
    @Override
    public void onMessageReceived(RemoteMessage remoteMessage) {
        Log.d("Notif Who", "From: " + remoteMessage.getFrom());
        // Data messages carry a key/value map; notification-only messages
        // arrive with an empty data map, so skip the payload log then.
        if (remoteMessage.getData().size() > 0) {
            Log.d("Notif Payload", "Message data payload: " + remoteMessage.getData());
        }
    }
}
|
#!/bin/bash
# SLURM batch script: run the Sea parallel experiment on 4 Lustre client
# nodes and report the wall-clock runtime.
#SBATCH --time=90:55:00
#SBATCH --account=vhs
#SBATCH --job-name=lustre_4n_6t_6d_1000f_617m_5i
#SBATCH --nodes=4
#SBATCH --nodelist=comp02,comp03,comp04,comp06
#SBATCH --output=./results/exp_iterations/run-4/lustre_4n_6t_6d_1000f_617m_5i/slurm-%x-%j.out
source /home/vhs/Sea/.venv/bin/activate
# Drop client-side caches on all 4 nodes before timing.
srun -N4 ../scripts/clear_client_pc.sh
start=`date +%s.%N`
# Launch one per-node workload script on each node, in parallel.
srun -N 1 bash ./results/exp_iterations/run-4/lustre_4n_6t_6d_1000f_617m_5i/n0_sea_parallel.sh &
srun -N 1 bash ./results/exp_iterations/run-4/lustre_4n_6t_6d_1000f_617m_5i/n1_sea_parallel.sh &
srun -N 1 bash ./results/exp_iterations/run-4/lustre_4n_6t_6d_1000f_617m_5i/n2_sea_parallel.sh &
srun -N 1 bash ./results/exp_iterations/run-4/lustre_4n_6t_6d_1000f_617m_5i/n3_sea_parallel.sh &
wait
end=`date +%s.%N`
# bc handles the fractional-second arithmetic.
runtime=$( echo "$end - $start" | bc -l )
echo "Runtime: $runtime"
|
// Holds the Highcharts instance so other scripts can access it.
var profileChart;
$(document).ready(function(){
  $('#profile-chart').each(function(){
    // `var` keeps the options object function-local (the original
    // assignment created an implicit global).
    var profileChartOptions = {
      chart: {
        renderTo: 'profile-chart',
        type: 'line',
        height: 250,
        width: 400
      },
      title: {
        text: 'Ranked points history'
      },
      xAxis: {
        categories: []
      },
      yAxis: {
        title: {
          text: 'Ranking points'
        },
        showEmpty: false
      },
      series: [{
        name: 'Ranking points',
        data: []
      }],
      legend: { enabled: false }
    };
    // Pull the chart data out of the hidden ul#pointsChartData list.
    $('ul#pointsChartData li').each(function(){
      profileChartOptions.xAxis.categories.push($(this).find('span[data-type="date"]').text());
      // Explicit radix 10 avoids legacy parsing of zero-padded values.
      profileChartOptions.series[0].data.push(parseInt($(this).find('span[data-type="points"]').text(), 10));
    });
    // The list is rendered newest-first; the chart reads oldest-to-newest.
    profileChartOptions.xAxis.categories.reverse();
    profileChartOptions.series[0].data.reverse();
    profileChart = new Highcharts.Chart(profileChartOptions);
  });
});
/*
* Copyright (c) 2007-2013 Concurrent, Inc. All Rights Reserved.
*
* Project and contact information: http://www.cascading.org/
*
* This file is part of the Cascading project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cascading.provider;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Arrays;
import cascading.util.Util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ChildFirstURLClassLoader is an internal utility class used to load CascadingServices from an isolated
* classpath to prevent collisions between dependent jar versions in the parent classpath.
*/
class ChildFirstURLClassLoader extends ClassLoader
  {
  private static final Logger LOG = LoggerFactory.getLogger( ChildFirstURLClassLoader.class );

  // Class-name prefixes that must always be loaded by the parent loader.
  private final String[] exclusions;
  private ChildURLClassLoader childClassLoader;

  /**
   * URLClassLoader that searches its own URLs before falling back to the
   * given parent — the inverse of the default parent-first delegation.
   */
  private class ChildURLClassLoader extends URLClassLoader
    {
    private ClassLoader parentClassLoader;

    public ChildURLClassLoader( URL[] urls, ClassLoader parentClassLoader )
      {
      // null parent: prevents the default parent-first lookup so this
      // loader only sees its own URLs; fallback is done manually below.
      super( urls, null );
      this.parentClassLoader = parentClassLoader;
      }

    @Override
    public Class<?> findClass( String name ) throws ClassNotFoundException
      {
      // Excluded prefixes always come from the parent to keep shared
      // types (loaded by both loaders) compatible.
      for( String exclusion : exclusions )
        {
        if( name.startsWith( exclusion ) )
          return parentClassLoader.loadClass( name );
        }

      try
        {
        return super.findClass( name );
        }
      catch( ClassNotFoundException exception )
        {
        // Not on the child classpath: fall back to the parent.
        return parentClassLoader.loadClass( name );
        }
      }
    }

  public ChildFirstURLClassLoader( String[] exclusions, URL... urls )
    {
    super( Thread.currentThread().getContextClassLoader() );

    this.exclusions = Util.removeNulls( exclusions );

    childClassLoader = new ChildURLClassLoader( urls, this.getParent() );

    if( LOG.isDebugEnabled() )
      LOG.debug( "child first classloader exclusions: {}", Arrays.toString( exclusions ) );
    }

  @Override
  protected synchronized Class<?> loadClass( String name, boolean resolve ) throws ClassNotFoundException
    {
    // Excluded prefixes use the normal parent-first path.
    for( String exclusion : exclusions )
      {
      if( name.startsWith( exclusion ) )
        {
        LOG.debug( "loading exclusion: {}, from parent: {}", exclusion, name );
        return super.loadClass( name, resolve );
        }
      }

    try
      {
      LOG.debug( "loading from child: {}", name );
      return childClassLoader.loadClass( name );
      }
    catch( ClassNotFoundException exception )
      {
      // Not found child-first: delegate to the regular parent chain.
      return super.loadClass( name, resolve );
      }
    }
  }
|
#!/usr/bin/env bats
# -*- shell-script -*-
# Integration tests for the asdf-direnv plugin: install dummy asdf plugins
# and verify that direnv exposes the selected tool versions on PATH.

load test_helpers

setup() {
  env_setup
  asdf direnv setup --shell bash --version system
  source $HOME/.bashrc
}

teardown() {
  env_teardown
}

@test "dummy 1.0 is available via asdf exec" {
  install_dummy_plugin "dummy" "1.0"
  ASDF_DUMMY_VERSION=1.0 run asdf exec dummy
  [ "$output" == "This is dummy 1.0" ]
}

@test "direnv loads simple envrc" {
  cd "$PROJECT_DIR"
  [ -z "$FOO" ]
  echo 'export FOO=BAR' > "$PROJECT_DIR/.envrc"
  direnv allow "$PROJECT_DIR/.envrc"
  envrc_load
  [ "$FOO" == "BAR" ]
}

# This is to support asdf multiple version multiline feature
@test "use multiple versions for same plugin - multiline" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  cd "$PROJECT_DIR"
  asdf direnv local dummy 2.0 dummy 1.0
  asdf local dummy 2.0
  echo "dummy 1.0" >> .tool-versions
  asdf direnv local
  envrc_load
  run path_as_lines
  [ "${lines[0]}" = "$(dummy_bin_path dummy 2.0)" ]
  [ "${lines[1]}" = "$(dummy_bin_path dummy 1.0)" ]
}

# This is to support asdf multiple version inline feature
@test "use multiple versions for same plugin - inline" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  cd "$PROJECT_DIR"
  asdf local dummy 2.0 1.0
  asdf direnv local
  envrc_load
  run path_as_lines
  [ "${lines[0]}" = "$(dummy_bin_path dummy 2.0)" ]
  [ "${lines[1]}" = "$(dummy_bin_path dummy 1.0)" ]
}

@test "use asdf - makes global tools available in PATH" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  cd "$PROJECT_DIR"
  asdf direnv local
  [ ! $(type -P dummy) ] # not available
  asdf global dummy 1.0
  rm -f "$PROJECT_DIR"/.tool-versions # no local tools
  touch .envrc
  envrc_load
  run dummy
  [ "$output" == "This is dummy 1.0" ] # executable in path
}

@test "use asdf - makes local tools available in PATH" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  cd "$PROJECT_DIR"
  asdf direnv local
  [ ! $(type -P dummy) ] # not available
  asdf global dummy 1.0 # should be ignored by asdf
  asdf local dummy 2.0
  # Touching local .envrc file should re-create cached-envrc
  touch .envrc
  envrc_load
  run dummy
  [ "$output" == "This is dummy 2.0" ] # executable in path
}

@test "use asdf - prepends plugin custom shims to PATH" {
  echo "If a plugin has helper shims defined, they also appear on PATH"
  install_dummy_plugin dummy 1.0 mummy
  asdf global dummy 1.0
  cd "$PROJECT_DIR"
  asdf direnv local
  [ ! $(type -P mummy) ] # not available
  [ ! $(type -P dummy) ] # not available
  envrc_load
  run mummy
  [ "$output" == "This is dummy mummy shim" ] # executable in path
  run dummy
  [ "$output" == "This is dummy 1.0" ] # executable in path
  # plugin bin at head of PATH
  run path_as_lines
  path_as_lines | sed -n 1p | grep "direnv"
  path_as_lines | sed -n 2p | grep "$(dummy_bin_path dummy 1.0)"
  path_as_lines | sed -n 3p | grep "$(dummy_shims_path dummy 1.0)"
}

@test "use asdf - exports plugin custom env not only PATH" {
  install_dummy_plugin dummy 1.0
  cat <<-EOF > "$ASDF_DATA_DIR/plugins/dummy/bin/exec-env"
  #!/usr/bin/env bash
  export JOJO=JAJA
  export FOO=$'\nBAR' # something starting with new line
EOF
  chmod +x "$ASDF_DATA_DIR/plugins/dummy/bin/exec-env"
  cd "$PROJECT_DIR"
  export ASDF_DUMMY_VERSION=1.0
  asdf direnv local
  envrc_load
  [ "$JOJO" == "JAJA" ] # Env exported by plugin
  [ "$FOO" == $'\nBAR' ] # Keeps special chars
}

@test "use asdf - determines version from tool-versions" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  cd "$PROJECT_DIR"
  asdf global dummy 1.0
  asdf local dummy 2.0
  asdf direnv local
  envrc_load
  run dummy
  [ "$output" == "This is dummy 2.0" ] # executable in path
}

@test "use asdf - resolves latest:X version from tool-versions" {
  install_dummy_plugin dummy 2.0
  install_dummy_plugin dummy 2.1
  cd "$PROJECT_DIR"
  asdf global dummy 2.0
  asdf local dummy latest:2
  asdf direnv local
  envrc_load
  run dummy
  [ "$output" == "This is dummy 2.1" ] # executable in path
}

@test "use asdf - watches tool-versions for changes" {
  install_dummy_plugin dummy 1.0
  cd "$PROJECT_DIR"
  asdf local dummy 1.0
  asdf direnv local
  envrc_load
  direnv status | grep -F 'Loaded watch: ".tool-versions"'
}

@test "use asdf - watches plugin legacy file for changes" {
  install_dummy_plugin dummy 1.0
  setup_dummy_legacyfile dummy .dummy-version
  cd "$PROJECT_DIR"
  echo "1.0" > "$PROJECT_DIR/.dummy-version"
  asdf direnv local
  envrc_load
  run dummy
  [ "$output" == "This is dummy 1.0" ]
  direnv status | grep -F 'Loaded watch: ".dummy-version"'
}

@test "use asdf - activates currently selected plugins" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  install_dummy_plugin gummy 1.0
  install_dummy_plugin puppy 2.0
  install_dummy_plugin mummy 1.0 # installed, but not selected globally nor locally
  setup_dummy_legacyfile dummy .dummy-version
  cd "$PROJECT_DIR"
  asdf global dummy 1.0
  asdf global gummy 1.0
  echo "2.0" > "$PROJECT_DIR/.dummy-version"
  asdf local puppy 2.0
  asdf direnv local
  envrc_load
  run dummy # selected from legacyfile
  [ "$output" == "This is dummy 2.0" ]
  run puppy # selected from local tool-versions
  [ "$output" == "This is puppy 2.0" ]
  run gummy # selected from global tool-versions
  [ "$output" == "This is gummy 1.0" ]
  [ ! $(type -P mummy) ] # never selected
  [ ! $(path_as_lines | grep "$(dummy_bin_path dummy 1.0)") ]
}

@test "use asdf - watches selection files" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin dummy 2.0
  install_dummy_plugin gummy 1.0
  install_dummy_plugin puppy 2.0
  install_dummy_plugin mummy 1.0 # installed, but not selected globally nor locally
  cd "$PROJECT_DIR"
  asdf global dummy 1.0
  asdf global gummy 1.0
  asdf local dummy 2.0
  asdf local puppy 2.0
  asdf direnv local
  envrc_load
  direnv status | grep -F 'Loaded watch: ".tool-versions"'
  direnv status | grep -F 'Loaded watch: "../.tool-versions"'
}

@test "use asdf - watches legacy files" {
  install_dummy_plugin dummy 2.0
  setup_dummy_legacyfile dummy .dummy-version
  cd "$PROJECT_DIR"
  echo "2.0" > "$PROJECT_DIR/.dummy-version"
  asdf direnv local
  envrc_load
  direnv status
  direnv status | grep -F 'Loaded watch: ".dummy-version"'
}

@test "use asdf - sets local tools on PATH before global tools" {
  install_dummy_plugin dummy 1.0
  install_dummy_plugin gummy 1.0
  install_dummy_plugin mummy 1.0
  install_dummy_plugin puppy 1.0
  install_dummy_plugin rummy 1.0
  setup_dummy_legacyfile dummy .dummy-version
  cd "$PROJECT_DIR"
  asdf global mummy 1.0
  asdf global rummy 1.0
  echo "1.0" > "$PROJECT_DIR/.dummy-version"
  asdf local puppy 1.0
  asdf local gummy 1.0
  asdf direnv local
  envrc_load
  path_as_lines
  local dummy_line="$(path_as_lines | grep -n -F "$(dummy_bin_path dummy 1.0)" | cut -d: -f1)"
  local gummy_line="$(path_as_lines | grep -n -F "$(dummy_bin_path gummy 1.0)" | cut -d: -f1)"
  local mummy_line="$(path_as_lines | grep -n -F "$(dummy_bin_path mummy 1.0)" | cut -d: -f1)"
  local puppy_line="$(path_as_lines | grep -n -F "$(dummy_bin_path puppy 1.0)" | cut -d: -f1)"
  local rummy_line="$(path_as_lines | grep -n -F "$(dummy_bin_path rummy 1.0)" | cut -d: -f1)"
  [ "$puppy_line" -lt "$gummy_line" ] # first tool in tool-versions is also first on PATH
  [ "$gummy_line" -lt "$mummy_line" ] # local plugins should be on PATH before global ones
  [ "$puppy_line" -lt "$mummy_line" ]
  [ "$puppy_line" -lt "$dummy_line" ] # since dummy is not in tool-versions its loaded after local tools
  # global plugins order is lexicographical since they are not in tool-versions file
  [ "$dummy_line" -lt "$mummy_line" ] # dummy is resolved by `use asdf global` since its not in tool-versions
  [ "$mummy_line" -lt "$rummy_line" ]
}
|
'Returns a pseudo-random Integer in the range 1 to 10 (inclusive).
Function getRandom() as Integer
Dim RandomNumber As Integer
'Re-seed Rnd from the system timer so successive runs differ.
Randomize
'Generate random number between 1-10
RandomNumber = CInt(Int((10 * Rnd()) + 1))
getRandom = RandomNumber
End Function
package com.cwl.service.part_1.pool;
/**
 * @author cwl
 * @description: Task queue used to buffer tasks submitted to the thread pool.
 * @date 2019/12/2010:42
 */
public interface RunnableQueue {
    // Called when a new task arrives: offer it to the queue.
    void offer(Runnable runnable);

    // Worker threads fetch a Runnable via take(); it declares
    // InterruptedException, presumably because it may block — confirm
    // against the implementation.
    Runnable take() throws InterruptedException;

    // Returns the number of tasks currently in the queue.
    int size();
}
#!/bin/sh
set -eu
# Compile a curve/field implementation with clang at -O3 + LTO, passing the
# field parameters (modulus, limb layout, a24 constant) as -D macros.
# Extra arguments ("$@") are forwarded to clang (e.g. the source files).
clang -fbracket-depth=999999 -march=native -mbmi2 -mtune=native -std=gnu11 -O3 -flto -fuse-ld=lld -fomit-frame-pointer -fwrapv -Wno-attributes -fno-strict-aliasing -Da24_hex='0x3039' -Da24_val='12345' -Da_minus_two_over_four_array='{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x39}' -Dbitwidth='64' -Dlimb_weight_gaps_array='{52,51,51,51,51}' -Dmodulus_array='{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}' -Dmodulus_bytes_val='32' -Dmodulus_limbs='5' -Dq_mpz='(1_mpz<<256) - (1_mpz<<224) + (1_mpz<<192) + (1_mpz<<96) - 1' "$@"
|
<reponame>FrankAst/graphql-compose-elasticsearch<gh_stars>0
/* @flow */
// Builds a GraphQL schema exposing an Elasticsearch "university" index via
// graphql-compose-elasticsearch resolvers.

import { graphql } from 'graphql-compose';
import elasticsearch from 'elasticsearch';
import { composeWithElastic, elasticApiFieldConfig } from '../../src'; // from 'graphql-compose-elasticsearch';

const { GraphQLSchema, GraphQLObjectType } = graphql;

export const elasticIndex = 'university';
export const elasticType = 'university';

// Client for the local ES 5.0 instance used by this example.
export const elasticClient = new elasticsearch.Client({
  host: 'http://localhost:9200',
  apiVersion: '5.0',
  // log: 'trace',
});

// Index mapping: a searchable title plus a completion-suggester field.
export const elasticMapping = {
  properties: {
    title: { type: 'text' },
    title_suggest: {
      type: 'completion',
      analyzer: 'simple',
      preserve_separators: true,
      preserve_position_increments: true,
      max_input_length: 50,
    },
  },
};

// Type composer with auto-generated search/suggest resolvers for the index.
export const UniversityEsTC = composeWithElastic({
  graphqlTypeName: 'UniversityEsTC',
  elasticIndex,
  elasticType,
  elasticMapping,
  elasticClient,
});

const schema = new GraphQLSchema({
  query: new GraphQLObjectType({
    name: 'Query',
    fields: {
      search: UniversityEsTC.getResolver('search').getFieldConfig(),
      searchConnection: UniversityEsTC.getResolver('searchConnection').getFieldConfig(),
      suggest: UniversityEsTC.getResolver('suggest').getFieldConfig(),
      insertSuggest: UniversityEsTC.getResolver('insertSuggest').getFieldConfig(),
      // Raw Elasticsearch API surface exposed as a GraphQL field.
      elastic: elasticApiFieldConfig({
        host: 'http://localhost:9200',
        apiVersion: '5.0',
        log: 'trace',
      }),
    },
  }),
});

export default schema;
|
#!/bin/sh
# CocoaPods-generated script: copies, strips and re-signs vendored frameworks
# into the app bundle during an Xcode build phase.
# NOTE(review): the shebang is /bin/sh but the script uses bash features
# (arrays, `function`, `set -o pipefail`, [[ ]]) — confirm it is always
# executed by bash (as Xcode script phases typically do via the generated
# shell setting).
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework source: built products dir, its basename there,
  # or the literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Embed the vendored framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/TencentOpenAPISwift/TencentOpenAPISwift.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/TencentOpenAPISwift/TencentOpenAPISwift.framework"
fi
# When signing in parallel, wait for all backgrounded codesign jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
import express, { Express, RequestHandler } from 'express'
import { getDatabase } from '../database/helper'
import { setupRoutes } from '../routes/router'
import { Route } from '../routes/types'
import { Setting } from '../types'
// Create a bare Express application instance.
export const setupExpress = (): Express => {
  return express()
}
/**
 * Resolve the callback to hand to `listen`: the configured callback when
 * the setting is enabled, otherwise null.
 */
export const setupCallback = (payload: Setting): (() => void) =>
  payload.enabled ? payload.callback : null
/**
 * Build the request handler(s) for a setting: invokes the handler factory
 * with the setting's payload when enabled, otherwise yields null.
 */
export const setupRequestHandler = (
  setting: Setting,
  handler: (payload?: unknown) => RequestHandler | RequestHandler[],
): RequestHandler | RequestHandler[] =>
  setting.enabled ? handler(setting.payload) : null
// Wire up the app: attach the configured router (with the database, the
// route definitions and the app itself in its payload) and start listening
// on the configured port.
export const initExpress = (configuration, routes: Route<unknown>[]) => {
  const app = setupExpress()
  const database = getDatabase()
  return app
    .use(
      configuration.router.prefix,
      setupRequestHandler(
        {
          ...configuration.router,
          payload: {
            database,
            routes,
            app,
          },
        },
        setupRoutes,
      ),
    )
    .listen(configuration.port, setupCallback(configuration.listen))
}
|
<reponame>studiobicker/project-gatsby-amdehaan<gh_stars>0
import React from "react"
import { useStaticQuery, graphql } from "gatsby"
import Img from "gatsby-image"
import styled from "styled-components"
import { color, device } from "../../styles/variables"
import { Heading01, Copy01 } from "../../styles/type"
import Header from "../header"
import Footer from "../footer"
import Button from "../general/button"

// Full-viewport section wrapping the intro page.
const PageWrapper = styled.section`
  width: 100%;
  height: 100vh;
  padding: 0;
  color: ${color.dark};
  @media ${device.mobile} {
    padding: 2rem;
  }
`

// Colored inner column: header on top, footer pinned to the bottom.
const Inner = styled.div`
  background: ${color.primary};
  display: flex;
  flex-direction: column;
  justify-content: space-between;
  min-height: 100%;
  padding: 2rem;
`

// Centers the copy + image pair; side-by-side on desktop widths.
const ContentWrapper = styled.div`
  display: flex;
  flex-wrap: wrap;
  justify-content: center;
  align-items: center;
  margin: 0 auto;
  @media ${device.tablet} {
    max-width: 45rem;
  }
  @media ${device.desktop} {
    max-width: 83rem;
  }
`

const CopyWrapper = styled.div`
  width: 100%;
  margin: 1rem 0;
  @media ${device.tablet} {
    margin: 2rem 0;
  }
  @media ${device.desktop} {
    flex: 0 1 50%;
    max-width: 50%;
    order: 0;
  }
`

const MediaWrapper = styled.div`
  width: 100%;
  display: flex;
  justify-content: center;
  margin: 1rem 0;
  @media ${device.tablet} {
    margin: 2rem 0;
  }
  @media ${device.desktop} {
    flex: 0 1 50%;
    max-width: 50%;
    order: 1;
  }
`

const Thumbnail = styled.div`
  width: 20rem;
  @media ${device.tablet} {
    width: 30rem;
  }
`

// Landing-page intro: hero image, tagline copy and a contact button.
const Intro = () => {
  // Fluid hero image sourced from the file named "hero".
  const data = useStaticQuery(graphql`
    query {
      placeholderImage: file(name: { eq: "hero" }) {
        childImageSharp {
          fluid(maxWidth: 500) {
            ...GatsbyImageSharpFluid_withWebp_tracedSVG
          }
        }
      }
    }
  `)
  return (
    <>
      <PageWrapper>
        <Inner>
          <Header />
          <ContentWrapper>
            <MediaWrapper>
              <Thumbnail>
                <Img fluid={data.placeholderImage.childImageSharp.fluid} />
              </Thumbnail>
            </MediaWrapper>
            <CopyWrapper>
              <Heading01>
                Front-end developer
                <br />& UX enthousiast
              </Heading01>
              <Copy01>
                specialized in building <span>high-quality</span> websites and
                applications.
              </Copy01>
              <Button
                to="/contact/"
                state={{
                  modal: true,
                }}
                label="Get in touch"
              />
            </CopyWrapper>
          </ContentWrapper>
          <Footer />
        </Inner>
      </PageWrapper>
    </>
  )
}
export default Intro
|
def perform_graphblas_operations(X, W, biases, threshold):
    """Dense-layer-style pipeline built from GraphBLAS ops:
    (X x W) with biases applied, clamped at ``threshold``, then a
    select that keeps "gt0" entries.

    NOTE(review): relies on ``irb_inner``, an IR builder defined elsewhere
    in the module — confirm it is in scope when this is called.
    """
    # Matrix multiply of the input by the weights.
    matmul_result = irb_inner.graphblas.matrix_multiply(X, W)
    # Apply the biases with the "plus_plus" binary op.
    add_bias_result = irb_inner.graphblas.matrix_apply(
        matmul_result, biases, "plus_plus"
    )
    # Clamp using "min" against the threshold (argument order differs from
    # the bias apply above — presumably the op-name-first overload; verify).
    clamp_result = irb_inner.graphblas.matrix_apply(
        add_bias_result, "min", threshold
    )
    # Keep only entries selected by the "gt0" predicate (ReLU-like).
    relu_result = irb_inner.graphblas.matrix_select(clamp_result, "gt0")
    return relu_result
<?php
//Function to generate a random alphanumeric string of length 8
//Function to generate a random alphanumeric string of length 8
function generate_random_string() {
    $characters = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
    $charactersLength = strlen($characters);
    $randomString = '';
    for ($i = 0; $i < 8; $i++) {
        // random_int() is a CSPRNG, unlike rand(), so the string is safe
        // to use for token-like identifiers as well.
        $randomString .= $characters[random_int(0, $charactersLength - 1)];
    }
    return $randomString;
}

//Example
echo generate_random_string();
// Output: 4XhJ1gcu (output varies per run)
class MinecraftAstroPi(MinecraftShape):
    """MinecraftShape model of an AstroPi board built from wool blocks
    (block id 35, data value 13), with tagged regions for the LED matrix,
    sensors and buttons."""

    def __init__(self, mc, pos):
        # Start invisible; the caller reveals the shape when ready.
        super().__init__(mc, pos, visible=False)

    def createLEDMatrix(self):
        # Slab at y=1 spanning x -6..7, z -9..6, tagged for later lookup.
        self.setBlocks(-6, 1, -9, 7, 1, 6, 35, 13, tag="led_matrix")

    def createSensors(self):
        # Single row of blocks along z=7 representing the sensor strip.
        self.setBlocks(-6, 0, 7, 7, 0, 7, 35, 13, tag="sensors")

    def createButtons(self):
        # Single row of blocks along z=-10 representing the buttons.
        self.setBlocks(-6, 0, -10, 7, 0, -10, 35, 13, tag="buttons")

    def displayMessage(self, message):
        """Placeholder: render ``message`` on the LED matrix with
        setBlock(s) calls, mapping each character to colored blocks.
        Not yet implemented."""
        pass
#!/bin/bash
#
# Set up the environment for python and R
#
#
if [ -e "$HOME/anaconda3" ]
then
    echo "Anaconda3 already available at $HOME/anaconda3"
else
    echo "installing Anaconda3"
    # Pick the installer matching the current OS.
    # (The original tested [ uname == "Linux" ], which compares the literal
    # string "uname" and is always false — so the macOS installer always ran.)
    if [ "$(uname)" == "Linux" ]
    then
        bash /opt/installs/Anaconda3-4.3.0-Linux-x86_64.sh
    else
        bash /opt/installs/Anaconda3-4.3.0-MacOSX-x86_64.sh
    fi
fi
#
# Set up location of conda and add R and python modules
#
if [ -x "$HOME/anaconda3/bin/conda" ]
then
    echo "checking and installing python3.5, R, rpy2, numpy"
    "$HOME/anaconda3/bin/conda" install python=3.5
    "$HOME/anaconda3/bin/conda" config --add channels bioconda
    "$HOME/anaconda3/bin/conda" install bioconductor-affy bioconductor-annotation dbi
    "$HOME/anaconda3/bin/conda" install r-statmod psycopg2 r-argparse
else
    echo "unable to locate conda"
    exit 1
fi
#
# do these installs through conda
#
# From R -- install the necessary bioconductor packages
#
#R --no-save << EOD
#install.packages("argparse")
#EOD
#install through conda
#source("http://bioconductor.org/biocLite.R")
#biocLite( lib="~/anaconda3/lib/R/lib")
#biocLite('affy', lib="~/anaconda3/lib/R/lib")
#biocLite('gcrma', lib="~/anaconda3/lib/R/lib")
#biocLite('limma', lib="~/anaconda3/lib/R/lib")
#biocLite('statmod', lib="~/anaconda3/lib/R/lib")
#!/usr/bin/env bash
# Copyright 2020 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Docs -> file://./upgrade/README.md
# Script entry point.
export GO111MODULE=on

source "$(dirname "$0")/e2e-common.sh"

readonly PROBER_READY_FILE="/tmp/prober-ready"
readonly PROBER_PIPE_FILE="/tmp/prober-signal"

# Overrides
function knative_setup {
  install_latest_release || fail_test 'Installing latest release of Knative Eventing failed'
}

function install_test_resources {
  # Nothing to install before tests
  true
}

function uninstall_test_resources {
  # Nothing to uninstall after tests
  true
}

# Quote "$@" so user-supplied arguments with spaces survive word splitting
# (the original passed an unquoted $@).
initialize "$@" --skip-istio-addon

TIMEOUT=${TIMEOUT:-30m}

header "Running preupgrade tests"
go_test_e2e -tags=preupgrade -timeout="${TIMEOUT}" ./test/upgrade || fail_test

header "Starting prober test"
rm -fv "${PROBER_READY_FILE}"
go_test_e2e -tags=probe -timeout="${TIMEOUT}" ./test/upgrade --pipefile="${PROBER_PIPE_FILE}" --readyfile="${PROBER_READY_FILE}" &
PROBER_PID=$!
echo "Prober PID is ${PROBER_PID}"

wait_for_file "${PROBER_READY_FILE}" || fail_test

header "Performing upgrade to HEAD"
install_head || fail_test 'Installing HEAD version of eventing failed'
install_channel_crds || fail_test 'Installing HEAD channel CRDs failed'
install_mt_broker || fail_test 'Installing HEAD Broker failed'
install_sugar || fail_test 'Installing HEAD Sugar failed'

header "Running postupgrade tests"
go_test_e2e -tags=postupgrade -timeout="${TIMEOUT}" ./test/upgrade || fail_test

header "Performing downgrade to latest release"
install_latest_release || fail_test 'Installing latest release of Knative Eventing failed'

header "Running postdowngrade tests"
go_test_e2e -tags=postdowngrade -timeout="${TIMEOUT}" ./test/upgrade || fail_test

# The prober is blocking on ${PROBER_PIPE_FILE} to know when it should exit.
echo "done" > "${PROBER_PIPE_FILE}"

header "Waiting for prober test"
wait "${PROBER_PID}" || fail_test "Prober failed"

success
/* Resting state for the #button element. */
#button {
  background-color: blue;
}

/* While pressed: switch color for immediate visual feedback. */
#button:active {
  background-color: red;
}
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 5 --topic AtomicMessage |
<filename>schemas/_put.spec.ts
import 'mocha'
import { expect } from 'chai'
import { testing_client, setup_schema_from_files, destroy_schema } from '../lib/utils.spec'
// Integration tests for jsonb-driven "put" (full-row update) queries against
// the blog schema. Each test re-creates the schema from the SQL fixture files.
describe('put blog.sql', async () => {
  beforeEach(async () => await setup_schema_from_files('./schemas/_functions.sql', './schemas/blog.sql'))
  afterEach(async () => await destroy_schema())

  // Single-row put: one jsonb object updates the matching person row in place.
  it('basic put', async () => {
    const client = await testing_client()
    const query = `
      with _person_rows as (
        select $1 :: jsonb as _value
      )
      update person set
        first_name = (_person_rows._value->>'first_name') :: text,
        last_name = (_person_rows._value->>'last_name') :: text,
        organization_id = (_person_rows._value->>'organization_id') :: int,
        preferred_weapons = array(select jsonb_array_elements_text(_person_rows._value->'preferred_weapons')) :: text[]
      from _person_rows
      where id = (_person_rows._value->>'id') :: int
      returning *
    `
    // Baseline: verify the seeded row before updating it.
    expect((await client.query(`
      select id, first_name, last_name, organization_id, preferred_weapons from person where id = 2
    `)).rows).eql([{
      id: 2, first_name: "Luke", last_name: "Skywalker", organization_id: 2, preferred_weapons: ['stolen blaster', 'green lightsaber'],
    }])

    expect((await client.query(query, [{
      id: 2, first_name: "Luke", last_name: null, organization_id: 1, preferred_weapons: ['green lightsaber'],
    }])).rows.length).eql(1)

    // The row should now reflect every field of the payload, nulls included.
    expect((await client.query(`
      select id, first_name, last_name, organization_id, preferred_weapons from person where id = 2
    `)).rows).eql([{
      id: 2, first_name: "Luke", last_name: null, organization_id: 1, preferred_weapons: ['green lightsaber'],
    }])

    await client.end()
  })

  // Batch put: a jsonb array payload updates several rows in one statement.
  it('multiple put', async () => {
    const client = await testing_client()
    const query = `
      with _person_rows as (
        select "value" as _value
        from jsonb_array_elements($1)
      )
      update person set
        first_name = (_person_rows._value->>'first_name') :: text,
        last_name = (_person_rows._value->>'last_name') :: text,
        organization_id = (_person_rows._value->>'organization_id') :: int,
        preferred_weapons = array(select jsonb_array_elements_text(_person_rows._value->'preferred_weapons')) :: text[]
      from _person_rows
      where id = (_person_rows._value->>'id') :: int
      returning *
    `
    expect((await client.query(`
      select id, first_name, last_name, organization_id, preferred_weapons from person where id in (1, 2)
    `)).rows).deep.members([{
      id: 1, first_name: "Darth", last_name: "Vader", organization_id: 1, preferred_weapons: ['red lightsaber'],
    }, {
      id: 2, first_name: "Luke", last_name: "Skywalker", organization_id: 2, preferred_weapons: ['stolen blaster', 'green lightsaber'],
    }])

    expect((await client.query(query, [JSON.stringify([{
      id: 1, first_name: "Darth", last_name: "Father", organization_id: null, preferred_weapons: ['red lightsaber', 'bare hands'],
    }, {
      id: 2, first_name: "Luke", last_name: null, organization_id: 1, preferred_weapons: ['green lightsaber'],
    }])])).rows.length).eql(2)

    expect((await client.query(`
      select id, first_name, last_name, organization_id, preferred_weapons from person where id in (1, 2)
    `)).rows).deep.members([{
      id: 1, first_name: "Darth", last_name: "Father", organization_id: null, preferred_weapons: ['red lightsaber', 'bare hands'],
    }, {
      id: 2, first_name: "Luke", last_name: null, organization_id: 1, preferred_weapons: ['green lightsaber'],
    }])

    await client.end()
  })

  // Nested put: updates an organization, upserts its people, and deletes any
  // person of that organization that is absent from the payload.
  it('put with association delete', async () => {
    const client = await testing_client()
    await client.query(`
      with
      _organization_rows as (
        select
          _value as _organization,
          (_value->>'id') :: int as _organization_id,
          (_value->>'name') :: text as "name"
        from jsonb_array_elements($1) as _(_value)
      ),
      _person_rows as (
        select
          *,
          _value ? 'id' as __needs_update,
          case _value ? 'id' when true then (_value->>'id') :: int else nextval('person_id_seq'::regclass) end as _person_id,
          (_value->>'first_name') :: text as first_name
        from (select _organization_id, jsonb_array_elements(_organization->'people') as _value from _organization_rows) _
      ),
      _update_organization as (
        update organization set
          name = _organization_rows.name
        from _organization_rows
        where id = _organization_rows._organization_id
      ),
      _insert_person as (
        insert into person (id, organization_id, first_name)
        select _person_id, _organization_id, first_name
        from _person_rows
        where not _person_rows.__needs_update
      ),
      _update_person as (
        update person set
          organization_id = _person_rows._organization_id
        from _person_rows
        where id = _person_rows._person_id and _person_rows.__needs_update
      ),
      _delete_person as (
        delete from person where
          -- is in the parent
          exists (select from _organization_rows where organization_id = _organization_rows._organization_id)
          -- but isn't in what we just did
          and not exists (select from _person_rows where id = _person_rows._person_id)
      )
      select true
    `, [JSON.stringify([{
      id: 1, name: "<NAME>",
      people: [{
        first_name: "Sheev",
      }],
    }])])

    expect((await client.query(`
      select id, name from organization where id = 1
    `)).rows).eql([{
      id: 1, name: "Galactic Empire",
    }])
    // Person 1 (and their posts) were not in the payload, so they are gone.
    expect((await client.query('select * from post where person_id = 1')).rows).eql([])
    expect((await client.query('select * from post where person_id != 1')).rows.length).eql(5)
    expect((await client.query(`
      select id, first_name from person where organization_id = 1
    `)).rows).eql([{
      id: 7, first_name: "Sheev",
    }])
    expect((await client.query('select * from person where organization_id != 1 or organization_id is null')).rows.length).eql(5)

    await client.end()
  })
})
|
<gh_stars>0
/*
* Copyright 2012 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.om.core.api.annotation;
import java.util.Map;
/**
* Describes how a collection is to be created from the underlying nodes after
* Object Mapper has resolved the collection base using
* {@link Collection#location()} setting.
*
*
* Note that some of the settings are only applicable for {@link Map}s.
*
* @author <NAME>
*
*/
public enum CollectionMode {
	/**
	 * Construct a collection from the child nodes of the collection base node.
	 */
	Children,

	/**
	 * Treat the collection base node as a multi-valued property and use the
	 * property values to build the collection.
	 */
	MultiValueProperty,

	/**
	 * Creates a collection from all properties found on the base node.
	 * Property names can serve as keys, making this mode primarily useful for
	 * {@link Map}-typed collections (see the class-level note above).
	 */
	Properties
}
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default

# Add a feed source: append the Lienol package feed after the last line
# of the default feeds list.
sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default

# Clone extra package collections directly into the build tree.
git clone https://github.com/kenzok8/openwrt-packages.git package/openwrt-packages
git clone https://github.com/kenzok8/small.git package/small
# AdGuard Home LuCI front-end.
git clone https://github.com/rufengsuixing/luci-app-adguardhome.git package/luci-app-adguardhome
|
class Node:
    """A binary-search-tree node holding one key and two child links."""

    def __init__(self, key):
        self.left = None   # left child: keys smaller than self.val
        self.right = None  # right child: keys >= self.val (duplicates go right)
        self.val = key     # the key stored at this node
def insert(node, val):
    """Insert ``val`` into the BST rooted at ``node``.

    Returns the root of the resulting tree (a new node when ``node`` is
    None, otherwise ``node`` itself). Keys smaller than a node go left;
    equal or larger keys go right, matching the original recursive version.
    """
    if node is None:
        return Node(val)
    current = node
    while True:
        if current.val > val:
            if current.left is None:
                current.left = Node(val)
                break
            current = current.left
        else:
            if current.right is None:
                current.right = Node(val)
                break
            current = current.right
    return node
def min_find(node):
    """Return the node with the smallest key in the subtree, or None.

    The minimum of a BST is its leftmost node, so simply walk left
    until there is no left child.
    """
    if node is None:
        return None
    current = node
    while current.left is not None:
        current = current.left
    return current
def max_find(node):
    """Return the node with the largest key in the subtree, or None.

    The maximum of a BST is its rightmost node, so simply walk right
    until there is no right child.
    """
    if node is None:
        return None
    current = node
    while current.right is not None:
        current = current.right
    return current
def main():
    """Build a small BST from sample data and print its min and max keys."""
    root = None
    array = [19, 12, 85, 14, 37, 23]
    for i in array:
        root = insert(root, i)
    # root is non-None here because the sample array is non-empty.
    print("Minimum element is " + str(min_find(root).val))
    print("Maximum element is " + str(max_find(root).val))

if __name__ == '__main__':
    main()
<reponame>git-lt/weui-react
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// Manager drives a cascading (multi-column) picker: given a flat list of
// { value, parent, ... } items, it materializes one column per level of the
// hierarchy. Transpiled ES5 output — the _createClass/_classCallCheck helpers
// above are generated Babel boilerplate.
var Manager = function () {
  // data: flat item list; count: expected number of columns;
  // fixedColumns (optional): hard cap on the number of columns.
  function Manager(data, count, fixedColumns) {
    _classCallCheck(this, Manager);

    this.data = data;
    this.count = count;
    if (fixedColumns) {
      this.fixedColumns = fixedColumns;
    }
  }

  _createClass(Manager, [{
    key: 'getChildren',
    // All items whose parent equals the given value.
    value: function getChildren(value) {
      return this.data.filter(function (v) {
        return v.parent === value;
      });
    }
  }, {
    key: 'getFirstColumn',
    // Root items: no parent, or a parent of 0 / '0'.
    value: function getFirstColumn() {
      return this.data.filter(function (v) {
        return !v.parent || v.parent === 0 || v.parent === '0';
      });
    }
  }, {
    key: 'getPure',
    // Deep-clone via JSON round-trip (drops functions/undefined — assumed
    // plain data here).
    value: function getPure(obj) {
      return JSON.parse(JSON.stringify(obj));
    }
  }, {
    key: 'getColumns',
    // Build the column arrays for the given selected-value path. If any
    // selected value is not present in the data, the selection is discarded
    // and columns are derived from defaults.
    value: function getColumns(value) {
      var _this = this;

      // check is data contains the values
      if (value.length > 0) {
        var matchCount = this.getPure(this.data).filter(function (item) {
          return _this.getPure(value).indexOf(item.value) > -1;
        }).length;
        if (matchCount < this.getPure(value).length) {
          value = [];
        }
      }

      var datas = [];
      var max = this.fixedColumns || 8;
      for (var i = 0; i < max; i++) {
        if (i === 0) {
          datas.push(this.getFirstColumn());
        } else {
          // When there is no selected value at this level, descend through
          // the first item of the previous column.
          if (!value[i]) {
            if (typeof datas[i - 1][0] === 'undefined') {
              break;
            } else {
              var topValue = datas[i - 1][0].value;
              datas.push(this.getChildren(topValue));
            }
          } else {
            datas.push(this.getChildren(value[i - 1]));
          }
        }
      }

      // Drop empty columns at the tail.
      var list = datas.filter(function (item) {
        return item.length > 0;
      });

      // correct the column
      this.count = list.length;
      return list;
    }
  }]);

  return Manager;
}();

exports.default = Manager;
# Runs before each test: load the bats helper libraries and switch to the
# fixture directory containing the lets commands under test.
setup() {
    load "${BATS_UTILS_PATH}/bats-support/load.bash"
    load "${BATS_UTILS_PATH}/bats-assert/load.bash"

    cd ./tests/os_command_env
}

# lets must export the invoked command's name via LETS_COMMAND_NAME.
@test "LETS_COMMAND_NAME: contains command name" {
    run lets print-command-name-from-env

    assert_success
    assert_line --index 0 "print-command-name-from-env"
}

# lets must export all positional args, verbatim, via LETS_COMMAND_ARGS.
@test "LETS_COMMAND_ARGS: contains all positional args" {
    run lets print-command-args-from-env --foo --bar=x y

    assert_success
    assert_line --index 0 "--foo --bar=x y"
}

# The same args must also reach the command's shell as "$@".
@test "\$@: contains all positional args" {
    run lets print-shell-args --foo --bar=x y

    assert_success
    assert_line --index 0 "--foo --bar=x y"
}
|
<gh_stars>10-100
import React from "react";
import "./edit.scss";
import { Box } from "@material-ui/core";
import EditProfileForm from "../../../components/Forms/EditProfileForm";
import useStoreViewsSelector from "../../../hooks/useStoreViewsSelector";
import { Helmet } from "react-helmet";
import { useIntl } from "react-intl";
/**
 * Copy-trader provider "edit profile" page: centers an EditProfileForm for
 * the provider currently held in the store, with a localized document title.
 *
 * @returns {JSX.Element} The edit page component.
 */
const CopyTradersEdit = () => {
  const storeViews = useStoreViewsSelector();
  const intl = useIntl();

  return (
    <Box
      alignItems="center"
      className="copyTradersEditPage"
      display="flex"
      flexDirection="row"
      justifyContent="center"
    >
      {/* Title format: "<provider> - Edit | <product>" via react-intl ids. */}
      <Helmet>
        <title>
          {`${storeViews.provider.name} - ${intl.formatMessage({
            id: "srv.edit",
          })} | ${intl.formatMessage({ id: "product" })}`}
        </title>
      </Helmet>
      {<EditProfileForm provider={storeViews.provider} />}
    </Box>
  );
};

export default CopyTradersEdit;
|
# Function to help manage traps
# Al Williams - Hackaday, August 2019
#
# Here's what it does:
# 1) Provide a function trappist_trap
# 2) Call trappist_init with or without arguments
#
# If you forget to provide trappist_trap, a stupid one will be provided but you can
# still override it later
# With no arguments to trappist_init, all signals you can catch go through your function with an argument indicating which signal fired
# You can ignore it or reraise it as shown in the default handler (see below)
# Of course, you can't do anything with signals you can't catch (e.g., kill -9)
#
# If you provide a list of signals that start with + or -, you will cause those signals to get the default handler (+) or
# to be ignored totally (-) assuming you are allowed to ignore the signal.
# Example:
# trappist_init +SIGQUIT -SIGHUP -SIGILL
#
# If you use the first argument as = you will ONLY catch the signals you name by themselves or with an = prefix and then
# you can also use the + and - prefix, although since + will be the default, you'll probably only need -
# Example:
# trappist_init = SIGQUIT SIGHUP
# trappist_init = =SIGQUIT =SIGHUP # same as above
# trappist_init = SIGQUIT -SIGHUP
#
# For regularity, you can use @ as the first argument to get the default behavior
# trappist_init @ +SIGQUIT -SIGHUP -SIGILL # same as earlier example without @ or =
#
# Normally, you should define your trappist_trap function and then source this file (. /path/to/trappist.sh)
# However, you can define trappist_trap afterwards
# Install trappist_trap as the handler for the requested set of signals.
# See the usage notes above for the three calling conventions
# (no args / "@" with +/- exclusions / "=" with an explicit list).
function trappist_init()
{
    # define a trappist_trap function if there isn't already one
    if ! type trappist_trap >/dev/null
    then
        # set up default handler: restore default and reraise which is, of course, stupid
        eval 'function trappist_trap()
        {
            echo Default trappist_trap: $1
            trap $1
            kill -$1 $$
        }'
    fi

    # NOTE(review): `mode` is not declared local, so it leaks into the
    # caller's shell — confirm that is acceptable before reuse.
    # if no arguments, mode 0, do all
    if [ $# -lt 1 ]
    then
        mode=0
    else # mode 1 is @ or, at least, no = sign
        mode=1 # assume we will get all or handle exclusions
        if [ "$1" == "@" ] # @ +sig1 -sig2 means default action for sig1, no action for sig2, trap call for all others (mode 1)
        then
            shift
        else if [ "$1" == "=" ] # = sig1 sig2 +sig3 -sig4 means sig1/2 call trap, sig3 gets default, sig4 is ignored (mode 2)
            then
                mode=2 # mode 2 only sets explicit signals
                shift
            fi
        fi
    fi

    # set up trap calls for everything in mode 0 or 1
    # (parse `trap -l` output to enumerate every named signal)
    if [ $mode -eq 0 -o $mode -eq 1 ]
    then
        for t in $(trap -l | tr ')' '\n' | tr '\t' ' ' | cut -d ' ' -f 2 | grep ^[A-Z] )
        do
            trap "trappist_trap $t" $t
        done
    fi

    # for mode 2, only include signals that don't have + or - prefix
    # = prefix means set this signal (not really required as no prefix has same effect)
    if [ $mode -eq 2 ]
    then
        for t in $@
        do
            if [[ ${t:0:1} == "=" ]] # skip = which isn't necessary but allowed
            then
                t=${t:1}
            fi
            # ignore signals with +/- prefix for now
            if [[ ${t:0:1} != "+" && ${t:0:1} != "-" ]]
            then
                trap "trappist_trap $t" $t
            fi
        done
    fi

    # For mode 1 or 2, we need to work through the +signal -signal items
    if [ $mode -eq 1 -o $mode -eq 2 ]
    then
        for t in $@
        do
            if [[ ${t:0:1} == "+" ]] # default handler
            then
                trap ${t:1}
            else
                if [[ ${t:0:1} == "-" ]] # ignore
                then
                    trap "" ${t:1}
                fi
            fi
        done
    fi
}
################
# Everything from here down is just an example
## Really simple trap handler
# function trappist_trap()
# {
# echo Trap: $1
# }
## test harness
#trappist_init
#echo Trap me @ $$!
#while true
#do sleep 5 # Note: most signals won't happen until after sleep returns!
# echo Still going
#done
|
# SETUP DEPENDENCIES
# 1) Install build dependencies
sudo apt-get update
sudo apt-get install -y build-essential

# 2) Install Miniconda3
# NOTE(review): the installer runs interactively and its install prefix is
# chosen at the prompt — the CONDAPATH/PYTHON exports below assume it ends
# up in ./miniconda3; confirm before running unattended.
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
sh Miniconda3-latest-Linux-x86_64.sh
export CONDAPATH="$(pwd)/miniconda3"
export PYTHON="$(pwd)/miniconda3/envs/hummingbot/bin/python3"

# INSTALL HUMMINGBOT
# 3) Clone Hummingbot
git clone https://github.com/entwty/hummingbot.git

# 4) Install Hummingbot (creates the "hummingbot" conda environment)
export hummingbotPath="$(pwd)/hummingbot" && cd $hummingbotPath && ./install

# 5) Activate environment and compile code (Cython extensions built in place)
${CONDAPATH}/bin/activate hummingbot && ${PYTHON} setup.py build_ext --inplace

# 6) Start Hummingbot
${PYTHON} bin/hummingbot.py

# 7) Update .bashrc to register `conda`
exec bash
from datetime import datetime
from kivy.app import App
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.uix.label import Label
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.widget import Widget
import time
#Define the different screens
# Define the different screens; their layouts live in app_design.kv.
class Dashboard(Screen):
    pass

class ViewAllOrders(Screen):
    pass

class OrdersAwaitingPostage(Screen):
    pass

class OrdersAwaitingPacking(Screen):
    pass

class OrdersAwaitingPicking(Screen):
    pass

class ApplicationSettings(Screen):
    pass

# Root screen manager that switches between the screens above.
class WindowManager(ScreenManager):
    pass

# One row of the packing list UI (layout presumably in the .kv file — verify).
class DynamicPackingRow(Widget):
    pass
#Displays the current time
class MyClock(Label):
def __init__(self, **kwargs):
super().__init__(**kwargs)
Clock.schedule_interval(self.update_my_clock, 1 / 10.)
def update_my_clock(self, delta):
self.ids.time.text = str(datetime.today().time())[0:8]
#Displays the current date
class MyCalendar(Label):
def __init__(self, **kwargs):
super().__init__(**kwargs)
Clock.schedule_interval(self.update_my_calendar, 1 / 10.)
def update_my_calendar(self, delta):
self.ids.date.text = str(datetime.today().date())
# Designate the .kv design file; the widget tree is built at import time.
kv = Builder.load_file('app_design.kv')

class MyApp(App):
    """Application entry point; the whole UI comes from app_design.kv."""

    def build(self):
        return kv

# Runs the app
if __name__ == '__main__':
    MyApp().run()

# Planned screens / features (see Screen subclasses above):
#Dashboard
#ViewAll Orders
#View Orders Awaiting Postaage
#awaiting packing
#awaiting picking
#refresh order button
#settings page
<filename>spec/views/attendances/index.html.erb_spec.rb
require 'rails_helper'
# View spec: the attendances index should render one table row per record.
RSpec.describe "attendances/index", type: :view do
  before(:each) do
    # Two identical attendance fixtures; nil associations keep the view's
    # rendering of those columns predictable (empty strings).
    assign(:attendances, [
      Attendance.create!(
        :meeting => nil,
        :user => nil,
        :special_duties => "Special Duties",
        :completed_hours => false
      ),
      Attendance.create!(
        :meeting => nil,
        :user => nil,
        :special_duties => "Special Duties",
        :completed_hours => false
      )
    ])
  end

  it "renders a list of attendances" do
    render
    # Each assertion expects the cell value to appear once per fixture row.
    assert_select "tr>td", :text => nil.to_s, :count => 2
    assert_select "tr>td", :text => nil.to_s, :count => 2
    assert_select "tr>td", :text => "Special Duties".to_s, :count => 2
    assert_select "tr>td", :text => false.to_s, :count => 2
  end
end
|
<reponame>coignetp/jua
package jua.parser;
import jua.ast.Expression;
import jua.ast.Statement;
import jua.ast.StatementWhile;
import jua.token.Keyword;
import jua.token.Token;
/**
 * Parses Lua-style {@code while} statements into {@link StatementWhile} AST
 * nodes: the WHILE keyword, a condition expression, then the loop body block.
 */
public class WhileStatementParser implements StatementParser {

  @Override
  public Statement parse(Parser parser) throws IllegalParseException {
    // Keep the WHILE token so the statement carries its source position.
    Token tok = parser.currentToken();
    parser.consume(Keyword.WHILE);

    Expression condition = parser.parseExpression();

    // TODO: maybe reuse the parser's instance ?
    BlockStatementParser blockParser = new BlockStatementParser();
    Statement consequence = blockParser.parse(parser);

    return new StatementWhile(tok, condition, consequence);
  }

  /** Matches when the current token is the WHILE keyword. */
  @Override
  public boolean matches(Parser parser) {
    return parser.currentToken().isSubtype(Keyword.WHILE);
  }
}
|
package com.shop.gulimall.order.dao;
import com.shop.gulimall.order.entity.OrderEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
* 订单
*
* @author lvjl
* @email <EMAIL>
* @date 2020-09-10 17:04:43
*/
@Mapper
public interface OrderDao extends BaseMapper<OrderEntity> {
    // Intentionally empty: generic CRUD for OrderEntity is inherited from
    // the MyBatis-Plus BaseMapper.
}
|
package dkr
import (
"fmt"
"io"
"os"
"os/exec"
"os/user"
"strings"
)
// SandboxOptions carries optional settings for Sandbox.
type SandboxOptions struct {
	// Out, when non-nil, receives the container's stdout instead of os.Stdout.
	Out io.Writer
}
// Sandbox runs the given image in a docker container that mirrors the
// caller's context: the host filesystem is mounted under /host, the working
// directory is the current directory (under /host), the docker socket is
// forwarded, and the environment is passed through with HOME rewritten to
// its /host location. entrypoint, when non-empty, overrides the image's
// entrypoint; args become the container command arguments. options may
// redirect stdout; stdin and stderr are always inherited.
func Sandbox(image string, entrypoint []string, args []string, options *SandboxOptions) error {
	pwd, err := os.Getwd()
	if err != nil {
		return err
	}

	// Forward the environment, pointing HOME at its /host-mounted path.
	env := []string{}
	for _, e := range os.Environ() {
		if strings.HasPrefix(e, "HOME=") {
			homeSegments := strings.SplitN(e, "=", 2)
			e = fmt.Sprintf("HOME=/host%s", homeSegments[1])
		}
		env = append(env, "-e"+e)
	}

	usr, err := user.Current()
	if err != nil {
		return err
	}

	// docker's --user flag takes uid[:gid]; the original passed gid:uid.
	cmdArgs := []string{"run", "--rm", "--user", usr.Uid + ":" + usr.Gid, "-v=/var/run/docker.sock:/var/run/docker.sock",
		fmt.Sprintf("-v=%s:%s", "/", "/host"),
		fmt.Sprintf("-w=/host%s", pwd)}

	tty, err := isTty()
	if err != nil {
		return err
	}
	if tty {
		cmdArgs = append(cmdArgs, "-it")
	}
	cmdArgs = append(cmdArgs, env...)

	// All docker flags, --entrypoint included, must come BEFORE the image
	// name; the original appended --entrypoint after the image, so docker
	// treated it as the container command instead of a docker option.
	if len(entrypoint) > 0 {
		entrypointArg := fmt.Sprintf(`["%s"]`, strings.Join(entrypoint, `","`))
		cmdArgs = append(cmdArgs, "--entrypoint", entrypointArg)
	}
	cmdArgs = append(cmdArgs, image)

	c := exec.Command("docker", append(cmdArgs, args...)...)
	c.Env = os.Environ()
	c.Stdin = os.Stdin
	if options != nil && options.Out != nil {
		c.Stdout = options.Out
	} else {
		c.Stdout = os.Stdout
	}
	c.Stderr = os.Stderr
	return c.Run()
}
// isTty reports whether stdin behaves like a TTY by probing docker itself:
// a `docker run -it` with a non-TTY stdin prints "not a TTY".
func isTty() (bool, error) {
	// --rm removes the probe container after it exits; the original leaked
	// one stopped container per call.
	cmd := exec.Command("docker", "run", "--rm", "-it", "alpine", "echo")
	cmd.Stdin = os.Stdin
	out, err := cmd.CombinedOutput()
	if strings.Contains(string(out), "not a TTY") {
		return false, nil
	}
	return true, err
}
|
import React from 'react'
import { withRouter } from 'react-router'
import Button from '@material-ui/core/Button'
import Card from '@material-ui/core/Card'
import CardActions from '@material-ui/core/CardActions'
import CardContent from '@material-ui/core/CardContent'
import Typography from '@material-ui/core/Typography'
import UpdateIcon from '@material-ui/icons/UpdateTwoTone'
import OpenInBrowserIcon from '@material-ui/icons/OpenInBrowserTwoTone'
import { makeStyles } from '@material-ui/core/styles'
import { useApplicationStateValue } from './context/applicationState'
import { useSearchStateValue } from './context/searchState'
import * as urlHelper from './helpers/url'
// Styles for the nearest-library card: orange-tinted borders/actions and a
// bold library name.
const useStyles = makeStyles((theme) => ({
  bullet: {
    display: 'inline-block',
    margin: '0 4px'
  },
  card: {
    marginLeft: theme.spacing(1),
    marginRight: theme.spacing(1),
    border: '2px solid #ffe0b2'
  },
  cardActions: {
    backgroundColor: '#fff3e0',
    color: '#388e3c'
  },
  libraryName: {
    fontWeight: theme.typography.fontWeightBold
  }
}))
/**
 * Card describing the user's nearest library: distance, name, address, and
 * actions to open its website or switch the app to its library service.
 */
function LibraryCard (props) {
  const [{ services }] = useApplicationStateValue()
  const [{ library, service }, dispatchSearch] = useSearchStateValue() //eslint-disable-line
  const classes = useStyles()

  // Switch the active service to the one matching the library's authority,
  // updating both search state and the URL.
  const changeService = (serviceName) => {
    const serviceMatch = services.filter(s => s.Name === serviceName)
    if (serviceMatch && serviceMatch.length === 1) {
      dispatchSearch({ type: 'SetService', service: serviceMatch[0] })
      urlHelper.addService(props.history, serviceMatch[0].systemName)
    }
  }

  const bull = <span className={classes.bullet}> • </span>
  // Convert metres to whole miles (1609 m per mile — rounded).
  const distance = library != null ? Math.round(library.distance / 1609) : null

  return (
    <div>
      {library != null ? (
        <Card elevation={0} className={classes.card}>
          <CardContent>
            <Typography component='h2' variant='h5'>Your nearest library is {distance} mile{distance !== 1 ? 's' : ''}</Typography>
            <Typography component='p' variant='h5'><span className={classes.libraryName}>{library.library_name}</span></Typography>
            <Typography variant='body2' component='p'>
              {[library.address_1, library.address_2, library.address_3, library.postcode].filter(a => a !== '').join(', ')}
              {bull}
              {library.local_authority}
              {library.colocated === 'Yes' && library.colocated_with !== null ? (bull + 'Colocated with ' + library.colocated_with) : ''}
            </Typography>
          </CardContent>
          <CardActions className={classes.cardActions}>
            <Button variant='text' size='large' color='secondary' startIcon={<OpenInBrowserIcon />} target='_blank' href={library.url}>Go to website</Button>
            {/* Only offer switching when the library belongs to a different service. */}
            {library.local_authority !== service.Name ? <Button variant='text' size='large' color='secondary' startIcon={<UpdateIcon />} onClick={() => changeService(library.local_authority)}>Use this library service</Button> : null}
          </CardActions>
        </Card>
      ) : null}
    </div>
  )
}

export default withRouter(LibraryCard)
|
#!/bin/sh
#
# populate_fiji.sh
#
# This script generates a local Fiji.app with SciView installed.
# -- Functions --

# left STR SEP  -> STR up to (excluding) the last occurrence of SEP.
left() { echo "${1%$2*}"; }
# right STR SEP -> STR after the last occurrence of SEP.
right() { echo "${1##*$2}"; }
# mid STR A B   -> the part of STR between separator A and separator B.
mid() { right "$(left "$1" "$3")" "$2"; }

# Print an error to stderr and abort the whole script.
die() {
  echo "ERROR: $@" 1>&2; exit 1;
}

# Copies the given Maven coordinate to the specified output directory.
install() {
  (set -x; mvn dependency:copy -Dartifact="$1" -DoutputDirectory="$2" > /dev/null) ||
    die "Install failed"
}

# Copies the given Maven coordinate to the specified output directory, keeping the groupId
installWithGroupId() {
  (set -x; mvn dependency:copy -Dartifact="$1" -DoutputDirectory="$2" -Dmdep.prependGroupId=true > /dev/null) ||
    die "Install failed"
}

# Deletes the natives JAR with the given artifactId and classifier.
deleteNative() {
  (set -x; rm -f $FijiDirectory/jars/$1-[0-9]*-$2.jar $FijiDirectory/jars/*/$1-[0-9]*-$2.jar) ||
    die "Delete failed"
}

# Deletes all natives JARs with the given artifactId.
deleteNatives() {
  (set -x; rm -f $FijiDirectory/jars/$1-[0-9]*-natives-*.jar $FijiDirectory/jars/*/$1-[0-9]*-natives-*.jar) ||
    die "Delete failed"
}
# -- Check if we have a path given, in that case we do not download a new Fiji, but use the path given --
if [ -z "$1" ]
then
  echo "--> Installing into pristine Fiji installation"
  echo "--> If you want to install into a pre-existing Fiji installation, run as"
  echo "    $0 path/to/Fiji.app"

  # -- Determine correct ImageJ launcher executable --
  case "$(uname -s),$(uname -m)" in
    Linux,x86_64) launcher=ImageJ-linux64 ;;
    Linux,*) launcher=ImageJ-linux32 ;;
    Darwin,*) launcher=Contents/MacOS/ImageJ-macosx ;;
    MING*,*) launcher=ImageJ-win32.exe ;;
    MSYS_NT*,*) launcher=ImageJ-win32.exe ;;
    *) die "Unknown platform" ;;
  esac

  # -- Roll out a fresh Fiji --
  # Reuse a previously downloaded archive when present.
  if [ ! -f fiji-nojre.zip ]
  then
    echo
    echo "--> Downloading Fiji"
    curl -L -O https://downloads.imagej.net/fiji/latest/fiji-nojre.zip ||
      die "Could not download Fiji"
  fi

  echo "--> Unpacking Fiji"
  rm -rf Fiji.app
  unzip fiji-nojre.zip || die "Could not unpack Fiji"

  echo
  echo "--> Updating Fiji"
  Fiji.app/$launcher --update update-force-pristine
  FijiDirectory=Fiji.app
else
  echo "--> Installing into Fiji installation at $1"
  FijiDirectory=$1
fi

echo
echo "--> Copying dependencies into Fiji installation"
# scijava.app.directory makes the build copy artifacts into the Fiji tree.
(set -x; mvn -Dscijava.app.directory=$FijiDirectory) ||
  die "Failed to copy dependencies into Fiji directory"

echo "--> Removing slf4j bindings"
(set -x; rm -f $FijiDirectory/jars/slf4j-simple-*.jar) ||
  die "Failed to remove slf4j bindings"

# -- Put back jar/gluegen-rt and jar/jogl-all --
echo
echo "--> Reinstalling gluegen-rt, jogl-all, jocl, jinput, and ffmpeg"
# Versions are recovered from the JAR filenames already present in the tree.
gluegenJar=$(echo $FijiDirectory/jars/gluegen-rt-main-*.jar)
gluegenVersion=$(mid "$gluegenJar" "-" ".jar")
install "org.jogamp.gluegen:gluegen-rt:$gluegenVersion" $FijiDirectory/jars

joglJar=$(echo $FijiDirectory/jars/jogl-all-main-*.jar)
joglVersion=$(mid "$joglJar" "-" ".jar")
install "org.jogamp.jogl:jogl-all:$joglVersion" $FijiDirectory/jars

# These coordinates come from the project's own dependency tree.
joclGAV=$(mvn dependency:tree | grep jocl | awk -e '{print $NF}' | cut -d: -f1-4 | sed 's/:jar//g')
installWithGroupId "$joclGAV" $FijiDirectory/jars

jinputGAV=$(mvn dependency:tree | grep jinput | head -n1 | awk -e '{print $NF}' | cut -d: -f1-4 | sed 's/:jar//g' | sed 's/:compile//g')
install "$jinputGAV" $FijiDirectory/jars
installWithGroupId "$jinputGAV:jar:natives-all" $FijiDirectory/jars/win64
installWithGroupId "$jinputGAV:jar:natives-all" $FijiDirectory/jars/linux64
installWithGroupId "$jinputGAV:jar:natives-all" $FijiDirectory/jars/macosx

echo "--> Removing jinput natives from JAR root"
(set -x; rm -f $FijiDirectory/jars/jinput-*-natives-all.jar)

ffmpegGAV=$(mvn dependency:tree | grep 'ffmpeg:jar' | head -n1 | awk -e '{print $NF}' | cut -d: -f1-4 | sed 's/:jar//g' | sed 's/:compile//g')
installWithGroupId "$ffmpegGAV" $FijiDirectory/jars
installWithGroupId "$ffmpegGAV:jar:windows-x86_64" $FijiDirectory/jars/win64
installWithGroupId "$ffmpegGAV:jar:linux-x86_64" $FijiDirectory/jars/linux64
installWithGroupId "$ffmpegGAV:jar:macosx-x86_64" $FijiDirectory/jars/macosx

# -- Get the latest imagej-launcher -- [CHECK IF THIS CAN BE REMOVED]
wget "https://maven.scijava.org/service/local/repositories/releases/content/net/imagej/imagej-launcher/5.0.2/imagej-launcher-5.0.2-linux64.exe" -O $FijiDirectory/ImageJ-linux64 ||
  die "Could not get linux64 launcher"
chmod +x $FijiDirectory/ImageJ-linux64
wget "https://maven.scijava.org/service/local/repositories/releases/content/net/imagej/imagej-launcher/5.0.2/imagej-launcher-5.0.2-macosx.exe" -O $FijiDirectory/Contents/MacOS/ImageJ-macosx ||
  die "Could not get macOS launcher"
chmod +x $FijiDirectory/Contents/MacOS/ImageJ-macosx
wget "https://maven.scijava.org/service/local/repositories/releases/content/net/imagej/imagej-launcher/5.0.2/imagej-launcher-5.0.2-win32.exe" -O $FijiDirectory/ImageJ-win32 ||
  die "Could not get Win32 launcher"
chmod +x $FijiDirectory/ImageJ-win32
wget "https://maven.scijava.org/service/local/repositories/releases/content/net/imagej/imagej-launcher/5.0.2/imagej-launcher-5.0.2-win64.exe" -O $FijiDirectory/ImageJ-win64 ||
  die "Could not get Win64 launcher"
chmod +x $FijiDirectory/ImageJ-win64

# -- Fix old miglayout
rm $FijiDirectory/jars/miglayout-3.7.4-swing.jar
install "com.miglayout:miglayout-swing:5.2" $FijiDirectory/jars

# -- Get the list of native libraries --
# [NB] dependency:list emits G:A:P:C:V but dependency:copy needs G:A:V:P:C.
echo
echo "--> Extracting list of native dependencies"
natives=$(mvn -B dependency:list |
  grep natives |
  sed -e 's/^\[INFO\] *\([^:]*\):\([^:]*\):\([^:]*\):\([^:]*\):\([^:]*\):.*/\1:\2:\5:\3:\4/' |
  grep -v -- '-\(android\|armv6\|solaris\)' |
  sort)
for gavpc in $natives
do
  # Peel the coordinate apart: group, artifact, version, packaging, classifier.
  gavp=$(left "$gavpc" ':')
  gav=$(left "$gavp" ':')
  ga=$(left "$gav" ':')
  g=$(left "$ga" ':')
  a=$(right "$ga" ':')
  v=$(right "$gav" ':')
  p=$(right "$gavp" ':')
  c=$(right "$gavpc" ':')
  echo
  echo "[$a-$v-$c]"
  case "$g" in
    org.lwjgl|graphics.scenery)
      deleteNatives "$a"
      # [NB] Install all architectures manually; only one is a dependency.
      install "$gavp:natives-windows" $FijiDirectory/jars/win64
      install "$gavp:natives-macos" $FijiDirectory/jars/macosx
      install "$gavp:natives-linux" $FijiDirectory/jars/linux64
      ;;
    *)
      deleteNative "$a" "$c"
      # Map the natives classifier to Fiji's per-platform jar directory.
      case "$c" in
        natives-win*-i586) continue ;;
        natives-win*) platform=win64 ;;
        natives-linux*-i586) continue ;;
        natives-linux*) platform=linux64 ;;
        natives-osx|natives-mac*) platform=macosx ;;
        natives-all*) continue ;;
        *) die "Unsupported platform: $c" ;;
      esac
      install "$gavpc" "$FijiDirectory/jars/$platform"
      ;;
  esac
done
|
rm -rf bin corehunter-lib/target corehunter-cli/target coreanalyser-cli/target
|
<reponame>1024pix/pix-ui
import { action } from '@ember/object';
import { tracked } from '@glimmer/tracking';
import PixButtonBase from './pix-button-base';
export default class PixButton extends PixButtonBase {
text = 'pix-button';
defaultModel = [];
@tracked isTriggering = false;
get isLoading() {
return this.args.isLoading || this.isTriggering;
}
get type() {
return this.args.type || 'button';
}
get loadingColor() {
return this.args.loadingColor || this.args['loading-color'] || 'white';
}
get isButtonLoadingOrDisabled() {
return this.isLoading || this.args.isDisabled;
}
get ariaDisabled() {
return this.isButtonLoadingOrDisabled;
}
get className() {
return super.baseClassNames.join(' ');
}
get enableTriggerAction() {
return !(this.type === 'submit' && !this.args.triggerAction);
}
@action
async triggerAction(params) {
try {
this.isTriggering = true;
await this.args.triggerAction(params);
this.isTriggering = false;
} catch (e) {
this.isTriggering = false;
if (!this.args.triggerAction) {
throw new Error('@triggerAction params is required for PixButton !');
}
throw new Error(e);
}
}
}
|
<filename>RRTS/HttpUnit/httpunit-1.7/src/com/meterware/servletunit/DispatchedRequestWrapper.java
package com.meterware.servletunit;
/********************************************************************************************************************
* $Id: DispatchedRequestWrapper.java 475 2003-02-21 15:44:00Z russgold $
*
* Copyright (c) 2003, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*******************************************************************************************************************/
import java.util.Enumeration;
import java.util.Map;
import java.util.Hashtable;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.RequestDispatcher;
/**
* This class represents a request dispatched via a RequestDispatcherImpl.
**/
class DispatchedRequestWrapper extends HttpServletRequestWrapper {

    /** Request-specific information, including parameters and paths. **/
    private RequestContext _requestContext;

    /** The request being wrapped. **/
    private HttpServletRequest _baseRequest;

    /** Factory for a wrapper carrying include-dispatch semantics. **/
    static HttpServletRequest createIncludeRequestWrapper( HttpServletRequest request, RequestDispatcher dispatcher ) {
        return new IncludeRequestWrapper( request, dispatcher );
    }

    /** Factory for a wrapper carrying forward-dispatch semantics. **/
    static HttpServletRequest createForwardRequestWrapper( HttpServletRequest request, RequestDispatcher dispatcher ) {
        return new ForwardRequestWrapper( request, dispatcher );
    }

    // The dispatcher doubles as the RequestContext supplying parameters; the
    // wrapped request is registered as its parent so lookups can fall back.
    DispatchedRequestWrapper( HttpServletRequest baseRequest, RequestDispatcher dispatcher ) {
        super( baseRequest );
        _baseRequest = baseRequest;
        _requestContext = (RequestContext) dispatcher;
        _requestContext.setParentRequest( baseRequest );
    }

    /** Returns the original (unwrapped) request. **/
    HttpServletRequest getBaseRequest() {
        return _baseRequest;
    }

    // All parameter accessors are served from the dispatch context rather
    // than from the wrapped request.
    public String getParameter( String s ) {
        return _requestContext.getParameter( s );
    }

    public Enumeration getParameterNames() {
        return _requestContext.getParameterNames();
    }

    public String[] getParameterValues( String s ) {
        return _requestContext.getParameterValues( s );
    }

    public Map getParameterMap() {
        return _requestContext.getParameterMap();
    }
}
/**
 * Wrapper for an include-dispatched request: exposes the standard
 * javax.servlet.include.* request attributes describing the included
 * resource, while delegating everything else to the base request.
 **/
class IncludeRequestWrapper extends DispatchedRequestWrapper {

    final static String REQUEST_URI  = "javax.servlet.include.request_uri";
    final static String CONTEXT_PATH = "javax.servlet.include.context_path";
    final static String SERVLET_PATH = "javax.servlet.include.servlet_path";
    final static String PATH_INFO    = "javax.servlet.include.path_info";
    final static String QUERY_STRING = "javax.servlet.include.query_string";

    // Include-specific attribute overlay; consulted before the base request.
    private Hashtable _attributes = new Hashtable();

    IncludeRequestWrapper( HttpServletRequest request, RequestDispatcher dispatcher ) {
        super( request, dispatcher );
        _attributes.put( REQUEST_URI, ((RequestDispatcherImpl) dispatcher ).getRequestURI() );
        _attributes.put( CONTEXT_PATH, request.getContextPath() );
        _attributes.put( SERVLET_PATH, ((RequestDispatcherImpl) dispatcher ).getServletMetaData().getServletPath() );
        // Hashtable rejects null values, so path info is only stored if set.
        final String pathInfo = ((RequestDispatcherImpl) dispatcher ).getServletMetaData().getPathInfo();
        if (pathInfo != null) _attributes.put( PATH_INFO, pathInfo );
    }

    // Overlay attributes win; anything else falls through to the base request.
    public Object getAttribute( String s ) {
        Object result = _attributes.get( s );
        return (result != null) ? result : super.getAttribute( s );
    }
}
/**
 * Wrapper for a forward-dispatched request: path accessors report the
 * forward target rather than the original request.
 **/
class ForwardRequestWrapper extends DispatchedRequestWrapper {

    private RequestDispatcherImpl _requestContext;

    ForwardRequestWrapper( HttpServletRequest request, RequestDispatcher dispatcher ) {
        super( request, dispatcher );
        _requestContext = (RequestDispatcherImpl) dispatcher;
    }

    public String getRequestURI() {
        return _requestContext.getRequestURI();
    }

    // NOTE(review): delegates to the wrapped request rather than to the
    // dispatch target — confirm the forward's query string is intentionally
    // not reported here.
    public String getQueryString() {
        return super.getQueryString();
    }

    public String getServletPath() {
        return _requestContext.getServletMetaData().getServletPath();
    }

    public String getPathInfo() {
        return _requestContext.getServletMetaData().getPathInfo();
    }
}
|
<gh_stars>0
import React from 'react';
import { connect } from 'react-redux';
import { PieChart, Pie, Sector } from 'recharts';
import '../../stylesheets/PieChart.css';
// Custom active-slice renderer for the recharts <Pie>: draws the hovered
// sector, a detached outer ring, and a leader line ending in the value
// ("<value> min") and percentage labels.
const renderActiveShape = (props) => {
  const RADIAN = Math.PI / 180;
  const {
    cx,
    cy,
    midAngle,
    innerRadius,
    outerRadius,
    startAngle,
    endAngle,
    fill,
    payload,
    percent,
    value
  } = props;
  // Unit vector at the slice's mid angle (recharts angles run
  // counter-clockwise, hence the negation).
  const sin = Math.sin(-RADIAN * midAngle);
  const cos = Math.cos(-RADIAN * midAngle);
  // Leader line: start just outside the slice (sx,sy), elbow at (mx,my),
  // then a horizontal segment to (ex,ey) pointing away from the center.
  const sx = cx + (outerRadius + 10) * cos;
  const sy = cy + (outerRadius + 10) * sin;
  const mx = cx + (outerRadius + 30) * cos;
  const my = cy + (outerRadius + 30) * sin;
  const ex = mx + (cos >= 0 ? 1 : -1) * 22;
  const ey = my;
  // Labels on the right half anchor at their start, left half at their end.
  const textAnchor = cos >= 0 ? 'start' : 'end';
  return (
    <g>
      {/* Slice name in the donut hole */}
      <text x={cx} y={cy} dy={8} textAnchor='middle' fill={fill}>
        {payload.name}
      </text>
      {/* The hovered sector itself */}
      <Sector
        cx={cx}
        cy={cy}
        innerRadius={innerRadius}
        outerRadius={outerRadius}
        startAngle={startAngle}
        endAngle={endAngle}
        fill={fill}
      />
      {/* Detached highlight ring just outside the sector */}
      <Sector
        cx={cx}
        cy={cy}
        startAngle={startAngle}
        endAngle={endAngle}
        innerRadius={outerRadius + 6}
        outerRadius={outerRadius + 10}
        fill={fill}
      />
      {/* Leader line and its end-point marker */}
      <path
        d={`M${sx},${sy}L${mx},${my}L${ex},${ey}`}
        stroke={fill}
        fill='none'
      />
      <circle cx={ex} cy={ey} r={2} fill={fill} stroke='none' />
      <text
        x={ex + (cos >= 0 ? 1 : -1) * 12}
        y={ey}
        textAnchor={textAnchor}
        fill='#333'
      >{`${value} min`}</text>
      <text
        x={ex + (cos >= 0 ? 1 : -1) * 12}
        y={ey}
        dy={18}
        textAnchor={textAnchor}
        fill='#999'
      >
        {`(${(percent * 100).toFixed(2)}%)`}
      </text>
    </g>
  );
};
// Donut chart of sleep-stage durations (deep/light/rem/wake) driven by the
// redux `sleep` slice; hovering a slice makes it the active (expanded) one.
class TwoLevelPieChart extends React.Component {
  state = {
    // Index of the slice currently rendered with renderActiveShape.
    activeIndex: 0
  };

  onPieEnter = (data, index) => {
    this.setState({
      activeIndex: index
    });
  };

  render() {
    // Map the four sleep stages into recharts {name, value} entries.
    // `data` stays undefined until the sleep summary has loaded.
    let data;
    if (this.props.sleep.summary) {
      data = [
        {
          name: 'Deep',
          value: this.props.sleep.summary.stages
            ? this.props.sleep.summary.stages.deep
            : null
        },
        {
          name: 'Light',
          value: this.props.sleep.summary.stages
            ? this.props.sleep.summary.stages.light
            : null
        },
        {
          name: 'Rem',
          value: this.props.sleep.summary.stages
            ? this.props.sleep.summary.stages.rem
            : null
        },
        {
          name: 'Wake',
          value: this.props.sleep.summary.stages
            ? this.props.sleep.summary.stages.wake
            : null
        }
      ];
    }
    return (
      <div className='pieChart'>
        <PieChart width={500} height={350}>
          <Pie
            activeIndex={this.state.activeIndex}
            activeShape={renderActiveShape}
            data={data}
            cx={300}
            cy={200}
            innerRadius={60}
            outerRadius={80}
            fill='rgba(255,163,0)'
            onMouseEnter={this.onPieEnter}
          />
        </PieChart>
      </div>
    );
  }
}
// Expose only the sleep slice of the redux store to the chart.
const mapStateToProps = (state) => ({ sleep: state.user.sleep });
export default connect(mapStateToProps)(TwoLevelPieChart); |
set -x
# Map the CI environment (Travis / AppVeyor-style variables) onto a Rust
# target triple; Windows takes a separate installer path below.
if [ "${TRAVIS_OS_NAME}" = "osx" ] || [ "${PLATFORM}" = "mac" ]; then
    target=apple-darwin
elif [ "${TRAVIS_OS_NAME}" = "linux" ] || [ "${PLATFORM}" = "linux" ]; then
    target=unknown-linux-gnu
elif [ "${OS}" = "Windows_NT" ] || [ "${PLATFORM}" = "win" ]; then
    target=pc-mingw32
    windows=1
fi
if [ "${TRAVIS}" = "true" ] && [ "${target}" = "unknown-linux-gnu" ]; then
    # Install a 32-bit compiler for linux
    sudo apt-get update
    sudo apt-get install gcc-multilib lib32stdc++6
fi
# Install both 64 and 32 bit libraries. Apparently travis barfs if you try to
# just install the right ones? This should enable cross compilation in the
# future anyway.
if [ -z "${windows}" ]; then
    # Download both architectures' nightly tarballs.
    curl -O http://static.rust-lang.org/dist/rust-nightly-i686-$target.tar.gz
    tar xfz rust-nightly-i686-$target.tar.gz
    curl -O http://static.rust-lang.org/dist/rust-nightly-x86_64-$target.tar.gz
    tar xfz rust-nightly-x86_64-$target.tar.gz
    # dst is the host toolchain to install; src supplies the cross stdlib.
    if [ "${BITS}" = "32" ]; then
        src=x86_64
        dst=i686
    else
        src=i686
        dst=x86_64
    fi
    # Graft the cross-compiled stdlib into the host toolchain and record the
    # copied files in its install manifest so install.sh ships them too.
    cp -r rust-nightly-$src-$target/lib/rustlib/$src-$target \
        rust-nightly-$dst-$target/lib/rustlib
    (cd rust-nightly-$dst-$target && \
        find lib/rustlib/$src-$target/lib -type f >> \
        lib/rustlib/manifest.in)
    ./rust-nightly-$dst-$target/install.sh --prefix=rustc
    rm -rf rust-nightly-$src-$target
    rm -rf rust-nightly-$dst-$target
else
    # Windows: unpack the Inno Setup installer without running it.
    rm -rf *.exe rustc
    curl -O http://static.rust-lang.org/dist/rust-nightly-install.exe
    innounp -y -x rust-nightly-install.exe
    mv '{app}' rustc
fi
set +x
|
<gh_stars>1-10
// Copyright (c) 2016 <NAME>
// Licensed under MIT, see LICENSE file.
package hap.ruleengine.parts.output;
import hap.ruleengine.parts.ConnectionPoint;
import hap.ruleengine.parts.IComponent;
import hap.ruleengine.parts.IValueChangeReceiver;
import hap.ruleengine.parts.data.CompositeDef;
import hap.ruleengine.parts.data.WireDef;
import hap.ruleengine.parts.input.Input;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * A typed output connection point. An Output pushes its value to every
 * connected Input and notifies registered value-change subscribers.
 */
public abstract class Output<T> extends ConnectionPoint implements IOutput
{
	private T myValue;
	// Inputs currently wired to this output.
	private List<Input<T>> myRemote = new ArrayList<>();
	// Re-entrancy guard: non-zero while a set() is already propagating,
	// which breaks value-propagation cycles in the component graph.
	private int myCallCount = 0;

	Output( String name, UUID id, IComponent parent, T defaultValue, boolean isVisibleWhenParentIsVisualized )
	{
		super( name, id, parent, isVisibleWhenParentIsVisualized );
		myValue = defaultValue;
	}

	/** Stores the value and propagates it to connected inputs/subscribers. */
	public void set( T value )
	{
		// Prevent recursive call-chains
		// Only allow update if value has changed, or if it is the first time it is set.
		if( myCallCount == 0 )
		{
			try
			{
				++ myCallCount;
				myValue = value;
				myRemote.forEach( o -> o.set( value ) );
				notifyValueSubscribers();
			}
			catch( Exception ignored )
			{
				// The remote component might leak exceptions so we protect ourselves from them,
				// but we can't do anything about them.
			}
			finally
			{
				-- myCallCount;
			}
		}
	}

	@Override
	public void notifyValueSubscribers()
	{
		// A null value is reported as the empty string.
		for( IValueChangeReceiver rec : valueChangeReceivers )
		{
			rec.newValue( getValue() == null ? "" : getValue().toString() );
		}
	}

	public T getValue()
	{
		return myValue;
	}

	/**
	 * Wires this output to an input. Fails (returns false) if the input is
	 * already connected; on success the current value is transferred.
	 */
	public boolean connect( Input<T> remote )
	{
		boolean res = ! remote.isConnected();
		if( res )
		{
			myRemote.add( remote );
			remote.markConnected();
			// Transfer current value
			set( getValue() );
		}
		return res;
	}

	public void disconnect( Input<T> remote )
	{
		myRemote.remove( remote );
		remote.markDisconnected();
	}

	@Override
	public void disconnectAll()
	{
		for( Input<T> input : myRemote )
		{
			input.markDisconnected();
		}
		myRemote.clear();
	}

	/**
	 * Serializes every connection of this output as a WireDef, deriving the
	 * wire type name from the concrete Output subclass name.
	 */
	@Override
	public void store( CompositeDef data )
	{
		for( Input<T> remote : myRemote )
		{
			WireDef wire = new WireDef();
			wire.setSourceComponent( getOwningComponentId().toString() );
			wire.setSourceOutput( getId().toString() );
			wire.setTargetComponent( remote.getOwningComponentId().toString() );
			wire.setTargetInput( remote.getId().toString() );
			wire.setType( this.getClass().getSimpleName().replace( "Output", "Wire" ) );
			data.getWires().getWireDef().add( wire );
		}
	}
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.common;
import com.facebook.drift.annotations.ThriftConstructor;
import com.facebook.drift.annotations.ThriftField;
import com.facebook.drift.annotations.ThriftStruct;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import javax.annotation.concurrent.Immutable;
import java.util.Objects;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
/**
 * A fully qualified object name (catalog.schema.object). All parts are
 * required to be lowercase; construction rejects anything else.
 */
@Immutable
@ThriftStruct
public class QualifiedObjectName
{
    private final String catalogName;
    private final String schemaName;
    private final String objectName;

    /**
     * Parses "catalog.schema.object".
     * NOTE(review): unlike the other valueOf overloads, this one does not
     * lowercase its parts — non-lowercase input is rejected by the
     * constructor. Confirm this asymmetry is intended.
     */
    @JsonCreator
    public static QualifiedObjectName valueOf(String name)
    {
        if (name == null) {
            throw new NullPointerException("name is null");
        }
        String[] parts = name.split("\\.");
        if (parts.length != 3) {
            throw new IllegalArgumentException("QualifiedObjectName should have exactly 3 parts");
        }
        return new QualifiedObjectName(parts[0], parts[1], parts[2]);
    }

    public static QualifiedObjectName valueOf(CatalogSchemaName catalogSchemaName, String objectName)
    {
        return new QualifiedObjectName(catalogSchemaName.getCatalogName(), catalogSchemaName.getSchemaName(), objectName.toLowerCase(ENGLISH));
    }

    public static QualifiedObjectName valueOf(String catalogName, String schemaName, String objectName)
    {
        return new QualifiedObjectName(catalogName, schemaName, objectName.toLowerCase(ENGLISH));
    }

    @ThriftConstructor
    public QualifiedObjectName(String catalogName, String schemaName, String objectName)
    {
        // Enforce the lowercase invariant on every part.
        checkLowerCase(catalogName, "catalogName");
        checkLowerCase(schemaName, "schemaName");
        checkLowerCase(objectName, "objectName");
        this.catalogName = catalogName;
        this.schemaName = schemaName;
        this.objectName = objectName;
    }

    public CatalogSchemaName getCatalogSchemaName()
    {
        return new CatalogSchemaName(catalogName, schemaName);
    }

    @ThriftField(1)
    public String getCatalogName()
    {
        return catalogName;
    }

    @ThriftField(2)
    public String getSchemaName()
    {
        return schemaName;
    }

    @ThriftField(3)
    public String getObjectName()
    {
        return objectName;
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this) {
            return true;
        }
        if ((obj == null) || (getClass() != obj.getClass())) {
            return false;
        }
        QualifiedObjectName o = (QualifiedObjectName) obj;
        return Objects.equals(catalogName, o.catalogName) &&
                Objects.equals(schemaName, o.schemaName) &&
                Objects.equals(objectName, o.objectName);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(catalogName, schemaName, objectName);
    }

    // Serialized to JSON as the dotted string form.
    @JsonValue
    @Override
    public String toString()
    {
        return catalogName + '.' + schemaName + '.' + objectName;
    }

    private static void checkLowerCase(String value, String name)
    {
        requireNonNull(value, format("%s is null", name));
        if (!value.equals(value.toLowerCase(ENGLISH))) {
            throw new IllegalArgumentException(format("%s is not lowercase: %s", name, value));
        }
    }
}
|
import time
import urllib.request
import urllib.error
import urllib.parse
import logging
import json
from arbitrage import config
from arbitrage.fiatconverter import FiatConverter
from arbitrage.utils import log_exception
class Market(object):
    """Base class for an exchange market.

    Keeps a cached order-book depth, converts prices to USD via
    FiatConverter, and sanity-checks prices against the Cryptowatch API.
    Exchange-specific subclasses implement update_depth/buy/sell.
    """

    def __init__(self, currency, cryptowatch_code=None):
        self.name = self.__class__.__name__
        self.currency = currency
        self.cryptowatch_code = cryptowatch_code
        self.cryptowatch_price = 0
        self.depth_updated = 0
        self.update_rate = 60  # seconds between order-book refreshes
        self.fc = FiatConverter()
        self.fc.update()

    def get_cryptowatch_price(self):
        """Fetch the reference price from Cryptowatch and store it (in USD)
        in self.cryptowatch_price. No-op when no cryptowatch_code is set."""
        if not self.cryptowatch_code:
            return
        url = ('https://api.cryptowat.ch/markets/' +
               self.name.lower().replace('usd', '').replace('eur', '') +
               '/' + self.cryptowatch_code + '/price')
        res = urllib.request.urlopen(url)
        jsonstr = res.read().decode('utf8')
        try:
            price = json.loads(jsonstr)
        except Exception:
            logging.error("Cryptowatch %s - Can't parse json: %s" % (self.name, jsonstr))
            # BUGFIX: the original fell through here and raised NameError on
            # the unbound `price` below.
            return
        self.cryptowatch_price = self.fc.convert(price["result"]["price"], self.currency, "USD")

    def double_ckeck_price(self, price, direction, allowed_percent=None):
        """Return True if `price` is within `allowed_percent` (default 10%)
        of the Cryptowatch reference; log and return False otherwise.

        The misspelled name is kept for backward compatibility with callers.
        """
        if self.cryptowatch_price == 0:
            self.get_cryptowatch_price()
        allowed_percent = allowed_percent if allowed_percent else 10
        if abs(price - self.cryptowatch_price) > self.cryptowatch_price * allowed_percent / 100:
            logging.error("Big diff. @%s depth price(%s) vs Cryptowatch price %f / %f" %
                          (self.name, direction, price, self.cryptowatch_price))
            return False
        return True

    def get_depth(self):
        """Return the cached order book, refreshing when older than
        update_rate; an expired book degrades to zero-price placeholders."""
        timediff = time.time() - self.depth_updated
        if timediff > self.update_rate:
            self.ask_update_depth()
        timediff = time.time() - self.depth_updated
        if timediff > config.market_expiration_time:
            logging.warning('Market: %s order book is expired' % self.name)
            self.depth = {'asks': [{'price': 0, 'amount': 0}],
                          'bids': [{'price': 0, 'amount': 0}]}
        return self.depth

    def convert_to_usd(self):
        """Convert every order price in the depth to USD, in place."""
        if self.currency == "USD":
            return
        for direction in ("asks", "bids"):
            for order in self.depth[direction]:
                # self.double_ckeck_price(order["price"], direction, order)
                # we don't do it here any more
                order["price"] = self.fc.convert(order["price"], self.currency, "USD")

    # there are some prices inside the market depth that are meant to
    # destabilize the market -- clear them out
    def sort_out_market_crush_prices(self):
        """Drop orders deviating more than 30% from the Cryptowatch price."""
        new_depth = {'asks': [], 'bids': []}
        for direction in ("asks", "bids"):
            for order in self.depth[direction]:
                if self.double_ckeck_price(order["price"], direction, 30):
                    new_depth[direction].append(order)
        # BUGFIX: the original indexed self.depth["ask"], but the order book
        # uses the plural keys "asks"/"bids", so this always raised KeyError.
        if len(new_depth["asks"]) != len(self.depth["asks"]) or len(new_depth["bids"]) != len(self.depth["bids"]):
            logging.warning('Market: %s removed some market crush items' % self.name)
        self.depth = new_depth

    def ask_update_depth(self):
        """Refresh the order book via update_depth(); errors are logged,
        never propagated, and leave depth_updated unchanged."""
        try:
            self.update_depth()
            self.convert_to_usd()
            self.get_cryptowatch_price()
            self.depth_updated = time.time()
        except (urllib.error.HTTPError, urllib.error.URLError):
            logging.error("HTTPError, can't update market: %s" % self.name)
            log_exception(logging.DEBUG)
        except Exception as e:
            logging.error("Can't update market: %s - %s" % (self.name, str(e)))
            log_exception(logging.DEBUG)

    def get_ticker(self):
        """Return the best ask/bid orders, or zeros when the book is empty."""
        depth = self.get_depth()
        res = {'ask': 0, 'bid': 0}
        if len(depth['asks']) > 0 and len(depth["bids"]) > 0:
            res = {'ask': depth['asks'][0],
                   'bid': depth['bids'][0]}
        return res

    ## Abstract methods

    def update_depth(self):
        """Fetch the raw order book from the exchange (exchange-specific)."""
        pass

    def buy(self, price, amount):
        """Place a buy order (exchange-specific)."""
        pass

    def sell(self, price, amount):
        """Place a sell order (exchange-specific)."""
        pass
|
/* Copyright 2007-2015 QReal Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include "behaviourPage.h"
#include "ui_behaviourPage.h"
#include <QtCore/QDir>
#include <qrkernel/settingsManager.h>
#include <qrkernel/platformInfo.h>
using namespace qReal;
// Builds the behaviour preferences page: loads the UI form, fills the
// language selector, wires the checkboxes that reveal dependent widgets and
// restores persisted values.
PreferencesBehaviourPage::PreferencesBehaviourPage(QWidget *parent)
	: PreferencesPage(parent)
	, mUi(new Ui::PreferencesBehaviourPage)
{
	mUi->setupUi(this);
	setObjectName("preferencesBehaviourPage");
	setWindowIcon(QIcon(":/preferencesDialog/images/behaviour.png"));
	initLanguages();
	connect(mUi->autoSaveCheckBox, SIGNAL(clicked(bool)), this, SLOT(showAutoSaveBox(bool)));
	connect(mUi->gesturesCheckBox, SIGNAL(toggled(bool)), SLOT(updateGesturesSettings(bool)));
	restoreSettings();
}
PreferencesBehaviourPage::~PreferencesBehaviourPage()
{
	// The page exclusively owns its generated UI object.
	delete mUi;
}
// Re-translates the form in place when the application language changes.
void PreferencesBehaviourPage::changeEvent(QEvent *e)
{
	switch (e->type()) {
	case QEvent::LanguageChange:
		mUi->retranslateUi(this);
		break;
	default:
		break;
	}
}
// Persists every widget value into SettingsManager. A language change only
// takes effect after restart, so the restart flag is raised when it differs
// from the value loaded in restoreSettings().
void PreferencesBehaviourPage::save()
{
	const QString language = mUi->languageComboBox->itemData(mUi->languageComboBox->currentIndex()).toString();
	SettingsManager::setValue("systemLocale", language);
	if (mOldLanguage != language) {
		setRestartFlag();
	}
	SettingsManager::setValue("PaletteTabSwitching", mUi->paletteTabCheckBox->isChecked());
	SettingsManager::setValue("Autosave", mUi->autoSaveCheckBox->isChecked());
	SettingsManager::setValue("AutosaveInterval", mUi->autoSaveSpinBox->value());
	SettingsManager::setValue("gesturesEnabled", mUi->gesturesCheckBox->isChecked());
	SettingsManager::setValue("gestureDelay", mUi->gestureDelaySpinBox->value());
	SettingsManager::setValue("touchMode", mUi->touchModeCheckBox->isChecked());
	SettingsManager::setValue("dockableWidgets", mUi->dockableModeCheckBox->isChecked());
}
// Loads persisted settings into the widgets and records the current locale
// so save() can detect a language change.
void PreferencesBehaviourPage::restoreSettings()
{
	const QString locale = SettingsManager::value("systemLocale").toString();
	mOldLanguage = locale;
	// Select the combo entry whose item data matches the stored locale.
	for (int index = 0; index < mUi->languageComboBox->count(); ++index) {
		if (locale == mUi->languageComboBox->itemData(index).toString()) {
			mUi->languageComboBox->setCurrentIndex(index);
		}
	}
	bool gesturesEnabled = SettingsManager::value("gesturesEnabled").toBool();
	mUi->paletteTabCheckBox->setChecked(SettingsManager::value("PaletteTabSwitching").toBool());
	mUi->autoSaveCheckBox->setChecked(SettingsManager::value("Autosave").toBool());
	mUi->autoSaveSpinBox->setValue(SettingsManager::value("AutosaveInterval").toInt());
	mUi->gestureDelaySpinBox->setValue(SettingsManager::value("gestureDelay").toInt());
	mUi->touchModeCheckBox->setChecked(SettingsManager::value("touchMode").toBool());
	mUi->dockableModeCheckBox->setChecked(SettingsManager::value("dockableWidgets").toBool());
	// Gesture-delay widgets are only meaningful while gestures are enabled.
	mUi->gesturesCheckBox->setChecked(gesturesEnabled);
	mUi->gestureDelaySpinBox->setVisible(gesturesEnabled);
	mUi->gestureDelayLabel->setVisible(gesturesEnabled);
	mUi->gestureDelayTimeUnitLabel->setVisible(gesturesEnabled);
	showAutoSaveBox(mUi->autoSaveCheckBox->isChecked());
	// Palette-tab switching is pointless with a single loaded editor.
	const int editorsLoadedCount = SettingsManager::value("EditorsLoadedCount").toInt();
	mUi->paletteTabCheckBox->setVisible(editorsLoadedCount != 1);
}
// Shows/hides the autosave-interval widgets with the autosave checkbox.
void PreferencesBehaviourPage::showAutoSaveBox(bool show)
{
	mUi->autoSaveSpinBox->setVisible(show);
	mUi->autoSaveLabel->setVisible(show);
}
// Shows/hides the gesture-delay widgets with the gestures checkbox.
void PreferencesBehaviourPage::updateGesturesSettings(bool gesturesEnabled)
{
	mUi->gestureDelaySpinBox->setVisible(gesturesEnabled);
	mUi->gestureDelayLabel->setVisible(gesturesEnabled);
	mUi->gestureDelayTimeUnitLabel->setVisible(gesturesEnabled);
}
// Populates the language selector: a system-default entry (no item data),
// built-in English, then one entry per translation directory found on disk.
void PreferencesBehaviourPage::initLanguages()
{
	// First entry carries no locale data; presumably this means "use the
	// system locale" -- TODO confirm against the consumer of systemLocale.
	mUi->languageComboBox->addItem(tr("<System Language>"));
	mUi->languageComboBox->addItem("English", "en");
	QDir translationsDir(PlatformInfo::invariantSettingsPath("pathToTranslations"));
	for (const QString &locale: translationsDir.entryList(QDir::Dirs)) {
		const QString language = QLocale(locale).nativeLanguageName();
		if (!language.isEmpty()) {
			const QString capitalizedLanguage = language[0].toUpper() + language.mid(1);
			mUi->languageComboBox->addItem(capitalizedLanguage, locale);
		}
	}
}
|
<gh_stars>1-10
// components/hot-list/index.js
// Hot-list panel: navigates to a theme page or to a product's detail page.
Component({
  /**
   * Component property list
   */
  properties: {
    hotlist:Object
  },
  /**
   * Component initial data
   */
  data: {
  },
  /**
   * Component method list
   */
  methods: {
    // Navigate to the (hard-coded) "t-5" theme page.
    ToThemeE(event){
      wx.navigateTo({
        url: `/pages/theme/index?tName=t-5`
      })
    },
    // Navigate to the detail page of the tapped item; the product id comes
    // from the element's data-pid attribute.
    GotoDetail(event){
      const pid = event.currentTarget.dataset.pid
      wx.navigateTo({
        url: `/pages/detail/index?pid=${pid}`
      })
    }
  }
})
|
<filename>wenqi_scripts/vector_distribution.py<gh_stars>0
import time
import sys
import numpy as np
from multiprocessing.dummy import Pool as ThreadPool
# Measure, per embedding dimension, how balanced the sign distribution of the
# vector entries is across up to `nb` vectors read from binary files.
dim = 384
# number of vectors used for statistics
nb = 10 * int(1e3) * int(1e3)
# number of files to load
num_files = 60

# Per-dimension counts of strictly positive / strictly negative entries.
pos_count_dim = np.zeros(dim)
neg_count_dim = np.zeros(dim)
vec_count = 0
for n in range(num_files):
    vec = np.fromfile('../data/computed_embeddings/realnewslike/c4-train.00{}-of-00512.data'.format(str(n).zfill(3)), dtype='float32')
    vec = vec.reshape(-1, dim)
    nvec = vec.shape[0]
    print('\r %d th file, %d th accumulated vector' % (n, vec_count), end=' ')
    sys.stdout.flush()
    # Only scan as many vectors as are still needed to reach the nb budget.
    if vec_count + nvec <= nb:
        nscan = nvec
    else:
        nscan = nb - vec_count
    # Vectorized sign counting: one C-level pass per file instead of the
    # original Python double loop over every element.
    scanned = vec[:nscan]
    pos_count_dim += (scanned > 0).sum(axis=0)
    neg_count_dim += (scanned < 0).sum(axis=0)
    vec_count += nscan
    if vec_count >= nb:
        break

if vec_count < nb:  # files are not enough to provide such number of vecs
    nb = vec_count
print("\nStatistics from {} vectors".format(nb))

unbalanced_count = 0
diff_perc = [[i, 0] for i in range(dim)]  # dim ID & unbalance perc
diff_threshold = 5  # 5% absolute difference
pos_perc = np.zeros(dim)
neg_perc = np.zeros(dim)
for d in range(dim):
    pos_perc[d] = pos_count_dim[d] / nb * 100
    neg_perc[d] = neg_count_dim[d] / nb * 100
    # BUGFIX: str.format does not use %-escaping, so the original "%%"
    # printed a literal double percent sign; a single "%" is correct.
    print("dim = {}\t Pos: {:.2f} % Neg: {:.2f} %".format(d, pos_perc[d], neg_perc[d]))
    diff_perc[d][1] = np.absolute(pos_perc[d] - neg_perc[d])
    if diff_perc[d][1] >= diff_threshold:
        print(" The distribution of the {} th dimension looks unbalanced! diff = {:.2f} %".format(d, diff_perc[d][1]))
        unbalanced_count += 1

# Report dimensions from most to least unbalanced.
diff_perc_sorted = sorted(diff_perc, key=lambda x: x[1], reverse=True)
print("\n ===== Dimension sorted by unbalance =====\n")
for d in range(dim):
    dim_ID = diff_perc_sorted[d][0]
    diff = diff_perc_sorted[d][1]
    print("dim = {}\t Pos = {:.2f} %\tNeg = {:.2f} %\tDiff = {:.2f} %".format(
        dim_ID, pos_perc[dim_ID], neg_perc[dim_ID], diff))
print("\nOut of {} dimensions, {} are unbalanced, percentage = {:.2f} %".format(dim, unbalanced_count, 100 * unbalanced_count / dim))
|
<gh_stars>1-10
import {isPresent, isBlank} from 'angular2/src/facade/lang';
import {List, ListWrapper} from 'angular2/src/facade/collection';
import {DOM} from 'angular2/src/dom/dom_adapter';
import {CompileElement} from './compile_element';
import {CompileControl} from './compile_control';
import {CompileStep} from './compile_step';
import {ProtoViewBuilder} from '../view/proto_view_builder';
import {ProtoViewDto, ViewType} from '../../api';
/**
* CompilePipeline for executing CompileSteps recursively for
* all elements in a template.
*/
export class CompilePipeline {
  _control: CompileControl;
  constructor(steps: List<CompileStep>) { this._control = new CompileControl(steps); }

  /**
   * Compiles the element tree rooted at `rootElement`: sets up the root
   * CompileElement (owning a fresh ProtoViewBuilder) and walks the tree,
   * returning every CompileElement produced. `protoViewType` defaults to
   * COMPONENT when not supplied.
   */
  process(rootElement, protoViewType: ViewType = null,
          compilationCtxtDescription: string = ''): List<CompileElement> {
    if (isBlank(protoViewType)) {
      protoViewType = ViewType.COMPONENT;
    }
    var results = ListWrapper.create();
    var rootCompileElement = new CompileElement(rootElement, compilationCtxtDescription);
    rootCompileElement.inheritedProtoView = new ProtoViewBuilder(rootElement, protoViewType);
    rootCompileElement.isViewRoot = true;
    this._process(results, null, rootCompileElement, compilationCtxtDescription);
    return results;
  }

  // Runs the compile steps on `current`, then recurses into its element
  // children (inheriting proto-view/binder state) and into any elements the
  // steps added.
  _process(results, parent: CompileElement, current: CompileElement,
           compilationCtxtDescription: string = '') {
    var additionalChildren = this._control.internalProcess(results, 0, parent, current);
    if (current.compileChildren) {
      var node = DOM.firstChild(DOM.templateAwareRoot(current.element));
      while (isPresent(node)) {
        // compiliation can potentially move the node, so we need to store the
        // next sibling before recursing.
        var nextNode = DOM.nextSibling(node);
        if (DOM.isElementNode(node)) {
          var childCompileElement = new CompileElement(node, compilationCtxtDescription);
          childCompileElement.inheritedProtoView = current.inheritedProtoView;
          childCompileElement.inheritedElementBinder = current.inheritedElementBinder;
          childCompileElement.distanceToInheritedBinder = current.distanceToInheritedBinder + 1;
          this._process(results, current, childCompileElement);
        }
        node = nextNode;
      }
    }
    if (isPresent(additionalChildren)) {
      for (var i = 0; i < additionalChildren.length; i++) {
        this._process(results, current, additionalChildren[i]);
      }
    }
  }
}
|
package openfaas
import (
"fmt"
"os"
"github.com/TIBCOSoftware/flogo-lib/core/trigger"
)
// jsonMetadata embeds the trigger metadata (name, outputs, reply values)
// that would otherwise be read from trigger.json, so the trigger can be
// built as a plugin shim.
var jsonMetadata = `{
  "name": "openfaas",
  "type": "flogo:trigger",
  "shim": "plugin",
  "ref": "github.com/retgits/flogo-components/trigger/openfaas",
  "version": "0.0.1",
  "title": "OpenFaaS Trigger",
  "description": "OpenFaaS Trigger used to start a flow as a function.",
  "homepage": "https://github.com/retgits/flogo-contrib/tree/master/trigger/openfaas",
  "settings": [
  ],
  "output": [
    {
      "name": "context",
      "type": "object"
    },
    {
      "name": "evt",
      "type": "object"
    }
  ],
  "reply": [
    {
      "name": "data",
      "type": "any"
    },
    {
      "name": "status",
      "type": "integer",
      "value": 200
    }
  ]
}`
// init creates the trigger metadata and registers the trigger factory at
// package load time.
func init() {
	md := trigger.NewMetadata(jsonMetadata)
	// Adding this to make sure it works with the OpenFaaS build process.
	// The way to check it actually runs in OpenFaaS is to see if the variable
	// fprocess is set to "./handler", which is done in the OpenFaaS docker
	// build process; in that case the metadata ID is rewritten to the
	// vendored path used inside the function container.
	if os.Getenv("fprocess") == "./handler" {
		md.ID = fmt.Sprintf("handler/function/vendor/%s", md.ID)
	}
	factory := NewFactory(md)
	trigger.RegisterFactory(md.ID, factory)
}
|
#!/usr/bin/env bash
# Run the given command through reattach-to-user-namespace when it is
# available (needed for clipboard access under tmux on macOS), otherwise run
# it directly.
if [ -n "$(command -v reattach-to-user-namespace)" ]; then
    # BUGFIX: quote "$@" so arguments with spaces/globs survive intact, and
    # exec to replace the shell just like the else-branch does.
    exec reattach-to-user-namespace "$@"
else
    exec "$@"
fi
// Copyright The Linux Foundation and each contributor to CommunityBridge.
// SPDX-License-Identifier: MIT
// Base endpoint for the CINCO project-management API.
export const CINCO_API_URL: string = 'https://cinco_api_endpoint';
// Base endpoint for the CLA API (dev-runze developer instance).
export const CLA_API_URL: string = 'https://cla_api_endpoint/dev-runze';
// Base endpoint for the analytics API.
export const ANALYTICS_API_URL: string = 'https://analytics_api_endpoint';
|
package io.github.rcarlosdasilva.weixin.model.response.open.auth.bean;
import java.io.Serializable;
import java.util.List;
import com.google.common.collect.Lists;
import io.github.rcarlosdasilva.weixin.common.dictionary.OpenPlatformLisensableFunction;
/**
* 授权信息
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class LicensingInformation implements Serializable {

  private static final long serialVersionUID = -1687096878280046105L;

  private String appId;
  // Raw licensed-function ids as delivered by the open platform.
  private List<Integer> functionIds = Lists.newArrayList();
  // Lazily built view of functionIds; null means "needs (re)building".
  private List<OpenPlatformLisensableFunction> functions = null;

  /**
   * Authorizer appid.
   * <p>
   * That is, the appid of the official account or mini-program.
   *
   * @return appid
   */
  public String getAppId() {
    return appId;
  }

  public void setAppId(String appId) {
    this.appId = appId;
  }

  /**
   * Add a function id licensed by the authorizer.
   *
   * @param functionId
   *          id
   */
  public synchronized void addLicencedFunction(Integer functionId) {
    this.functionIds.add(functionId);
    // BUGFIX: invalidate the cached list so a later getLicencedFunctions()
    // call reflects ids added after the cache was first built (the original
    // kept returning the stale cached list).
    this.functions = null;
  }

  /**
   * Resolve and return the functions licensed by the authorizer.
   *
   * @return licensed function list
   */
  public synchronized List<OpenPlatformLisensableFunction> getLicencedFunctions() {
    if (functions == null) {
      functions = Lists.newArrayList();
      for (Integer id : functionIds) {
        functions.add(OpenPlatformLisensableFunction.byCode(id));
      }
    }
    return functions;
  }
}
|
<filename>tests/functional/memoryview_usage.py
def example_bytes_slice():
    """Return True once a prefix slice of the bytes literal equals b'the'."""
    word = b'the lazy brown dog jumped'
    for i in range(10):
        # Memoryview slicing is 10x faster than bytes slicing
        # BUGFIX: compare bytes to bytes — the original compared against the
        # str 'the', which is never equal to a bytes slice in Python 3.
        if word[0:i] == b'the':
            return True
def example_bytes_slice_as_arg(word: bytes):
    """Return True if a prefix of *word* (length < 10) equals b'the'."""
    for i in range(10):
        # Memoryview slicing is 10x faster than bytes slicing
        # BUGFIX: compare bytes to bytes — the original compared against the
        # str 'the', which is never equal to a bytes slice in Python 3.
        if word[0:i] == b'the':
            return True
|
#shellcheck disable=SC2034
#shellcheck disable=SC2039
#shellcheck disable=SC2154
#shellcheck disable=SC1091
# Test metadata consumed by the shared a2 integration-test harness.
test_name="chef-server-backup"
test_backup_restore=true

# Shared helpers: converge_chef_client, ohai_time, test_chef_server_ctl, etc.
source .studio/chef-server-collection

# This test
# 1. deploys a2 with a chef server
# 2. converges the chef client and grabs the ohai_time
# 3. takes a backup, converge the chef-client, and grabs the ohai_time again
# 4. checks that the ohai_time in 2 and the ohai_time in 3 are different
# 5. restores from the backup
# 6. verifies the ohai_time matches the one in 2
# Deploy Automate 2 with the embedded Chef server enabled, using the
# harness-provided hartifacts/origin/manifest variables.
do_deploy() {
    chef-automate deploy config.toml \
        --hartifacts "$test_hartifacts_path" \
        --override-origin "$HAB_ORIGIN" \
        --manifest-dir "$test_manifest_path" \
        --enable-chef-server \
        --admin-password chefautomate \
        --accept-terms-and-mlsa
}
# Smoke-test the deployment: run chef-server-ctl's own test suite, then the
# harness' standard deploy checks.
do_test_deploy() {
    PATH="/hab/bin:/bin" chef-server-ctl test
    test_chef_server_ctl
    do_test_deploy_default
}
# Take a backup and prove the node kept converging afterwards.
# Sets globals consumed later by do_restore: initial_ohai_time, test_backup_id.
do_backup() {
    # Get ohai_time before backup
    converge_chef_client
    initial_ohai_time=$(ohai_time)

    # Backup
    chef-automate backup create

    # Get the backup id
    test_backup_id=$(chef-automate backup list | tail -1 | awk '{print $1}')

    # Get ohai_time after backup
    converge_chef_client
    updated_ohai_time=$(ohai_time)

    # The post-backup converge must have advanced ohai_time, otherwise the
    # later restore check would be vacuous.
    if [[ $initial_ohai_time = "$updated_ohai_time" ]]
    then
        log_error "Initial ohai_time ($initial_ohai_time) should not match updated ohai_time ($updated_ohai_time)."
        return 1
    fi
    log_info "Initial ohai_time ($initial_ohai_time) correctly does not match updated ohai_time ($updated_ohai_time)."
}
# Restore the backup taken in do_backup and verify the node state rolled back
# to the pre-backup ohai_time. Relies on globals set by do_backup.
do_restore() {
    chef-automate backup restore --debug --override-origin "$HAB_ORIGIN" "$test_backup_id"

    restored_ohai_time=$(ohai_time)

    # Check ohai_time after restore matches ohai_time from backup
    if [[ $restored_ohai_time != "$initial_ohai_time" ]]
    then
        log_error "Restored ohai_time ($restored_ohai_time) should match ohai_time ($initial_ohai_time) from backup."
        return 1
    fi
    log_info "Restored ohai_time ($restored_ohai_time) correctly matches ohai_time ($initial_ohai_time) from backup."

    delete_backup_and_assert_idempotent
}
|
#!/bin/bash
# Fetch the current camera snapshot and prune snapshots older than one minute.
NOW=$(date +"%Y-%m-%d_%H-%M-%S")
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"

# Provides $username, $password, $storage_path, $picture_url.
source "$SCRIPTPATH/update_current_picture.conf"

# Quote every expansion: paths or credentials containing spaces or globbing
# characters would otherwise word-split and break the command lines.
wget --user="$username" --password="$password" -O "$storage_path/snap_$NOW.jpg" "$picture_url"
find "$storage_path/" -maxdepth 1 -name 'snap*' -type f -mmin +1 -exec rm -rfv {} \;
|
package com.johanneswolfgruber.learntolisten;
import android.speech.tts.TextToSpeech;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageButton;
import android.widget.TextView;
import java.util.Locale;
/**
 * Game screen that teaches sound recognition: each of the eight image buttons
 * first speaks its label via TTS (double-click helper) and, once armed, plays
 * the corresponding game sound.
 */
public class GamesoundsActivity extends AppCompatActivity implements TextToSpeech.OnInitListener {

    private TextView mTextView1, mTextView2, mTextView3, mTextView4, mTextView5, mTextView6,
            mTextView7, mTextView8;
    // Per-button double-click state, index 0-7: 0 = waiting for first tap,
    // 1 = armed (next tap plays the sound). Replaces eight copy-pasted fields.
    private final int[] mButtonStates = new int[8];
    private Animation mAnimationBlendIn;
    private TextToSpeech mTTS;
    private Sound mSound;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_gamesounds);

        mSound = MainMenuActivity.getSounds();
        //mSound.initSounds(this);
        mTTS = new TextToSpeech(this, this);
        mAnimationBlendIn = AnimationUtils.loadAnimation(this, R.anim.blend_in);

        mTextView1 = (TextView) findViewById(R.id.textView);
        mTextView2 = (TextView) findViewById(R.id.textView2);
        mTextView3 = (TextView) findViewById(R.id.textView3);
        mTextView4 = (TextView) findViewById(R.id.textView4);
        mTextView5 = (TextView) findViewById(R.id.textView5);
        mTextView6 = (TextView) findViewById(R.id.textView6);
        mTextView7 = (TextView) findViewById(R.id.textView7);
        mTextView8 = (TextView) findViewById(R.id.textView8);

        // One shared wiring routine instead of eight identical listeners.
        setupSoundButton(R.id.imageButton, mTextView1, 0);
        setupSoundButton(R.id.imageButton2, mTextView2, 1);
        setupSoundButton(R.id.imageButton3, mTextView3, 2);
        setupSoundButton(R.id.imageButton4, mTextView4, 3);
        setupSoundButton(R.id.imageButton5, mTextView5, 4);
        setupSoundButton(R.id.imageButton6, mTextView6, 5);
        setupSoundButton(R.id.imageButton7, mTextView7, 6);
        setupSoundButton(R.id.imageButton8, mTextView8, 7);
    }

    /**
     * Wires one image button: first click path hands off to the double-click
     * helper (which speaks the label), armed state (1) plays the game sound
     * and resets the state.
     *
     * @param buttonResId layout id of the ImageButton
     * @param label       TextView whose current text is spoken on first tap
     * @param index       slot in mButtonStates / sound table (0-7)
     */
    private void setupSoundButton(int buttonResId, final TextView label, final int index) {
        ImageButton button = (ImageButton) findViewById(buttonResId);
        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mButtonStates[index] == 0) {
                    mButtonStates[index] = DoubleClick.doubleClick(GamesoundsActivity.this, mTTS,
                            label.getText().toString(), mButtonStates[index]);
                } else if (mButtonStates[index] == 1) {
                    mButtonStates[index] = 0;
                    mSound.playSound(soundIdForIndex(index), 1.0f);
                }
            }
        });
    }

    // Maps a button slot to its game sound. Looked up lazily at click time so
    // it always reflects the current mSound instance (refreshed in onResume).
    private int soundIdForIndex(int index) {
        switch (index) {
            case 0: return mSound.getSoundIDSteps();
            case 1: return mSound.getSoundIDExercise();
            case 2: return mSound.getSoundIDRightAnswer();
            case 3: return mSound.getSoundIDWrongAnswer();
            case 4: return mSound.getSoundIDwallLeft();
            case 5: return mSound.getSoundIDLadder();
            case 6: return mSound.getSoundIDLadderDown();
            default: return mSound.getSoundIDDoor();
        }
    }

    @Override
    public void onInit(int status) {
        // BUGFIX: only configure the language when the engine actually
        // initialised; calling setLanguage after a failed init is undefined.
        if (status == TextToSpeech.SUCCESS) {
            mTTS.setLanguage(Locale.US);
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        mSound = MainMenuActivity.getSounds();
        View v = findViewById(R.id.root_constraint_layout_gamesounds);
        v.startAnimation(mAnimationBlendIn);
    }

    @Override
    public void onDestroy() {
        // BUGFIX: release the TTS engine — without shutdown() the service
        // connection leaks every time this activity is destroyed.
        if (mTTS != null) {
            mTTS.stop();
            mTTS.shutdown();
        }
        super.onDestroy();
    }
}
|
<reponame>lerages/anarchy-source
package org.rs2server.rs2.model.map;
import org.rs2server.rs2.model.Location;
/**
 * Movement-direction helpers: maps tile deltas to walking (1-tile) and
 * running (2-tile) direction codes used by the client protocol.
 */
public class Directions {

    /** Two-tile ("running") movement directions and their protocol codes. */
    public static enum RunningDirection {
        EE(8), N_EE(10), N_WW(9), NN(13), NN_E(14), NN_EE(15), NN_W(12), NN_WW(11), S_EE(6), S_WW(5), SS(2), SS_E(3), SS_EE(4), SS_W(1), SS_WW(0), WW(7);

        private int dir;

        private RunningDirection(int dir) {
            this.dir = dir;
        }

        public int intValue() {
            return dir;
        }

        public int npcIntValue() {
            throw new UnsupportedOperationException("The GNP protocol does not support 2 step running directions!");
        }

        @Override
        public String toString() {
            return "[run] [dir=" + dir + ", type=" + super.toString() + "]";
        }
    }

    /** Single-tile movement directions with player and NPC protocol codes. */
    public static enum NormalDirection {
        EAST(4, 2), NORTH(6, 0), NORTH_EAST(7, 1), NORTH_WEST(5, 7), SOUTH(1, 4), SOUTH_EAST(2, 3), SOUTH_WEST(0, 5), WEST(3, 6);

        /** Decodes a player protocol code (0-7), or null if out of range. */
        public static NormalDirection forIntValue(int value) {
            switch (value) {
                case 0:
                    return SOUTH_WEST;
                case 1:
                    return SOUTH;
                case 2:
                    return SOUTH_EAST;
                case 3:
                    return WEST;
                case 4:
                    return EAST;
                case 5:
                    return NORTH_WEST;
                case 6:
                    return NORTH;
                case 7:
                    return NORTH_EAST;
            }
            return null;
        }

        /** Decodes an NPC protocol code (0-7), or null if out of range. */
        public static NormalDirection forNpcDirValue(int value) {
            switch (value) {
                case 0:
                    return NORTH;
                case 1:
                    return NORTH_EAST;
                case 2:
                    return EAST;
                case 3:
                    return SOUTH_EAST;
                case 4:
                    return SOUTH;
                case 5:
                    return SOUTH_WEST;
                case 6:
                    return WEST;
                case 7:
                    return NORTH_WEST;
            }
            return null;
        }

        private int dir;
        private int npcDir;

        private NormalDirection(int dir, int npcDir) {
            this.dir = dir;
            this.npcDir = npcDir;
        }

        public int intValue() {
            return dir;
        }

        public int npcIntValue() {
            return npcDir;
        }

        @Override
        public String toString() {
            return "[walk] [dir=" + dir + ", type=" + super.toString() + "]";
        }

        public String stringValue() {
            return super.toString();
        }
    }

    public static final byte[] DIRECTION_DELTA_X = new byte[]{-1, 0, 1, -1, 1, -1, 0, 1};
    public static final byte[] DIRECTION_DELTA_Y = new byte[]{-1, -1, -1, 0, 0, 1, 1, 1};

    /** Walking direction from one location to an adjacent one (null if equal). */
    public static NormalDirection directionFor(Location currentPos, Location nextPos) {
        // Delegate to the delta overload instead of duplicating the if/else ladder.
        return directionFor(nextPos.getX() - currentPos.getX(), nextPos.getY() - currentPos.getY());
    }

    /** Running direction for a (dirX, dirY) delta of up to 2 tiles, or null. */
    public static RunningDirection runningDirectionFor(int dirX, int dirY) {
        switch (dirX) {
            case -2:
                switch (dirY) {
                    case -2:
                        return RunningDirection.SS_WW;
                    case -1:
                        return RunningDirection.S_WW;
                    case 0:
                        return RunningDirection.WW;
                    case 1:
                        return RunningDirection.N_WW;
                    case 2:
                        return RunningDirection.NN_WW;
                }
                return null;
            case -1:
                switch (dirY) {
                    case -2:
                        return RunningDirection.SS_W;
                    case 2:
                        return RunningDirection.NN_W;
                }
                return null;
            case 0:
                switch (dirY) {
                    case -2:
                        return RunningDirection.SS;
                    case 2:
                        return RunningDirection.NN;
                }
                return null;
            case 1:
                switch (dirY) {
                    case -2:
                        return RunningDirection.SS_E;
                    case 2:
                        return RunningDirection.NN_E;
                }
                return null;
            case 2:
                switch (dirY) {
                    case -2:
                        return RunningDirection.SS_EE;
                    case -1:
                        return RunningDirection.S_EE;
                    case 0:
                        return RunningDirection.EE;
                    case 1:
                        return RunningDirection.N_EE;
                    case 2:
                        return RunningDirection.NN_EE;
                }
                return null;
        }
        return null;
    }

    /** Running direction from (curX, curY) to (dstX, dstY), or null. */
    public static RunningDirection runningDirectionFor(int curX, int curY, int dstX, int dstY) {
        // BUGFIX: this overload previously computed dirY as (dstY - curX) —
        // an x/y mix-up — and duplicated the entire switch. Delegate instead.
        return runningDirectionFor(dstX - curX, dstY - curY);
    }

    /** Walking direction for a (dirX, dirY) delta; null when both are zero. */
    public static NormalDirection directionFor(int dirX, int dirY) {
        if (dirX < 0) {
            if (dirY < 0)
                return NormalDirection.SOUTH_WEST;
            else if (dirY > 0)
                return NormalDirection.NORTH_WEST;
            else
                return NormalDirection.WEST;
        } else if (dirX > 0) {
            if (dirY < 0)
                return NormalDirection.SOUTH_EAST;
            else if (dirY > 0)
                return NormalDirection.NORTH_EAST;
            else
                return NormalDirection.EAST;
        } else {
            if (dirY < 0)
                return NormalDirection.SOUTH;
            else if (dirY > 0)
                return NormalDirection.NORTH;
            else
                return null;
        }
    }

    /** Walking direction from (curX, curY) to (dstX, dstY), or null. */
    public static NormalDirection directionFor(int curX, int curY, int dstX, int dstY) {
        // BUGFIX: dirY was previously computed as (dstY - curX); also
        // deduplicated by delegating to the delta overload above.
        return directionFor(dstX - curX, dstY - curY);
    }
}
|
package auth
import (
"math/rand"
"strings"
"time"
"github.com/dgrijalva/jwt-go"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/khanakia/jgo/pkg/util"
"github.com/pkg/errors"
"golang.org/x/crypto/bcrypt"
"gorm.io/gorm"
)
// GenerateUID returns a fresh random UUID string.
func GenerateUID() string {
	return uuid.New().String()
}
// GetUserSecret returns the user's per-user token secret, lazily generating
// and persisting one the first time it is requested.
func GetUserSecret(user User, db *gorm.DB) string {
	if user.Secret == "" {
		user.Secret = util.GenerateUID()
		db.Save(&user)
	}
	return user.Secret
}
// GetUserUID returns the user's stable UID, lazily generating and persisting
// one the first time it is requested.
func GetUserUID(user User, db *gorm.DB) string {
	if len(user.UID) == 0 {
		user.UID = util.GenerateUID()
		db.Save(&user)
	}
	return user.UID
}
/*
 * This signature is used to create a JWT token.
 * Benefits - By combining the user secret with the app secret the app is more secure:
 * if the appSecret is compromised, nobody can generate tokens without the user secret,
 * and if a userSecret is compromised it affects only that single user, not all users.
 * If a user wants to forcefully log out of all applications we simply update their userSecret.
 * FUTURE CONSIDERATION - Add the JWT token to a blacklist when users log out.
 */
// GetSignature builds the HMAC signing key ("appSecret:userSecret") used for
// this user's JWTs.
func GetSignature(user User, db *gorm.DB) string {
	userSecret := GetUserSecret(user, db)
	appSecret := util.GetEnv("appSecret", "IBIrewORShiVReBASTer")
	signature := appSecret + ":" + userSecret
	return signature
}
// GeneratePassword - Create Bcrypt from string
// NOTE(review): the error from GenerateFromPassword is discarded; bcrypt can
// fail for an invalid cost or over-long (>72 byte) input — confirm callers
// never pass such input, otherwise an empty hash is silently stored.
func GeneratePassword(password string) string {
	passwordHash, _ := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	return string(passwordHash)
}
// PasswordMatch - Compare two passwords are equal
// password is the stored bcrypt hash; password1 is the candidate plaintext.
func PasswordMatch(password string, password1 string) bool {
	// CompareHashAndPassword returns nil exactly when the pair matches, so
	// the previous if/else around the error was redundant.
	return bcrypt.CompareHashAndPassword([]byte(password), []byte(password1)) == nil
}
// RandomPass returns an 8-character numeric string.
// WARNING(review): math/rand is a deterministic, seedable PRNG and is NOT
// safe for passwords/OTPs — an attacker who can estimate the seed time can
// reproduce the output. This should use crypto/rand; also, re-seeding on
// every call is deprecated since Go 1.20. Flagged rather than silently
// replaced because the fix changes the file's import set.
func RandomPass() string {
	rand.Seed(time.Now().UnixNano())
	chars := []rune("0123456789")
	length := 8
	var b strings.Builder
	for i := 0; i < length; i++ {
		b.WriteRune(chars[rand.Intn(len(chars))])
	}
	str := b.String() // e.g. "84910357"
	return str
}
// CreateToken issues an HS256-signed JWT for the user, valid for 500 minutes,
// signed with the per-user signature from GetSignature.
func CreateToken(user User, db *gorm.DB) (string, error) {
	expirationTime := time.Now().Add(500 * time.Minute) // 500 minute
	claims := &Claims{
		// ID: user.ID,
		Email: user.Email,
		UID:   user.UID,
		StandardClaims: jwt.StandardClaims{
			// jwt-go's ExpiresAt is a Unix timestamp in *seconds* (the old
			// comment incorrectly said milliseconds).
			ExpiresAt: expirationTime.Unix(),
		},
	}
	// Declare the token with the algorithm used for signing, and the claims
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
	signature := []byte(GetSignature(user, db))
	tokenString, err := token.SignedString(signature)
	return tokenString, err
}
// GetUserFromContext extracts the authenticated User previously stored on
// the gin context under the "user" key (by the auth middleware).
func GetUserFromContext(c *gin.Context) (User, error) {
	raw, _ := c.Get("user")
	if raw == nil {
		return User{}, errors.New("User not found")
	}
	user, _ := raw.(User)
	return user, nil
}
// CheckEmailExists reports whether a user row with the given email exists.
func CheckEmailExists(email string, db *gorm.DB) bool {
	var count int64
	db.Model(User{}).Where("email = ?", email).Count(&count)
	// Return the comparison directly instead of the previous if/else.
	return count > 0
}
// FindByEmail looks up a user by email (normalised to lower case).
// Returns nil when no matching row exists.
func FindByEmail(email string, db *gorm.DB) *User {
	var user User
	result := db.First(&user, &User{Email: strings.ToLower(email)})
	// errors.Is(nil, ...) is false, so the explicit nil check is unnecessary.
	if errors.Is(result.Error, gorm.ErrRecordNotFound) {
		return nil
	}
	return &user
}
// GetUser fetches a user by primary key, or nil when the id is unknown.
func GetUser(id uint, db *gorm.DB) *User {
	var user User
	if err := db.First(&user, id).Error; err != nil && errors.Is(err, gorm.ErrRecordNotFound) {
		return nil
	}
	return &user
}
|
package uk.joshiejack.husbandry.data;
import net.minecraft.block.Block;
import net.minecraft.data.DataGenerator;
import net.minecraft.util.Direction;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.model.generators.BlockStateProvider;
import net.minecraftforge.client.model.generators.ConfiguredModel;
import net.minecraftforge.client.model.generators.ModelFile;
import net.minecraftforge.client.model.generators.VariantBlockStateBuilder;
import net.minecraftforge.common.data.ExistingFileHelper;
import uk.joshiejack.husbandry.Husbandry;
import uk.joshiejack.husbandry.block.*;
@SuppressWarnings("ConstantConditions")
public class HusbandryBlockStates extends BlockStateProvider {

    public HusbandryBlockStates(DataGenerator gen, ExistingFileHelper exFileHelper) {
        super(gen, Husbandry.MODID, exFileHelper);
    }

    /**
     * Registers blockstates for every Husbandry block: static models for the
     * simple blocks, a rotated variant per horizontal facing for the
     * incubator, and multipart overlays for the three feeder blocks.
     */
    @Override
    protected void registerStatesAndModels() {
        model(HusbandryBlocks.NEST.get());
        model(HusbandryBlocks.TRUFFLE_BLOCK.get());
        // The incubator reuses a single model, rotated to match FACING.
        ModelFile file = models().getExistingFile(HusbandryBlocks.INCUBATOR.get().getRegistryName());
        VariantBlockStateBuilder builder = getVariantBuilder(HusbandryBlocks.INCUBATOR.get());
        builder.partialState().with(IncubatorBlock.FACING, Direction.EAST).modelForState().modelFile(file).rotationY(90).addModel();
        builder.partialState().with(IncubatorBlock.FACING, Direction.WEST).modelForState().modelFile(file).rotationY(270).addModel();
        builder.partialState().with(IncubatorBlock.FACING, Direction.NORTH).modelForState().modelFile(file).rotationY(0).addModel();
        builder.partialState().with(IncubatorBlock.FACING, Direction.SOUTH).modelForState().modelFile(file).rotationY(180).addModel();
        bowl(HusbandryBlocks.BOWL.get());
        trough(HusbandryBlocks.TROUGH.get());
        feedingtray(HusbandryBlocks.FEEDING_TRAY.get());
    }

    // Feeding tray: base model plus a bird-feed overlay at fill levels 1 (semi)
    // and 2 (full).
    private void feedingtray(Block block) {
        getMultipartBuilder(block)
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "feeding_tray"))).addModel().end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "bird_feed_semi")))
                .addModel().condition(FeedingTrayBlock.FILL, 1).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "bird_feed_full")))
                .addModel().condition(FeedingTrayBlock.FILL, 2).end();
    }

    // Trough: base model plus a hay or slop overlay for fill levels 1-4.
    private void trough(Block block) {
        getMultipartBuilder(block)
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "trough"))).addModel().end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "hay_one_quarter")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.HAY).condition(TroughBlock.FILL, 1).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "hay_two_quarters")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.HAY).condition(TroughBlock.FILL, 2).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "hay_three_quarters")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.HAY).condition(TroughBlock.FILL, 3).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "hay_four_quarters")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.HAY).condition(TroughBlock.FILL, 4).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "slop_one_quarter")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.SLOP).condition(TroughBlock.FILL, 1).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "slop_two_quarters")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.SLOP).condition(TroughBlock.FILL, 2).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "slop_three_quarters")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.SLOP).condition(TroughBlock.FILL, 3).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "slop_four_quarters")))
                .addModel().condition(TroughBlock.TYPE, TroughBlock.FoodType.SLOP).condition(TroughBlock.FILL, 4).end();
    }

    // Bowl: base model plus cat/dog/rabbit food overlays for fill levels 1-2.
    // NOTE(review): these conditions use TroughBlock.FILL rather than a
    // BowlBlock fill property — confirm BowlBlock really registers
    // TroughBlock's FILL property on its state, otherwise this is a
    // copy-paste bug from trough().
    private void bowl(Block block) {
        getMultipartBuilder(block)
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "bowl"))).addModel().end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "cat_food_semi")))
                .addModel().condition(BowlBlock.TYPE, BowlBlock.FoodType.CAT_FOOD).condition(TroughBlock.FILL, 1).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "cat_food_full")))
                .addModel().condition(BowlBlock.TYPE, BowlBlock.FoodType.CAT_FOOD).condition(TroughBlock.FILL, 2).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "dog_food_semi")))
                .addModel().condition(BowlBlock.TYPE, BowlBlock.FoodType.DOG_FOOD).condition(TroughBlock.FILL, 1).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "dog_food_full")))
                .addModel().condition(BowlBlock.TYPE, BowlBlock.FoodType.DOG_FOOD).condition(TroughBlock.FILL, 2).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "rabbit_food_semi")))
                .addModel().condition(BowlBlock.TYPE, BowlBlock.FoodType.RABBIT_FOOD).condition(TroughBlock.FILL, 1).end()
                .part().modelFile(models().getExistingFile(new ResourceLocation(Husbandry.MODID, "rabbit_food_full")))
                .addModel().condition(BowlBlock.TYPE, BowlBlock.FoodType.RABBIT_FOOD).condition(TroughBlock.FILL, 2).end();
    }

    // Fallback: a single static model used for every state of the block.
    protected void model(Block block) {
        ModelFile file = models().getExistingFile(block.getRegistryName());
        getVariantBuilder(block).forAllStates(state -> ConfiguredModel.builder().modelFile(file).build());
    }
}
|
// https://uva.onlinejudge.org/external/4/459.pdf
#include<bits/stdc++.h>
using namespace std;
using vi=vector<int>;
using vvi=vector<vi>;
int main(){
int t;
string s;
cin>>t;
getline(cin, s);
getline(cin, s);
while(t--){
int n,u,v,c=0;
getline(cin, s);
n=s[0]-'A'+1;
vvi g(n);
while(1){
getline(cin,s);
if(s.empty())break;
u=s[0]-'A';
v=s[1]-'A';
g[u].push_back(v);
g[v].push_back(u);
}
vi s(n);
function<void(int)>dfs=[&](int i){
s[i]=1;
for(int j:g[i])
if(!s[j])
dfs(j);
};
for(int i=0;i<n;i++)
if(!s[i]){
c++;
dfs(i);
}
cout<<c<<"\n";
if(t)cout<<"\n";
}
}
|
def dataset2dict(file_path: str) -> dict:
result = {}
with open(file_path, 'r') as file:
for line in file:
key, value = line.strip().split(':')
result[key] = value
return result |
<filename>packages/drip-table-driver-antd/src/index.ts
/**
* This file is part of the drip-table project.
* @link : https://drip-table.jd.com/
* @author : <NAME> (<EMAIL>)
* @modifier : <NAME> (<EMAIL>)
* @copyright: Copyright (c) 2021 JD Network Technology Co., Ltd.
*/
import * as AntDesign from 'antd';
import zhCN from 'antd/lib/locale/zh_CN';
import * as AntDesignIcons from '@ant-design/icons';
// Bundle Ant Design's component set, icon set and zh-CN locale into the
// single driver object that drip-table consumes.
const DripTableDriverAntDesign = {
  components: AntDesign,
  icons: AntDesignIcons,
  locale: zhCN,
};

export default DripTableDriverAntDesign;
|
require 'spec_helper'
# our classes. note that room does not have a belongs_to definition
# Minimal fixture model exercising the has_many macro under test.
class House < Record
  has_many :rooms
end
# Intentionally declares no belongs_to — HasMany must work without an inverse.
class Room < Record
end
# Behavioural spec for the HasMany relation: accessor generation, counting,
# add/remove semantics (including nil/duplicate/array handling) and id listing.
RSpec.describe HasMany do
  before(:each) do
    @house = House.build
    @room1 = Room.build(id:1)
    @room2 = Room.build(id:2)
    @room3 = Room.build(id:3)
    @rooms = [@room1,@room2,@room3]
  end
  describe "#rooms" do
    it "should exists" do
      expect(@house.respond_to? :rooms).to eq(true)
    end
  end
  describe "#rooms" do
    it "should should be a HasMany relation" do
      expect(@house.rooms).to be_a(HasMany)
    end
  end
  describe "#relations['rooms']" do
    it "should be a HasMany relation" do
      expect(@house.relations['rooms']).to be_a(HasMany)
    end
  end
  describe "#room" do
    # Only the plural accessor should be generated for a has_many.
    it "should not exist" do
      expect(@house.respond_to? :room).to eq(false)
    end
  end
  describe "#count" do
    it "should return zero when empty" do
      expect(@house.rooms.count).to eq(0)
    end
    it "should return number of items" do
      @rooms.each { |room| @house.rooms.add room }
      expect(@house.rooms.count).to eq(@rooms.size)
    end
  end
  describe "#add" do
    it "should not add nil" do
      @house.rooms.add nil
      expect(ids @house.rooms.all).to eq([])
    end
    it "should raise if adding anything but a Record" do
      expect{ @house.rooms.add House}.to raise_exception(Redisant::InvalidArgument)
    end
    it "should raise if adding wrong Record type" do
      expect{ @house.rooms.add "bad"}.to raise_exception(Redisant::InvalidArgument)
    end
    it "should add items" do
      @rooms.each { |room| @house.rooms.add room }
      expect(ids @house.rooms.all).to eq(ids @rooms)
    end
    it "should not add the same items twice" do
      3.times { @house.rooms.add @rooms.first }
      expect(@house.rooms.count).to eq(1)
      expect(ids @house.rooms.all).to eq([@rooms.first.id])
    end
    it "should add array of items" do
      @house.rooms.add @rooms
      expect(ids @house.rooms.all).to eq(ids @rooms)
    end
  end
  describe "#build" do
    it "should build and add object" do
      room = @house.rooms.build name:'suite'
      # NOTE(review): id 4 assumes ids 1-3 were consumed by the before block
      # and that the sequence is reset between examples — confirm the
      # spec_helper guarantees this.
      expect(room.attribute :name).to eq('suite')
      expect(@house.rooms.count).to eq(1)
      expect(ids @house.rooms.all).to eq([4])
    end
  end
  describe "#<<" do
    it "should add items" do
      @house.rooms.add @rooms
      expect(ids @house.rooms.all).to eq(ids @rooms)
    end
    it "should not add the same items twice" do
      3.times { @house.rooms << @rooms.first }
      expect(@house.rooms.count).to eq(1)
      expect(ids @house.rooms.all).to eq([@rooms.first.id])
    end
    it "should add array of items" do
      @house.rooms << @rooms
      expect(ids @house.rooms.all).to eq(ids @rooms)
    end
  end
  describe "#remove" do
    # Removal only detaches from the relation; the Room records themselves
    # survive — hence the Room.count assertions.
    it "should ignore nil" do
      @house.rooms.add @rooms
      @house.rooms.remove nil
      expect(ids @house.rooms.all).to eq([1,2,3])
      expect(Room.count).to eq(3)
    end
    it "should remove first item" do
      @house.rooms.add @rooms
      @house.rooms.remove @room1
      expect(ids @house.rooms.all).to eq([2,3])
      expect(Room.count).to eq(3)
    end
    it "should remove last item" do
      @house.rooms.add @rooms
      @house.rooms.remove @room3
      expect(ids @house.rooms.all).to eq([1,2])
      expect(Room.count).to eq(3)
    end
    it "should remove middle item" do
      @house.rooms.add @rooms
      @house.rooms.remove @room2
      expect(ids @house.rooms.all).to eq([1,3])
      expect(Room.count).to eq(3)
    end
    it "should remove all items" do
      @house.rooms.add @rooms
      @house.rooms.remove @room1
      @house.rooms.remove @room2
      @house.rooms.remove @room3
      expect(ids @house.rooms.all).to eq([])
      expect(Room.count).to eq(3)
    end
    it "should remove array of items" do
      @house.rooms.add @rooms
      @house.rooms.remove [@room1,@room3]
      expect(ids @house.rooms.all).to eq([2])
      expect(Room.count).to eq(3)
    end
  end
  describe "#remove_all" do
    it "should ignore nil" do
      @house.rooms.add @rooms
      @house.rooms.remove nil
      expect(ids @house.rooms.all).to eq([1,2,3])
      expect(Room.count).to eq(3)
    end
    it "should remove all items" do
      @house.rooms.add @rooms
      @house.rooms.remove_all
      expect(ids @house.rooms.all).to eq([])
      expect(Room.count).to eq(3)
    end
  end
  describe "#all" do
    it "should return empty array when empty" do
      expect(@house.rooms.all).to eq([])
    end
    it "should return all items" do
      @house.rooms.add @rooms
      expect(ids @house.rooms.all).to eq(ids @rooms)
    end
  end
  describe "#ids" do
    it "should return empty array when empty" do
      expect(@house.rooms.ids).to eq([])
    end
    it "should return all items" do
      @house.rooms.add @rooms
      expect(@house.rooms.ids).to eq(ids @rooms)
    end
  end
end
#!/usr/bin/env node
/* enpòt'
* https://github.com/leny/enpot
*
* JS Document - /src/enpot.js - cli entry point, setup and runner
*
* Copyright (c) 2015 Leny
* Licensed under the MIT license.
*/
import fs from "fs";
import argsParser from "./arguments-parser";
import { spinner, error, list, details, help, version, completion, success } from "./output";
import getUserGists from "./gist/list";
import downloadGistFiles from "./gist/download";
// CLI entry: dispatch help/version/completion, otherwise list a user's gists
// or download one gist's files into the destination directory.
let aArgv = process.argv.slice( 2 );

if ( !aArgv[ 0 ] || [ "-h", "--help" ].indexOf( aArgv[ 0 ] ) > -1 ) {
    help();
}

if ( [ "-v", "--version" ].indexOf( aArgv[ 0 ] ) > -1 ) {
    version();
}

if ( aArgv[ 0 ] === "completion" ) {
    completion();
} else {
    let {
        "user": sGistUser,
        "gist": sGistID,
        "destination": sDestinationPath,
        "force": bForceReload,
        "show": bShowFiles
    } = argsParser( aArgv );

    // NOTE(review): statSync throws if the path doesn't exist at all —
    // confirm that crash is acceptable vs. reporting it via error().
    if ( fs.statSync( sDestinationPath ).isDirectory() === false ) {
        error( "Destination path isn't a directory." );
    }
    spinner.start();
    getUserGists( sGistUser, bForceReload && !sGistID )
        .then( ( oGists ) => {
            if ( sGistID ) {
                let oGist;

                // BUGFIX: these messages previously interpolated chalk.yellow/
                // chalk.cyan, but chalk was never imported — every error path
                // crashed with a ReferenceError before the message printed.
                if ( !( oGist = oGists[ sGistID ] ) ) {
                    error( `Unknown gist ${ sGistID } from user ${ sGistUser }.` );
                }
                if ( !oGist.files.length ) {
                    error( `No downloadable files in ${ sGistID } from user ${ sGistUser }.` );
                }
                if ( bShowFiles ) {
                    details( oGist );
                }
                return downloadGistFiles( oGist.files, sDestinationPath );
            }
            spinner.stop( true );
            // NOTE(review): in the list branch the chain continues with an
            // undefined value, so success() below receives undefined —
            // confirm success() tolerates that.
            list( sGistUser, oGists );
        } )
        .then( ( aSavedFiles ) => {
            spinner.stop( true );
            success( sGistUser, sGistID, aSavedFiles );
        } )
        .catch( error );
}
|
#!/bin/bash
# Usage: $1 = container runtime binary, $2 = dir containing bundles/, $3 = label.
echo "$3"

# Iterate bundle directories with a glob instead of parsing `ls` output,
# which word-splits and breaks on names containing whitespace.
for bundle in "$2"/bundles/*
do
    i=$(basename "$bundle")
    container=container-$i
    touch "/var/run/checkin-clear-containers/container-$i"
    # curl -i -s "127.0.0.1:9090/checkin?containerID='"$container"'&event=Starting"
    echo "Running container-$i"
    # Launch each container in the background.
    sudo "$1" run -b "$2/bundles/$i" "container-$i" &
done
|
class Api::V4::Runners::GamesController < Api::V4::ApplicationController
  # set_runner (defined in a superclass/concern) is expected to populate @runner.
  before_action :set_runner
  before_action :set_games, only: [:index]

  # GET /api/v4/runners/:id/games — paginated list of the runner's games.
  def index
    games = paginate @games
    render json: Api::V4::GameBlueprint.render(games, root: :games, toplevel: :game)
  end

  private

  # Eager-load the associations the blueprint serialises to avoid N+1 queries.
  def set_games
    @games = @runner.games.includes(:srdc, categories: [:srdc])
  end
end
|
require 'toyrobot/cli'
require 'toyrobot/command'
require 'toyrobot/robot'
require 'toyrobot/simulation'
require 'toyrobot/table'
require 'toyrobot/version'
# Top-level namespace for the Toy Robot simulator; the implementation lives
# in the toyrobot/* files required above.
module Toyrobot
  # Your code goes here...
end
|
class Measurement:
    """In-memory store of (timestamp, value) measurement pairs."""

    def __init__(self):
        # Ordered history of (timestamp, value) tuples, oldest first.
        self.measurements = []

    def add_measurement(self, timestamp, value):
        """Record one (timestamp, value) pair at the end of the history."""
        self.measurements.append((timestamp, value))

    def get_all_measurements(self):
        """Return the recorded pairs in insertion order."""
        return self.measurements

    def delete_all_measurements(self):
        """Drop the whole history (rebinds to a new list, so previously
        returned lists are left untouched)."""
        self.measurements = []
# Example usage — guarded so importing this module no longer prints.
if __name__ == "__main__":
    m = Measurement()
    m.add_measurement('2022-01-01 08:00:00', 120)
    m.add_measurement('2022-01-01 12:00:00', 130)
    print(m.get_all_measurements())  # Output: [('2022-01-01 08:00:00', 120), ('2022-01-01 12:00:00', 130)]
    m.delete_all_measurements()
    print(m.get_all_measurements())  # Output: []
// Auto-generated search index (Doxygen) — do not edit by hand; regenerate
// the documentation instead.
var searchData=
[
  ['gdllname',['gDLLName',['../namespacetest.html#a80f837d5da2fad005c3ba8151a77ff62',1,'test']]],
  ['gen_5fdata_5fcnt_5f',['gen_data_cnt_',['../class_a_m_i_model.html#ad8680b8371b9c476d046ff87d843fbe9',1,'AMIModel']]],
  ['gen_5fdata_5flast_5f',['gen_data_last_',['../class_a_m_i_model.html#aa9cacd5650a194a6f08552f2748bab7a',1,'AMIModel']]]
];
import { wrapCommand, switchSyncFlag, runAsync, expectAsyncShim } from '../src/shim'
// wrapCommand must run every registered beforeCommand hook, the command
// itself, then every afterCommand hook — including when the command rejects.
describe('wrapCommand', () => {
    it('should run command with before and after hook', async () => {
        const commandFn = jest.fn().mockReturnValue(Promise.resolve('foobar'))
        const beforeHook = jest.fn()
        const afterHook = jest.fn()
        const scope = {
            options: {
                beforeCommand: [beforeHook, beforeHook],
                afterCommand: [afterHook, afterHook, afterHook]
            }
        }

        const res = await wrapCommand('someCommand', commandFn).call(scope, 123, 'barfoo')
        expect(res).toEqual('foobar')
        expect(commandFn).toBeCalledTimes(1)
        expect(commandFn).toBeCalledWith(123, 'barfoo')
        // Hooks run once per registration, with (name, args[, result, error]).
        expect(beforeHook).toBeCalledTimes(2)
        expect(beforeHook).toBeCalledWith('someCommand', [123, 'barfoo'])
        expect(afterHook).toBeCalledTimes(3)
        expect(afterHook).toBeCalledWith('someCommand', [123, 'barfoo'], 'foobar', undefined)
    })

    it('should throw but still run after command hook', async () => {
        const error = new Error('uups')
        const commandFn = jest.fn().mockReturnValue(Promise.reject(error))
        const afterHook = jest.fn()
        const scope = {
            options: {
                beforeCommand: [],
                afterCommand: [afterHook, afterHook, afterHook]
            }
        }

        // The rejection propagates to the caller...
        const res = await wrapCommand('someCommand', commandFn).call(scope, 123, 'barfoo').catch(err => err)
        expect(res).toEqual(error)
        expect(commandFn).toBeCalledTimes(1)
        expect(commandFn).toBeCalledWith(123, 'barfoo')
        // ...but afterCommand hooks still fire, with the error in place of a result.
        expect(afterHook).toBeCalledTimes(3)
        expect(afterHook).toBeCalledWith('someCommand', [123, 'barfoo'], undefined, error)
    })
})
// switchSyncFlag(fn) flips the module-level runAsync flag to false for the
// duration of fn, restoring it depending on what fn returns: immediately for
// plain values, after resolution for promises, and only after the returned
// function itself completes when fn returns a function.
describe('switchSyncFlag', () => {
    it('should switch runAsync flag', () => {
        expect(runAsync).toBe(true)
        switchSyncFlag(() => {
            // inside the wrapped call the flag is lowered
            expect(runAsync).toBe(false)
            return {}
        })()
        // plain return value: flag restored synchronously
        expect(runAsync).toBe(true)
    })
    it('should switch back when returning a promise', async () => {
        expect(runAsync).toBe(true)
        await switchSyncFlag(() => {
            expect(runAsync).toBe(false)
            return Promise.resolve(true)
        })()
        // promise return value: flag restored after the promise settles
        expect(runAsync).toBe(true)
    })
    it('should switch back when returning a function', () => {
        expect(runAsync).toBe(true)
        const fn = switchSyncFlag(() => {
            expect(runAsync).toBe(false)
            return () => {
                // the deferred function runs with the flag raised again
                expect(runAsync).toBe(true)
                return {}
            }
        })()
        // flag stays lowered until the returned function is invoked
        expect(runAsync).toBe(false)
        // eslint-disable-next-line
        runAsync = true
        fn()
        expect(runAsync).toBe(true)
    })
    it('should switch back when returning a function with promise', async () => {
        expect(runAsync).toBe(true)
        const fn = switchSyncFlag(() => {
            expect(runAsync).toBe(false)
            return () => {
                expect(runAsync).toBe(true)
                return Promise.resolve({})
            }
        })()
        expect(runAsync).toBe(false)
        // eslint-disable-next-line
        runAsync = true
        await fn()
        expect(runAsync).toBe(true)
    })
})
// expectAsyncShim(actual, expectSync) dispatches to the global expectAsync
// for promise-like or WebDriver-ish values (elementId / sessionId objects)
// and to the synchronous expect for everything else.
test('expectAsyncShim', () => {
    global.expectAsync = jest.fn()
    const expectSync = jest.fn()
    // plain values -> sync expect
    expectAsyncShim(undefined, expectSync)
    expect(expectSync).toBeCalledTimes(1)
    expect(global.expectAsync).toBeCalledTimes(0)
    expectAsyncShim(42, expectSync)
    expect(expectSync).toBeCalledTimes(2)
    expect(global.expectAsync).toBeCalledTimes(0)
    // promises -> async expect
    expectAsyncShim(Promise.resolve({}), expectSync)
    expect(expectSync).toBeCalledTimes(2)
    expect(global.expectAsync).toBeCalledTimes(1)
    // element-like object -> async expect
    expectAsyncShim({ elementId: 42 }, expectSync)
    expect(expectSync).toBeCalledTimes(2)
    expect(global.expectAsync).toBeCalledTimes(2)
    // session-like object -> async expect
    expectAsyncShim({ sessionId: '42' }, expectSync)
    expect(expectSync).toBeCalledTimes(2)
    expect(global.expectAsync).toBeCalledTimes(3)
})
|
#!/bin/bash
# Start/stop/status/restart wrapper for a server binary in $PWD/bin.
# Usage: $0 {start|stop|restart|status} <server-name>
SERVER=$2
BASE_DIR=$PWD
INTERVAL=2
# Extra command-line arguments for the server; set manually if needed.
ARGS=""
# Abort when no server name was supplied as the second argument.
if [ "$SERVER" = "" ];then
    # BUG FIX: the message was wrapped in backticks, which made the shell try
    # to EXECUTE the message text as a command instead of printing it.
    echo "文件不存在" $SERVER
    exit 1
fi
# Launch $SERVER in the background (detached via nohup), then poll once
# after $INTERVAL seconds to confirm it actually came up.
function start()
{
    # Refuse to start a second copy for the current user.
    if [ "`pgrep $SERVER -u $UID`" != "" ];then
        echo "$SERVER already running"
        exit 1
    fi
    nohup $BASE_DIR/bin/$SERVER $ARGS server &>/dev/null &
    echo "sleeping..." && sleep $INTERVAL
    # check status
    if [ "`pgrep $SERVER -u $UID`" == "" ];then
        echo "$SERVER start failed"
        exit 1
    fi
}
# Report whether a process named $SERVER is running for the current user.
function status()
{
    if [ "`pgrep $SERVER -u $UID`" != "" ];then
        echo $SERVER is running
    else
        echo $SERVER is not running
    fi
}
# Forcibly terminate $SERVER (SIGKILL — no graceful shutdown) and verify
# after $INTERVAL seconds that it is gone.
function stop()
{
    if [ "`pgrep $SERVER -u $UID`" != "" ];then
        kill -9 `pgrep $SERVER -u $UID`
    fi
    echo "sleeping..." && sleep $INTERVAL
    if [ "`pgrep $SERVER -u $UID`" != "" ];then
        echo "$SERVER stop failed"
        exit 1
    fi
}
# Soft-restart: if running, signal with SIGHUP; otherwise start fresh.
# After a pause, start the server if the signal left nothing running.
# NOTE(review): kill -1 sends SIGHUP as a restart/reload request — confirm
# the server binary actually handles SIGHUP that way.
function restart()
{
    if [ "`pgrep $SERVER -u $UID`" != "" ];then
        kill -1 `pgrep $SERVER -u $UID`
    else
        nohup $BASE_DIR/bin/$SERVER $ARGS server &>/dev/null &
    fi
    echo "sleeping..." && sleep $INTERVAL
    # check status
    if [ "`pgrep $SERVER -u $UID`" == "" ];then
        nohup $BASE_DIR/bin/$SERVER $ARGS server &>/dev/null &
        if [ "`pgrep $SERVER -u $UID`" == "" ];then
            echo "$SERVER start failed"
            exit 1
        fi
    fi
}
# Dispatch on the first argument (the action); anything else prints usage.
case "$1" in
    'start')
        start
        ;;
    'stop')
        stop
        ;;
    'status')
        status
        ;;
    'restart')
        restart
        ;;
    *)
        echo "usage: $0 {start|stop|restart|status}"
        exit 1
        ;;
esac
|
<reponame>dailave/oqs<gh_stars>0
/*
* $Id$
*
* Copyright 2006-2008 <NAME>. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opoo.oqs.type;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Types;
import java.text.ParseException;
import java.text.SimpleDateFormat;
/**
 * <tt>time</tt>: A type that maps an SQL TIME to a Java
 * java.util.Date or java.sql.Time.
 *
 * @author <NAME>
 * @version 1.0
 */
public class TimeType extends MutableType {
    /**
     * Parse a time-of-day string.
     *
     * BUG FIX: previously used the no-arg {@code new SimpleDateFormat()},
     * which is locale-dependent and cannot parse the "HH:mm:ss" strings
     * produced by {@link #toString(Object)}; use the same explicit pattern
     * so valueOf/toString round-trip. (A new SimpleDateFormat is created
     * per call because the class is not thread-safe.)
     *
     * @param value a time string in "HH:mm:ss" form
     * @return the parsed java.util.Date
     * @throws RuntimeException wrapping the ParseException on bad input
     */
    public Object valueOf(String value) {
        try {
            return new SimpleDateFormat("HH:mm:ss").parse(value);
        } catch (ParseException pe) {
            throw new RuntimeException("could not parse string", pe);
        }
    }

    /** Read the named column as a java.sql.Time (may be null). */
    public Object get(ResultSet rs, String name) throws SQLException {
        return rs.getTime(name);
    }

    /** Read the indexed column as a java.sql.Time (may be null). */
    public Object get(ResultSet rs, int index) throws SQLException {
        return rs.getTime(index);
    }

    /** Values are exposed to callers as java.util.Date. */
    public Class getReturnedClass() {
        return java.util.Date.class;
    }

    /**
     * Bind a time value, converting a plain java.util.Date to
     * java.sql.Time when necessary.
     */
    public void set(PreparedStatement st, Object value, int index) throws
            SQLException {
        Time time;
        if (value instanceof Time) {
            time = (Time) value;
        } else {
            time = new Time(((java.util.Date) value).getTime());
        }
        st.setTime(index, time);
    }

    /** JDBC type code for this mapping. */
    public int sqlType() {
        return Types.TIME;
    }

    /** Format as "HH:mm:ss" — the inverse of {@link #valueOf(String)}. */
    public String toString(Object value) {
        return new SimpleDateFormat("HH:mm:ss").format((java.util.Date) value);
    }
}
|
<filename>scripts/sq2k_barrett_mont.py
#!/usr/bin/python3
'''
Copyright 2018-2019 <NAME>, SABANCI UNIVERSITY
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import math
import random
import sys
import textwrap
import binascii
def calc_mu(X, len):
    """Return the Montgomery constant mu = -X**-1 mod 2**len, for odd X.

    Uses square-then-multiply to compute X**(2**(len-1) - 1), which equals
    X**-1 modulo 2**len (the multiplicative group mod 2**len has exponent
    2**(len-2)), then negates modulo 2**len.

    Note: the parameter name `len` shadows the builtin; it is kept
    unchanged for interface compatibility with existing callers.
    """
    mask = (1 << len) - 1
    inv = X
    for _ in range(len - 2):
        inv = (inv * inv) & mask   # square
        inv = (inv * X) & mask     # multiply: inv == X**(2**(k+1) - 1)
    # mu = 2**len - X**-1  ==  -X**-1 (mod 2**len)
    return (mask + 1) - inv
def modsq(Av, Mv, M2v, M3v, M4v, CMv, CBv, num_words):
    """Word-wise modular squaring of A, modeling a hardware datapath.

    Av            : operand as num_words little-endian 16-bit words
    Mv            : modulus M as words; M2v/M3v/M4v hold 2M/3M/4M for the
                    final overflow fix-up
    CMv           : Montgomery constant (-M**-1 mod 2**(8*num_words)),
                    num_words//2 words (CM in the driver below)
    CBv           : Barrett constant floor(2**(1.5*n)/M), num_words//2 words
                    (CB in the driver below)
    num_words     : number of 16-bit words in the operand

    Returns the reduced square as num_words words in the same redundant
    representation as the input.  The explicit per-word partial products
    mirror the multiplier array of the intended hardware design.
    """
    Resv = num_words*2*[0]
    Retv = num_words*[0]
    # Single-use partial-product arrays (freed with del at the end).
    Mult1L = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    Mult1H = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    Mult1M1 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    Mult1M2 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultCM1 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultCM2 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultCM = num_words*2*[0]
    MultM1 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultM2 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultM3 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultM4 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultCB1 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultCB2 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultCB = num_words*2*[0]
    MultB1 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultB2 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultB3 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    MultB4 = [[0 for x in range(num_words//2)] for y in range(num_words//2)]
    # Multiply all words
    # This generates all of the products needed
    # Each product term is used once in the accumulation phase below,
    # so these are single use temporary variables.
    # High and low portions
    # These are the irregular starting and ending parts of the multiply
    # (j >= i: only the upper triangle is formed; the symmetric term is
    # accounted for by the *2 during accumulation)
    for i in range (num_words//2):
        for j in range (i,num_words//2,1):
            Mult1L[i][j] = Av[i] * Av[j]
            Mult1H[i][j] = Av[i + (num_words//2)] * Av[j + (num_words//2)]
    # Middle portion
    # This is the steady state middle portion
    for i in range (num_words//2):
        for j in range (num_words//4):
            Mult1M1[i][j] = Av[i+ (num_words//2)] * Av[j]
            Mult1M2[i][j] = Av[i+ (num_words//2)] * Av[j+ (num_words//4)]
    # Accumulate C
    # This accumulates the products to produce C, the result before reduction
    # Accum L
    for i in range (num_words//2):
        for j in range (num_words//2-i):
            if (i==0):
                Resv[i+2*j] += Mult1L[j][j+i]
            else:
                Resv[i+2*j] += Mult1L[j][j+i] * 2
    # Accum H
    for i in range (num_words//2):
        for j in range (num_words//2-i):
            if (i==0):
                Resv[i+2*j + num_words] += Mult1H[j][j+i]
            else:
                Resv[i+2*j + num_words] += Mult1H[j][j+i] * 2
    # Accum M1
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[num_words//2 + j + i] += Mult1M1[j][i] * 2
    # Accum M2
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[num_words//4 + num_words//2 + j + i] += Mult1M2[j][i] * 2
    # How often do we really have to carry? Need to understand how it will work since this seems like
    # a long chain.
    for i in range (num_words*2-1):
        Resv[i+1] += Resv[i]>>16
        Resv[i] = Resv[i]&(2**16-1)
    # Montgomery reduction
    # C*MC
    for i in range (num_words//2):
        for j in range (num_words//4):
            MultCM1[i][j] = Resv[i]*CMv[j]
            MultCM2[i][j] = Resv[i]*CMv[j+ (num_words//4)]
    # Accumulate in MultCM
    for i in range (num_words//4):
        for j in range (num_words//2):
            MultCM[j + i] += MultCM1[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            MultCM[num_words//4 + j + i] += MultCM2[j][i]
    # Carry MultCM
    for i in range (num_words):
        MultCM[i+1] = MultCM[i+1] + (MultCM[i]>>16)
        MultCM[i] = MultCM[i]&(2**16-1)
    # Generate T1L*M products
    for i in range (num_words//2):
        for j in range (num_words//4):
            MultM1[i][j] = MultCM[i]*Mv[j]
            MultM2[i][j] = MultCM[i]*Mv[j+ (num_words//4)]
            MultM3[i][j] = MultCM[i]*Mv[j+ (2*(num_words//4))]
            MultM4[i][j] = MultCM[i]*Mv[j+ (3*(num_words//4))]
    # Accumulate T1L*M (zeroes the low half of Resv modulo 2**(size/2))
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[j + i] += MultM1[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[1*(num_words//4) + j + i] += MultM2[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[2*(num_words//4) + j + i] += MultM3[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[3*(num_words//4) + j + i] += MultM4[j][i]
    # Barrett part
    # C*BC (uses the top quarter of Resv as the quotient estimate input)
    for i in range (num_words//2):
        for j in range (num_words//4):
            MultCB1[i][j] = Resv[i + 3*(num_words//2)]*CBv[j]
            MultCB2[i][j] = Resv[i + 3*(num_words//2)]*CBv[j+ (num_words//4)]
    # Accumulate in MultCB
    for i in range (num_words//4):
        for j in range (num_words//2):
            MultCB[j + i] += MultCB1[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            MultCB[num_words//4 + j + i] += MultCB2[j][i]
    # Carry MultCB
    for i in range (num_words-1):
        MultCB[i+1] = MultCB[i+1] + (MultCB[i]>>16)
        MultCB[i] = MultCB[i]&(2**16-1)
    # Generate T2H*M products
    for i in range (num_words//2):
        for j in range (num_words//4):
            MultB1[i][j] = MultCB[i+(num_words//2)]*Mv[j]
            MultB2[i][j] = MultCB[i+(num_words//2)]*Mv[j+ (num_words//4)]
            MultB3[i][j] = MultCB[i+(num_words//2)]*Mv[j+ (2*(num_words//4))]
            MultB4[i][j] = MultCB[i+(num_words//2)]*Mv[j+ (3*(num_words//4))]
    # Accumulate T2H*M (subtracting q*M, the Barrett correction)
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[(num_words//2) + j + i] -= MultB1[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[(num_words//2) + 1*(num_words//4) + j + i] -= MultB2[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[(num_words//2) + 2*(num_words//4) + j + i] -= MultB3[j][i]
    for i in range (num_words//4):
        for j in range (num_words//2):
            Resv[(num_words//2) + 3*(num_words//4) + j + i] -= MultB4[j][i]
    # Partial reduction (carry propagation; >> on negatives floors, so
    # borrows propagate correctly too)
    for i in range (2*num_words-1):
        Resv[i+1] = Resv[i+1] + (Resv[i]>>16)
        Resv[i] = Resv[i]&(2**16-1)
    # Extract the result from the middle of Resv
    for i in range (num_words):
        Retv[i] = Resv[i+(num_words//2)]
    # Save any redundant bits
    Retv[num_words-1] += Resv[num_words+(num_words//2)]<<16
    # Final reduction based on most signifant word overflow
    if ((Retv[num_words-1]>>16) == 1):
        for i in range (num_words):
            Retv[i] -= Mv[i]
    elif ((Retv[num_words-1]>>16) == 2):
        for i in range (num_words):
            Retv[i] -= M2v[i]
    elif ((Retv[num_words-1]>>16) == 3):
        for i in range (num_words):
            Retv[i] -= M3v[i]
    else:
        # Overflow beyond 3 words is not handled; flag it loudly.
        if ((Retv[num_words-1]>>16) != 0):
            print("take care of this also", (Retv[num_words-1]>>16))
    # Partial reduction
    for i in range (num_words-1):
        Retv[i+1] = Retv[i+1] + (Retv[i]>>16)
        Retv[i] = Retv[i]&(2**16-1)
    # Free the large temporaries explicitly.
    del Mult1L
    del Mult1H
    del Mult1M1
    del Mult1M2
    del MultCM1
    del MultCM2
    del MultCM
    del MultM1
    del MultM2
    del MultM3
    del MultM4
    del MultCB1
    del MultCB2
    del MultCB
    del MultB1
    del MultB2
    del MultB3
    del MultB4
    del Resv
    return Retv
# Convert from polynomial to integer and out of montgomery space
# Convert from polynomial (word-list) form to an integer, leaving
# Montgomery space via one REDC step.
def poltoint (Polv, num_coeff, M, mu):
    """Return (value * R**-1) mod M for the integer encoded by Polv.

    Polv      : little-endian 16-bit words (num_coeff of them are used)
    M         : the modulus
    mu        : -M**-1 mod R, where R = 2**((num_coeff*16)//2)
    """
    half_bits = (num_coeff * 16) // 2
    half_mask = (1 << half_bits) - 1
    # Recombine the words into a single big integer.
    value = sum(word << (16 * i) for i, word in enumerate(Polv[:num_coeff]))
    # Montgomery REDC: q = (value mod R) * mu mod R makes value + q*M
    # divisible by R, so the shift below is exact.
    q = ((value & half_mask) * mu) & half_mask
    value = (value + q * M) >> half_bits
    return value % M
# Solve a small timelock puzzle
# Driver: repeatedly square X mod the 2048-bit modulus M with the word-wise
# modsq() and validate every intermediate result against plain Python
# big-integer arithmetic; finally XOR the result with Z to reveal the payload.
size = 2048
num_words = size//16
X = 2
numtest = 1000
M = int(''.join(textwrap.dedent("""
105166528022527798431705271305083153271905167309082936062237
875135108916430921973837383917203706347775182028534615355223
417356941952078088378512191864458014795760160399651688747292
417054895625148685196270565622239606996305273360453763244589
678900932252222464531674533422500855492661601895626223029595
208807882791839550326277150183060433150214975293234171115461
501970809133171220204730435554841778490178338995725390364164
357656089835284535323531864081697993560557519746530724613154
323040692026418463571934152623560405791105826098599526772821
897662925573095464419934572972734862220466979588165351746682
81266248259210339""").split("\n")))
Z = int(''.join(textwrap.dedent("""
869460014143745198343907872022121547045298102699963289256818
682055608801137914744590737042198785541601713993261790859955
320045877588883312835732424347874921457602436314132375009085
907515462780106719552091250724077358808064334915246188158233
737872686183361703308411462503453416437767648592671459107151
424168479099704410312878298220701487856414239255033133157039
609423112868781436959889352211844164170132148475698871344018
257140452777126769499170418189455300534090263878123508822268
851503285278757685070743598918447811892907403238851522593464
637179741947173190032478938281192690043542660537523450003484
6541046351535602""").split("\n")))
# Precomputed multiples of M for modsq's final overflow fix-up.
M2 = 2*M
M3 = 3*M
M4 = 4*M
# Barrett constant: floor(2**(1.5*size) / M).
CB = (2**(size + (size//2) ) ) // M
# Montgomery constant: -M**-1 mod 2**(size/2).
CM = calc_mu(M, (size//2))
# Enter Montgomery space: X * R mod M with R = 2**(size/2).
X = (X<<(size//2)) % M
Xiv = num_words*[0]
Mv = num_words*[0]
M2v = num_words*[0]
M3v = num_words*[0]
M4v = num_words*[0]
CMv = num_words*[0]
CBv = num_words*[0]
# Split the big integers into little-endian 16-bit words.
for i in range (num_words):
    Xiv[i] = (X>>(16*i))&(2**16-1)
    Mv[i] = (M>>(16*i))&(2**16-1)
    M2v[i] = (M2>>(16*i))&(2**16-1)
    M3v[i] = (M3>>(16*i))&(2**16-1)
    M4v[i] = (M4>>(16*i))&(2**16-1)
# 2M..4M exceed size bits; keep their overflow in the (redundant) top word.
M2v[num_words-1] = (M2>>(16*(num_words-1)))
M3v[num_words-1] = (M3>>(16*(num_words-1)))
M4v[num_words-1] = (M4>>(16*(num_words-1)))
for i in range (num_words//2):
    CMv[i] = (CM>>(16*i))&(2**16-1)
    CBv[i] = (CB>>(16*i))&(2**16-1)
# CB may exceed size/2 bits; keep its overflow in its top word.
CBv[num_words//2-1] = (CB>>(16*(num_words//2-1)))
# Reference value: the first expected square, computed in plain Python.
Res1 = poltoint(Xiv, num_words, M, CM)
Res1 = (Res1*Res1) % M
for i in range (numtest):
    # Xiv is the only thing that changes
    Xiv = modsq(Xiv, Mv, M2v, M3v, M4v, CMv, CBv, num_words)
    # convert intermediate result to integer (in software)
    Res2 = poltoint(Xiv, num_words, M, CM)
    if (Res1 != Res2):
        print(i, " Wrong")
        break
    Res1 = (Res2*Res2) % M
if (i == numtest-1):
    print ("Concept Design Validated!")
else:
    print ("Concept Design Wrong!")
print (Res2)
# XOR against Z and decode the hex string to recover the hidden message.
print (binascii.unhexlify(hex(Res2 ^ Z)[2:]))
|
;; Global call counter shared by every function wrapped with track-func.
(defonce n (atom 0))

;; Wrap f so that every invocation increments the shared counter n
;; before delegating to f with the same arguments.
(defn track-func
  [f]
  (fn [& args]
    (swap! n inc)
    (apply f args)))
#!/bin/bash

# Copyright 2012-2014 Brno University of Technology (Author: Karel Vesely)
# Copyright 2012-2015 Shanghai Jiao Tong University (Author: Wei Deng)
# Apache 2.0

# Kaldi nnet1-style training script: declares all configurable options,
# parses the command line, and prepares the PDF-alignment training targets.

# Begin configuration.
config=            # config, which is also sent to all other scripts

# NETWORK INITIALIZATION
nnet_init=         # select initialized MLP (override initialization)
nnet_proto=        # select network prototype (initialize it)
proto_opts=        # non-default options for 'make_nnet_proto.py'
feature_transform= # provide feature transform (=splice,rescaling,...) (don't build new one)
network_type=dnn   # (dnn,cnn1d,cnn2d,lstm) select type of neural network
cnn_proto_opts=    # extra options for 'make_cnn_proto.py'
#
hid_layers=4       # nr. of hidden layers (prior to sotfmax or bottleneck)
hid_dim=1024       # select hidden dimension
bn_dim=            # set a value to get a bottleneck network
dbn=               # select DBN to prepend to the MLP initialization
momentum=0.9
#
init_opts=         # options, passed to the initialization script

# FEATURE PROCESSING
copy_feats=false    # resave the train/cv features into /tmp (disabled by default)
copy_feats_tmproot= # tmproot for copy-feats (optional)
# feature config (applies always)
online=false
cmvn_opts=
delta_opts=
# feature_transform:
splice=5            # temporal splicing
splice_step=1       # stepsize of the splicing (1 == no gap between frames)
feat_type=plain
# feature config (applies to feat_type traps)
traps_dct_basis=11  # nr. od DCT basis (applies to `traps` feat_type, splice10 )
# feature config (applies to feat_type transf) (ie. LDA+MLLT, no fMLLR)
transf=
splice_after_transf=5
# feature config (applies to feat_type lda)
lda_dim=300         # LDA dimension (applies to `lda` feat_type)

# LABELS
labels=  # use these labels to train (override deafault pdf alignments, has to be in 'Posterior' format, see ali-to-post)
num_tgt= # force to use number of outputs in the MLP (default is autodetect)

# TRAINING SCHEDULER
learn_rate=0.008 # initial learning rate
train_opts=      # options, passed to the training script
train_tool=      # optionally change the training tool
frame_weights=   # per-frame weights for gradient weighting
skip_opts=

# OTHER
seed=777 # seed value used for training data shuffling and initialization
skip_cuda_check=false
# End configuration.

echo "$0 $@" # Print the command line for logging

[ -f path.sh ] && . ./path.sh;
. parse_options.sh || exit 1;

if [ $# != 6 ]; then
   echo "Usage: $0 <data-train> <data-dev> <lang-dir> <ali-train> <ali-dev> <exp-dir>"
   echo " e.g.: $0 data/train data/cv data/lang exp/mono_ali_train exp/mono_ali_cv exp/mono_nnet"
   echo ""
   echo " Training data : <data-train>,<ali-train> (for optimizing cross-entropy)"
   echo " Held-out data : <data-dev>,<ali-dev> (for learn-rate/model selection based on cross-entopy)"
   echo " note.: <ali-train>,<ali-dev> can point to same directory, or 2 separate directories."
   echo ""
   echo "main options (for others, see top of script file)"
   echo " --config <config-file> # config containing options"
   echo ""
   echo " --apply-cmvn <bool> # apply CMN"
   echo " --norm-vars <bool> # add CVN if CMN already active"
   echo " --splice <N> # concatenate input features"
   echo " --feat-type <type> # select type of input features"
   echo ""
   echo " --mlp-proto <file> # use this NN prototype"
   echo " --feature-transform <file> # re-use this input feature transform"
   echo " --hid-layers <N> # number of hidden layers"
   echo " --hid-dim <N> # width of hidden layers"
   echo " --bn-dim <N> # make bottle-neck network with bn-with N"
   echo ""
   echo " --learn-rate <float> # initial leaning-rate"
   echo " --copy-feats <bool> # copy input features to /tmp (it's faster)"
   echo ""
   exit 1;
fi

data=$1
data_cv=$2
lang=$3
alidir=$4
alidir_cv=$5
dir=$6

# Using alidir for supervision (default)
if [ -z "$labels" ]; then
  #silphonelist=`cat $lang/phones/silence.csl` || exit 1;
  for f in $alidir/final.mdl $alidir/ali.tr.scp $alidir_cv/ali.cv.scp; do
    [ ! -f $f ] && echo "$0: no such file $f" && exit 1;
  done
fi

for f in $data/feats.scp $data_cv/feats.scp; do
  [ ! -f $f ] && echo "$0: no such file $f" && exit 1;
done

echo
echo "# INFO"
echo "$0 : Training Neural Network"
printf "\t dir : $dir \n"
printf "\t Train-set : $data $alidir \n"
printf "\t CV-set : $data_cv $alidir_cv \n"

mkdir -p $dir/{log,nnet}

# skip when already trained
[ -e $dir/final.nnet ] && printf "\nSKIPPING TRAINING... ($0)\nnnet already trained : $dir/final.nnet ($(readlink $dir/final.nnet))\n\n" && exit 0

# check if CUDA is compiled in,
if ! $skip_cuda_check; then
  cuda-compiled || { echo 'CUDA was not compiled in, skipping! Check src/kaldi.mk and src/configure' && exit 1; }
fi

###### PREPARE ALIGNMENTS ######
echo
echo "# PREPARING ALIGNMENTS"
if [ ! -z "$labels" ]; then
  echo "Using targets '$labels' (by force)"
  labels_tr="$labels"
  labels_cv="$labels"
else
  echo "Using PDF targets from dirs '$alidir' '$alidir_cv'"
  # define pdf-alignment rspecifiers
  #labels_tr="ark:ali-to-pdf $alidir/final.mdl \"ark:gunzip -c $alidir/ali.*.gz |\" ark:- | ali-to-post ark:- ark:- |"
  #labels_cv="ark:ali-to-pdf $alidir/final.mdl \"ark:gunzip -c $alidir_cv/ali.*.gz |\" ark:- | ali-to-post ark:- ark:- |"
  labels_tr="ark,o:ali-to-pdf $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- | ali-to-post ark:- ark:- |"
  labels_cv="ark,o:ali-to-pdf $alidir/final.mdl scp:$alidir_cv/ali.cv.scp ark:- | ali-to-post ark:- ark:- |"
  #
  #labels_tr_pdf="ark:ali-to-pdf $alidir/final.mdl \"ark:gunzip -c $alidir/ali.*.gz |\" ark:- |" # for analyze-counts.
  #labels_tr_phn="ark:ali-to-phones --per-frame=true $alidir/final.mdl \"ark:gunzip -c $alidir/ali.*.gz |\" ark:- |"
  labels_tr_pdf="ark:ali-to-pdf $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- |" # for analyze-counts.
  labels_tr_phn="ark:ali-to-phones --per-frame=true $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- |"
  # get pdf-counts, used later to post-process DNN posteriors
  [ -e $dir/ali_train_pdf.counts ] || analyze-counts --verbose=1 --binary=false "$labels_tr_pdf" $dir/ali_train_pdf.counts 2>$dir/log/analyze_counts_pdf.log || exit 1
  # copy the old transition model, will be needed by decoder
  copy-transition-model --binary=false $alidir/final.mdl $dir/final.mdl || exit 1
  # copy the tree
  cp $alidir/tree $dir/tree || exit 1
  # make phone counts for analysis
  #[ -e $lang/phones.txt ] && analyze-counts --verbose=1 --symbol-table=$lang/phones.txt "$labels_tr_phn" /dev/null 2>$dir/log/analyze_counts_phones.log || exit 1
fi
###### PREPARE FEATURES ######
echo
echo "# PREPARING FEATURES"
# shuffle the list
echo "Preparing train/cv lists :"
#cat $data/feats.scp | utils/shuffle_list.pl --srand ${seed:-777} > $dir/train.scp
cp $data/feats.scp $dir/train.scp
cp $data_cv/feats.scp $dir/cv.scp
# print the list sizes
wc -l $dir/train.scp $dir/cv.scp

# re-save the train/cv features to /tmp, reduces LAN traffic, avoids disk-seeks due to shuffled features
if [ "$copy_feats" == "true" ]; then
  tmpdir=$(mktemp -d $copy_feats_tmproot); mv $dir/train.scp{,_non_local}; mv $dir/cv.scp{,_non_local}
  copy-feats scp:$dir/train.scp_non_local ark,scp:$tmpdir/train.ark,$dir/train.scp || exit 1
  copy-feats scp:$dir/cv.scp_non_local ark,scp:$tmpdir/cv.ark,$dir/cv.scp || exit 1
  # Clean the tmpdir up on any script exit.
  trap "echo \"Removing features tmpdir $tmpdir @ $(hostname)\"; ls $tmpdir; rm -r $tmpdir" EXIT
fi

#create a 10k utt subset for global cmvn estimates
# NOTE(review): despite the ".10k" name, this takes the first 200k lines.
head -n 200000 $dir/train.scp > $dir/train.scp.10k

###### PREPARE FEATURE PIPELINE ######
# optionally import feature setup from pre-training,
if [ ! -z $feature_transform ]; then
  D=$(dirname $feature_transform)
  [ -e $D/norm_vars ] && cmvn_opts="--norm-means=true --norm-vars=$(cat $D/norm_vars)" # Bwd-compatibility,
  [ -e $D/cmvn_opts ] && cmvn_opts=$(cat $D/cmvn_opts)
  [ -e $D/delta_order ] && delta_opts="--delta-order=$(cat $D/delta_order)" # Bwd-compatibility,
  [ -e $D/delta_opts ] && delta_opts=$(cat $D/delta_opts)
  echo "Imported config : cmvn_opts='$cmvn_opts' delta_opts='$delta_opts'"
fi

# read the features (the pipelines below are Kaldi rspecifier strings that
# get extended stage by stage),
feats_tr="ark:copy-feats scp:$dir/train.scp ark:- |"
feats_cv="ark:copy-feats scp:$dir/cv.scp ark:- |"

# optionally add per-speaker CMVN,
if [ ! -z "$cmvn_opts" -a "$online" == "false" ]; then
  echo "Will use CMVN statistics : $data/cmvn.scp, $data_cv/cmvn.scp"
  [ ! -r $data/cmvn.scp ] && echo "Missing $data/cmvn.scp" && exit 1;
  [ ! -r $data_cv/cmvn.scp ] && echo "Missing $data_cv/cmvn.scp" && exit 1;
  feats_tr="$feats_tr apply-cmvn $cmvn_opts --utt2spk=ark:$data/utt2spk scp:$data/cmvn.scp ark:- ark:- |"
  feats_cv="$feats_cv apply-cmvn $cmvn_opts --utt2spk=ark:$data_cv/utt2spk scp:$data_cv/cmvn.scp ark:- ark:- |"
elif [ "$online" == "true" ];then
  # online decoding setup: sliding-window CMVN instead of per-speaker stats
  #[ ! -f $data/global_cmvn.stats ] && echo "$0: no such file $data/global_cmvn.stats" && exit 1;
  feats_tr="$feats_tr apply-cmvn-sliding $cmvn_opts ark:- ark:- |"
  feats_cv="$feats_cv apply-cmvn-sliding $cmvn_opts ark:- ark:- |"
else
  echo "apply-cmvn is not used"
fi

# optionally add deltas,
if [ ! -z "$delta_opts" ]; then
  feats_tr="$feats_tr add-deltas $delta_opts ark:- ark:- |"
  feats_cv="$feats_cv add-deltas $delta_opts ark:- ark:- |"
  echo "add-deltas with $delta_opts"
fi

# optionally skip frames,
if [ ! -z "$skip_opts" ]; then
  train_tool="$train_tool $skip_opts"
  echo "training with $skip_opts"
fi

# keep track of the config,
[ ! -z "$cmvn_opts" ] && echo "$cmvn_opts" >$dir/cmvn_opts
[ ! -z "$delta_opts" ] && echo "$delta_opts" >$dir/delta_opts
[ ! -z "$skip_opts" ] && echo "$skip_opts" >$dir/skip_opts
[ ! -z "$online" ] && echo "$online" >$dir/online
#

# get feature dim
echo "Getting feature dim : "
feat_dim=$(feat-to-dim --print-args=false "$feats_tr" -)
echo "Feature dim is : $feat_dim"

# Now we will start building complex feature_transform which will
# be forwarded in CUDA to have fast run-time.
#
# We will use 1GPU for both feature_transform and MLP training in one binary tool.
# This is against the kaldi spirit to have many independent small processing units,
# but it is necessary because of compute exclusive mode, where GPU cannot be shared
# by multiple processes.
if [ ! -z "$feature_transform" ]; then
  echo "Using pre-computed feature-transform : '$feature_transform'"
  tmp=$dir/$(basename $feature_transform)
  cp $feature_transform $tmp; feature_transform=$tmp
else
  # Generate the splice transform
  echo "Using splice +/- $splice , step $splice_step"
  feature_transform=$dir/tr_splice$splice-$splice_step.nnet
  utils/nnet/gen_splice.py --fea-dim=$feat_dim --splice=$splice --splice-step=$splice_step > $feature_transform

  # Choose further processing of spliced features
  echo "Feature type : $feat_type"
  case $feat_type in
    plain)
    ;;
    traps)
      #generate hamming+dct transform
      feature_transform_old=$feature_transform
      feature_transform=${feature_transform%.nnet}_hamm_dct${traps_dct_basis}.nnet
      echo "Preparing Hamming DCT transform into : $feature_transform"
      #prepare matrices with time-transposed hamming and dct
      utils/nnet/gen_hamm_mat.py --fea-dim=$feat_dim --splice=$splice > $dir/hamm.mat
      utils/nnet/gen_dct_mat.py --fea-dim=$feat_dim --splice=$splice --dct-basis=$traps_dct_basis > $dir/dct.mat
      #put everything together
      compose-transforms --binary=false $dir/dct.mat $dir/hamm.mat - | \
        transf-to-nnet - - | \
        nnet-concat --binary=false $feature_transform_old - $feature_transform || exit 1
    ;;
    transf)
      feature_transform_old=$feature_transform
      feature_transform=${feature_transform%.nnet}_transf_splice${splice_after_transf}.nnet
      [ -z $transf ] && transf=$alidir/final.mat
      [ ! -f $transf ] && echo "Missing transf $transf" && exit 1
      feat_dim=$(feat-to-dim "$feats_tr nnet-forward 'nnet-concat $feature_transform_old \"transf-to-nnet $transf - |\" - |' ark:- ark:- |" -)
      nnet-concat --binary=false $feature_transform_old \
        "transf-to-nnet $transf - |" \
        "utils/nnet/gen_splice.py --fea-dim=$feat_dim --splice=$splice_after_transf |" \
        $feature_transform || exit 1
    ;;
    lda)
      transf=$dir/lda$lda_dim.mat
      #get the LDA statistics
      if [ ! -r "$dir/lda.acc" ]; then
        echo "LDA: Converting alignments to posteriors $dir/lda_post.scp"
        # NOTE(review): $silphonelist is only set in a commented-out line
        # earlier, so weight-silence-post gets an empty argument here — confirm.
        ali-to-post "ark:gunzip -c $alidir/ali.*.gz|" ark:- | \
          weight-silence-post 0.0 $silphonelist $alidir/final.mdl ark:- ark,scp:$dir/lda_post.ark,$dir/lda_post.scp 2>$dir/log/ali-to-post-lda.log || exit 1;
        echo "Accumulating LDA statistics $dir/lda.acc on top of spliced feats"
        acc-lda --rand-prune=4.0 $alidir/final.mdl "$feats_tr nnet-forward $feature_transform ark:- ark:- |" scp:$dir/lda_post.scp $dir/lda.acc 2>$dir/log/acc-lda.log || exit 1;
      else
        echo "LDA: Using pre-computed stats $dir/lda.acc"
      fi
      #estimate the transform
      echo "Estimating LDA transform $dir/lda.mat from the statistics $dir/lda.acc"
      est-lda --write-full-matrix=$dir/lda.full.mat --dim=$lda_dim $transf $dir/lda.acc 2>$dir/log/lda.log || exit 1;
      #append the LDA matrix to feature_transform
      feature_transform_old=$feature_transform
      feature_transform=${feature_transform%.nnet}_lda${lda_dim}.nnet
      transf-to-nnet $transf - | \
        nnet-concat --binary=false $feature_transform_old - $feature_transform || exit 1
      #remove the temporary file
      rm $dir/lda_post.{ark,scp}
    ;;
    *)
      echo "Unknown feature type $feat_type"
      exit 1;
    ;;
  esac
  # keep track of feat_type
  echo $feat_type > $dir/feat_type

  # Renormalize the MLP input to zero mean and unit variance
  feature_transform_old=$feature_transform
  feature_transform=${feature_transform%.nnet}_cmvn-g.nnet
  echo "Renormalizing MLP input features into $feature_transform"
  nnet-forward --use-gpu=yes \
    $feature_transform_old "$(echo $feats_tr | sed 's|train.scp|train.scp.10k|')" \
    ark:- 2>$dir/log/nnet-forward-cmvn.log |\
    compute-cmvn-stats ark:- - | cmvn-to-nnet - - |\
    nnet-concat --binary=false $feature_transform_old - $feature_transform
  [ ! -f $feature_transform ] && cat $dir/log/nnet-forward-cmvn.log && echo "Error: Global CMVN failed, was the CUDA GPU okay?" && echo && exit 1
fi
###### MAKE LINK TO THE FINAL feature_transform, so the other scripts will find it ######
(cd $dir; [ ! -f final.feature_transform ] && ln -s $(basename $feature_transform) final.feature_transform )

###### INITIALIZE THE NNET ######
echo
echo "# NN-INITIALIZATION"
[ ! -z "$nnet_init" ] && echo "Using pre-initialized network '$nnet_init'";
if [ ! -z "$nnet_proto" ]; then
  echo "Initializing using network prototype '$nnet_proto'";
  nnet_init=$dir/nnet.init; log=$dir/log/nnet_initialize.log
  nnet-initialize $nnet_proto $nnet_init 2>$log || { cat $log; exit 1; }
fi
# Neither an initialized net nor a prototype given: autogenerate a prototype.
if [[ -z "$nnet_init" && -z "$nnet_proto" ]]; then
  echo "Getting input/output dims :"
  #initializing the MLP, get the i/o dims...
  #input-dim
  num_fea=$(feat-to-dim "$feats_tr nnet-forward $feature_transform ark:- ark:- |" - )
  { #optioanlly take output dim of DBN
    [ ! -z $dbn ] && num_fea=$(nnet-forward "nnet-concat $feature_transform $dbn -|" "$feats_tr" ark:- | feat-to-dim ark:- -)
    [ -z "$num_fea" ] && echo "Getting nnet input dimension failed!!" && exit 1
  }
  #output-dim (number of PDFs from the transition model, unless forced)
  [ -z $num_tgt ] && num_tgt=$(hmm-info --print-args=false $alidir/final.mdl | grep pdfs | awk '{ print $NF }')

  # make network prototype
  nnet_proto=$dir/nnet.proto
  echo "Genrating network prototype $nnet_proto"
  case "$network_type" in
    dnn)
      utils/nnet/make_nnet_proto.py $proto_opts \
        ${bn_dim:+ --bottleneck-dim=$bn_dim} \
        $num_fea $num_tgt $hid_layers $hid_dim >$nnet_proto || exit 1
      ;;
    cnn1d)
      delta_order=$([ -z $delta_opts ] && echo "0" || { echo $delta_opts | tr ' ' '\n' | grep "delta[-_]order" | sed 's:^.*=::'; })
      echo "Debug : $delta_opts, delta_order $delta_order"
      utils/nnet/make_cnn_proto.py $cnn_proto_opts \
        --splice=$splice --delta-order=$delta_order --dir=$dir \
        $num_fea >$nnet_proto || exit 1
      # output dim of the CNN front-end becomes the DNN part's input dim
      cnn_fea=$(cat $nnet_proto | grep -v '^$' | tail -n1 | awk '{ print $5; }')
      utils/nnet/make_nnet_proto.py $proto_opts \
        --no-proto-head --no-smaller-input-weights \
        ${bn_dim:+ --bottleneck-dim=$bn_dim} \
        "$cnn_fea" $num_tgt $hid_layers $hid_dim >>$nnet_proto || exit 1
      ;;
    cnn2d)
      #TODO, to be filled by Vijay...
      ;;
    lstm)
      utils/nnet/make_lstm_proto.py $proto_opts \
        $num_fea $num_tgt >$nnet_proto || exit 1
      ;;
    *) echo "Unknown : --network_type $network_type" && exit 1;
  esac

  # initialize
  nnet_init=$dir/nnet.init; log=$dir/log/nnet_initialize.log
  echo "Initializing $nnet_proto -> $nnet_init"
  nnet-initialize $nnet_proto $nnet_init 2>$log || { cat $log; exit 1; }

  # optionally prepend dbn to the initialization
  if [ ! -z $dbn ]; then
    nnet_init_old=$nnet_init; nnet_init=$dir/nnet_$(basename $dbn)_dnn.init
    nnet-concat $dbn $nnet_init_old $nnet_init || exit 1
  fi
fi

###### TRAIN ######
echo
echo "# RUNNING THE NN-TRAINING SCHEDULER"
steps/nnet/train_scheduler_asgd_sds.sh \
  --feature-transform $feature_transform \
  --learn-rate $learn_rate \
  --momentum $momentum \
  --randomizer-seed $seed \
  ${train_opts} \
  ${train_tool:+ --train-tool "$train_tool"} \
  ${frame_weights:+ --frame-weights "$frame_weights"} \
  ${config:+ --config $config} \
  $nnet_init "$feats_tr" "$feats_cv" "$labels_tr" "$labels_cv" $dir || exit 1
if $prepend_cnn; then
echo "Preparing feature transform with CNN layers for RBM pre-training."
nnet-concat $dir/final.feature_transform "nnet-copy --remove-last-layers=$(((hid_layers+1)*2)) $dir/final.nnet - |" \
$dir/final.feature_transform_cnn 2>$dir/log/concat_transf_cnn.log || exit 1
fi
echo "$0 successfuly finished.. $dir"
sleep 3
exit 0
|
import pandas as pd
from pandas.core.accessor import register_index_accessor
import us
import requests
from can_tools.scrapers import variables
from can_tools.scrapers.official.base import StateDashboard
class NebraskaCases(StateDashboard):
    """Scraper for county-level cumulative COVID-19 cases in Nebraska.

    The state dashboard reports cases per health region (each region spans
    one or more counties). ``normalize`` disaggregates region totals into
    per-county estimates, weighted by each county's share of the region's
    population.
    """

    # Output rows carry a FIPS location code rather than a free-form name.
    has_location = True
    location_type = "county"
    state_fips = int(us.states.lookup("Nebraska").fips)
    source = "https://experience.arcgis.com/experience/ece0db09da4d4ca68252c3967aa1e9dd"
    # ArcGIS FeatureServer query endpoint backing the public dashboard.
    fetch_url = (
        "https://gis.ne.gov/Enterprise/rest/services/C19Combine/FeatureServer/0/query"
    )
    source_name = "Nebraska Department of Health and Human Services"
    variables = {"county_cases": variables.CUMULATIVE_CASES_PEOPLE}

    def fetch(self) -> requests.models.Response:
        """Request every region's name, county list and positive-case count."""
        params = {
            "f": "json",
            # "0=0" is always true: select all features.
            "where": "0=0",
            "returnGeometry": "false",
            "outFields": "name,Counties_S,PosCases",
        }
        return requests.get(self.fetch_url, params=params)

    def normalize(self, data: requests.models.Response) -> pd.DataFrame:
        """Turn the region-level API response into per-county case estimates.

        Downloads a county-population table, explodes each region row into
        one row per county, then allocates the region's cases to counties
        proportionally to population.
        """
        # fetch county population table, keep only cols and rows of interest
        county_pops = (
            pd.read_csv(
                "https://media.githubusercontent.com/media/covid-projections/covid-data-public/main/data/misc/fips_population.csv"
            )
            .query("state == 'NE'")
            .assign(county=lambda x: x["county"].str.replace(" County", ""))
            .rename(columns={"population": "county_population"})
            .loc[:, ["county", "fips", "county_population"]]
            .set_index("county")
        )
        return (
            # parse and format dashboard data
            pd.json_normalize(data.json()["features"])
            # Counties_S is a comma-separated list of county names per region.
            .assign(county=lambda x: x["attributes.Counties_S"].str.split(", "))
            .drop(columns={"attributes.Counties_S"})
            .rename(
                columns={
                    "attributes.PosCases": "region_cases",
                    "attributes.name": "health_region",
                }
            )
            # One row per (region, county) pair.
            .explode("county")
            # left join with the county population table on county names
            .set_index("county")
            .join(county_pops, how="left")
            .reset_index()
            .assign(
                # calculate health region populations by summing county populations
                region_population=lambda x: (
                    x["health_region"].map(
                        x.groupby("health_region").sum()["county_population"].to_dict()
                    )
                ),
                # calculate proportion of total health region population for each county
                county_percent_pop=lambda x: (
                    x["county_population"] / x["region_population"]
                ),
                # multiply proportion by region's total cases to get estimated number of cases for each county
                county_cases=lambda x: x["region_cases"] * x["county_percent_pop"],
            )
            # add date, melt into CMU variables
            .pipe(
                self._rename_or_add_date_and_location,
                location_column="fips",
                timezone="US/Central",
            )
            .pipe(self._reshape_variables, variable_map=self.variables)
        )
|
<reponame>mwilliamson/Stopify
import { setImmediate } from './setImmediate';
import { ElapsedTimeEstimator } from '@stopify/estimators';
import { Runtime, Result } from '@stopify/continuations-runtime';
/**
 * Placeholder continuation installed while no computation is suspended.
 * Reaching it means .resume() was invoked without a matching pause.
 */
export function badResume(x: Result): any {
  throw new Error('program is not paused. (Did you call .resume() twice?)');
}
/**
 * Default completion callback: report uncaught exceptions to the console,
 * do nothing on normal termination.
 */
export function defaultDone(x: Result): any {
  if (x.type !== 'exception') {
    return;
  }
  console.error(x.value);
}
// Shared Result value representing normal completion with no return value.
const normalUndefResult: Result = { type: 'normal', value: undefined };
/**
 * Instance of a runtime extended with the suspend() function. Used by
 * instrumented programs produced by stopify.
 */
export class RuntimeWithSuspend {
  constructor(
    /**
     * Abstract runtime used to implement stack saving and restoring logic
     */
    public rts: Runtime,
    /** Yield budget compared against estimator.elapsedTime() in suspend(). */
    public yieldInterval: number,
    /** Estimates elapsed time between suspension checks. */
    public estimator: ElapsedTimeEstimator,
    /** The runtime system yields control whenever this function produces
     * 'true' or when the estimated elapsed time exceeds 'yieldInterval'.
     */
    public mayYield = function(): boolean { return false; },
    /** This function is applied immediately before stopify yields control to
     * the browser's event loop. If the function produces 'false', the
     * computation does not resume.
     */
    public onYield = function(): boolean { return true; },
    /**
     * Called when execution reaches the end of any stopified module.
     */
    public onEnd = function (this: RuntimeWithSuspend, x: Result): void { this.estimator.cancel(); defaultDone(x); },
    /** Saved continuation of the suspended program (badResume when none). */
    public continuation = badResume,
    /** Completion callback captured at suspension time. */
    public onDone = defaultDone,
    /**
     * Current line number in the source program. Used in `--debug` mode.
     */
    public linenum: undefined | number = undefined) {
  }

  // Resume a suspended program.
  resumeFromCaptured(): any {
    const cont = this.continuation;
    const onDone = this.onDone;
    // Clear the saved continuation, or invoking .resumeFromCaptured() twice
    // in a row will restart the computation.
    this.continuation = badResume;
    this.onDone = defaultDone;
    return this.rts.runtime(() => cont(normalUndefResult), onDone);
  }

  /**
   * Call this function to suspend a running program. When called, it initiates
   * stack capturing by calling the `captureCC` function defined by the current
   * runtime.
   *
   * Internally uses stopify's timing mechanism to decide whether or not to
   * suspend.
   *
   * @param force forces a suspension when `true`.
   */
  suspend(force?: boolean): void {
    // If there are no more stack frame left to be consumed, save the stack
    // and continue running the program.
    if (isFinite(this.rts.stackSize) && this.rts.remainingStack <= 0) {
      this.rts.remainingStack = this.rts.stackSize;
      // Capture and immediately re-enter the continuation (stack is reset).
      return this.rts.captureCC((continuation) => {
        if(this.onYield()) {
          return continuation(normalUndefResult);
        }
      });
    }
    // Otherwise yield only when forced, when mayYield() requests it, or when
    // the estimated elapsed time exceeds the configured yield interval.
    if (force || this.mayYield() ||
      (this.estimator.elapsedTime() >= this.yieldInterval)) {
      if (isFinite(this.rts.stackSize)) {
        this.rts.remainingStack = this.rts.stackSize;
      }
      this.estimator.reset();
      // Capture the current continuation, end the turn, and schedule the
      // resumption on the event loop so the browser can run.
      return this.rts.captureCC((continuation) => {
        return this.rts.endTurn((onDone) => {
          // Save state so resumeFromCaptured() can restart us later.
          this.continuation = continuation;
          this.onDone = onDone;
          if (this.onYield()) {
            return setImmediate(() => {
              this.rts.runtime(() => continuation(normalUndefResult), onDone);
            });
          }
        });
      });
    }
  }
}
|
#
# dotzsh : https://github.com/dotphiles/dotzsh
#
# Simple screensaver module
#
# Authors:
#   Ben O'Hara <bohara@gmail.com>
#

# Read the user-configured idle timeout (seconds) for this module, if any.
zstyle -a ':dotzsh:module:screensaver' timeout '_timeout'
if (( $#_timeout > 0 )); then
  TMOUT="$_timeout[@]"
else
  # No timeout configured: default to 120 seconds of inactivity.
  TMOUT=120
fi
# Invoked by zsh when the TMOUT idle timer expires: start the screensaver.
# BUG FIX: the guard was inverted — `(( ! $+commands[cmatrix] ))` ran
# cmatrix only when it was NOT installed (and then failed with
# "command not found"). Run it only when it IS available.
TRAPALRM() {
  if (( $+commands[cmatrix] )); then
    cmatrix -s
  fi
}
|
from typing import List, Tuple
import random
def switch_predictions(y_list: List[int], predictions: List[int], confidences: List[float], min_conf: float) -> Tuple[List[int], int]:
    """Stochastically replace labels in ``y_list`` with model predictions.

    A label is eligible for switching only when the model disagrees with it
    and the prediction's confidence is at least ``min_conf``. An eligible
    label is then switched with probability equal to the confidence.

    Mutates ``y_list`` in place and also returns it, together with the
    number of labels that were switched.
    """
    num_switched = 0
    for idx, (label, pred, conf) in enumerate(zip(y_list, predictions, confidences)):
        # Skip entries that are below threshold or already agree.
        eligible = conf >= min_conf and label != pred
        if not eligible:
            continue
        # Switch with probability `conf` (draw <= conf keeps the switch).
        if random.uniform(0.0, 1.0) <= conf:
            y_list[idx] = pred
            num_switched += 1
    return y_list, num_switched
import re
# Matches dates written as two digits / two digits / four digits,
# e.g. "01/02/2021" (DD/MM/YYYY or MM/DD/YYYY — format not enforced).
date_pattern = r"\d{2}\/\d{2}\/\d{4}"
# NOTE(review): `text` is not defined in this snippet — it must be bound by
# the surrounding context before this line executes.
date = re.search(date_pattern, text)
// Wires the server-provided schema (window.globals.schema) into the Angular
// providers: websocket/REST base URLs, record encoding, and per-model
// discourse-node registration.
angular.module("wust.config").config(SchemaConfig);
SchemaConfig.$inject = ["SchemaProvider", "DiscourseNodeProvider", "DiscourseNodeListProvider", "restmodProvider", "LiveServiceProvider"];
function SchemaConfig(SchemaProvider, DiscourseNodeProvider, DiscourseNodeListProvider, restmodProvider, LiveServiceProvider) {
    let schema = window.globals.schema;
    LiveServiceProvider.setBaseUrl(schema.api.websocketRoot);
    restmodProvider.rebase({
        $config: {
            urlPrefix: schema.api.restRoot,
            style: "wust",
            primaryKey: "id"
        },
        $extend: {
            Model: {
                // URL fragments are derived from model names in kebab-case.
                encodeUrlName: _.kebabCase
            },
            Record: {
                encode: function() {
                    let encoded = this.$encode();
                    //TODO: more general solution, as encode loses all collection properties
                    if (this.tags !== undefined)
                        encoded.tags = angular.copy(this.tags.map(t => t.$encode ? t.$encode() : t));
                    if (this.classifications !== undefined)
                        encoded.classifications = angular.copy(this.classifications.map(t => t.$encode ? t.$encode() : t));
                    return encoded;
                }
            },
        }
    });
    // Register every schema model's label and list path with the
    // discourse-node providers.
    _.each(schema.models, model => {
        DiscourseNodeProvider.setLabel(model.name, model.label);
        DiscourseNodeListProvider.setList(model.name, model.path);
    });
    SchemaProvider.setup(schema);
}
|
def root_mean_square_error(series1, series2):
    """Return the root mean square error between two numeric series.

    ``series2`` is indexed by the positions of ``series1``, so it raises
    IndexError if ``series2`` is shorter, and ZeroDivisionError if
    ``series1`` is empty (same behaviour as the original implementation).
    """
    n = len(series1)
    # Average the squared pointwise differences, then take the square root.
    total_squared_diff = sum((series1[i] - series2[i]) ** 2 for i in range(n))
    return (total_squared_diff / n) ** 0.5
package io.sjitech.demo.model;

/**
 * Plain data holder for a Mijin/NEM namespace entry.
 * <p>
 * Created by wang on 2016/07/27.
 */
public class MijinNamespace {
    // Fully qualified namespace name.
    private String fqn;
    // Account that owns the namespace.
    private String owner;
    // Block height associated with the namespace entry.
    private int height;
    public String getFqn() {
        return fqn;
    }
    public void setFqn(String fqn) {
        this.fqn = fqn;
    }
    public String getOwner() {
        return owner;
    }
    public void setOwner(String owner) {
        this.owner = owner;
    }
    public int getHeight() {
        return height;
    }
    public void setHeight(int height) {
        this.height = height;
    }
}
|
from application.resources.recommender.feat_extraction import User
from application.resources.utils.user_input import user_fields
from application.resources.recommender import recommendation
from flask_restx import Resource
from application import api
from flask import request
# Namespace grouping all recommendation endpoints under /recommendation.
recommendation_api = api.namespace('recommendation', description='Operations related to Recommendation')
@recommendation_api.route('/', endpoint='/recommendation')
class RecommendationController(Resource):
    """REST resource exposing the word-based recommendation service."""

    def get(self):
        """Static placeholder response; useful as a liveness check."""
        return 'Recommendation GET method Called'

    @api.expect(user_fields, validate=False)
    @api.doc(responses={
        200: 'Recommendation Given',
        400: 'Validation Error'
    })
    def post(self):
        """Return recommendations for the words in the JSON request body.

        Expects a JSON payload with a 'word' key; the value is passed
        straight to the recommender without preprocessing.
        """
        json_data = request.get_json(force=True)
        out_value = recommendation.recommendations(words=json_data['word'])
        """
        # Currently not prepossessing the json
        user = User(json_data)
        data = user.pre_processing()
        """
        return out_value
|
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.mips.instruction.basic;
import net.jamsimulator.jams.mips.instruction.assembled.AssembledInstruction;
import net.jamsimulator.jams.mips.instruction.assembled.AssembledRFPUInstruction;
import net.jamsimulator.jams.mips.parameter.InstructionParameterTypes;
import net.jamsimulator.jams.mips.parameter.ParameterType;
/**
 * Represents an R-Type FPU basic instruction. This subclass adds the FMT
 * of the instruction, allowing the simulator to find this instruction based on an FMT.
 */
public abstract class BasicRFPUInstruction<Inst extends AssembledInstruction> extends BasicRInstruction<Inst> {

    // Operand type format specifier (FMT field) of this instruction.
    private final int fmtCode;

    /**
     * Creates a basic instruction using a mnemonic, a parameter types array, an operation code,
     * a function code and an operand type format specifier.
     *
     * @param mnemonic      the mnemonic.
     * @param parameters    the parameter types.
     * @param operationCode the operation code.
     * @param functionCode  the function code.
     * @param fmtCode       the operand type format specifier.
     */
    public BasicRFPUInstruction(String mnemonic, ParameterType[] parameters, int operationCode,
                                int functionCode, int fmtCode) {
        super(mnemonic, parameters, operationCode, functionCode);
        this.fmtCode = fmtCode;
    }

    /**
     * Creates a basic instruction using a mnemonic, a parameter types array, an operation code,
     * a function code and an operand type format specifier.
     *
     * @param mnemonic      the mnemonic.
     * @param parameters    the parameter types.
     * @param operationCode the operation code.
     * @param functionCode  the function code.
     * @param fmtCode       the operand type format specifier.
     */
    public BasicRFPUInstruction(String mnemonic, InstructionParameterTypes parameters, int operationCode,
                                int functionCode, int fmtCode) {
        super(mnemonic, parameters, operationCode, functionCode);
        this.fmtCode = fmtCode;
    }

    @Override
    public boolean match(int instructionCode) {
        // Besides the R-type opcode/function match, the FMT field extracted
        // from the encoded instruction must also match this instruction.
        return super.match(instructionCode) &&
                ((instructionCode >> AssembledRFPUInstruction.FMT_SHIFT) & AssembledRFPUInstruction.FMT_MASK) == fmtCode;
    }

    /**
     * Returns the operand type format specifier of the instruction.
     *
     * @return the format specifier.
     */
    public int getFmtCode() {
        return fmtCode;
    }
}
|
import subprocess
import sys
import os
def get_available_python_versions():
    """Return version strings for the `python` package known to conda.

    Runs `conda search --full-name ^python$` and keeps the last
    whitespace-separated field of each line starting with "python".
    Exits the process with status 1 if the conda invocation fails.

    NOTE(review): in current conda output the columns are
    name/version/build/channel, so `split()[-1]` may yield the channel
    rather than the version — verify against the installed conda.
    """
    try:
        output = subprocess.check_output(['conda', 'search', '--full-name', '^python$'], universal_newlines=True)
        versions = [line.split()[-1] for line in output.splitlines() if line.startswith('python')]
        return versions
    except subprocess.CalledProcessError:
        print("Error: Unable to retrieve available Python versions.")
        sys.exit(1)
def build_conda_packages(folder_name, python_versions):
    """Build the conda recipe in ./<folder_name> once per Python version.

    Failures for individual versions are reported and skipped, so one bad
    build does not stop the remaining versions.
    """
    for py_version in python_versions:
        cmd = ['conda', 'build', '--py=' + py_version, './' + folder_name]
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            print(f"Error: Failed to build package for Python {py_version}.")
            continue
        print(f"Package built successfully for Python {py_version}.")
if __name__ == "__main__":
    # Usage: python build_conda_packages.py <folder_name>
    if len(sys.argv) != 2:
        print("Usage: python build_conda_packages.py <folder_name>")
        sys.exit(1)
    folder_name = sys.argv[1]
    # The recipe folder must exist before invoking conda build.
    if not os.path.isdir(folder_name):
        print(f"Error: {folder_name} is not a valid directory.")
        sys.exit(1)
    python_versions = get_available_python_versions()
    if not python_versions:
        print("Error: No Python versions found.")
        sys.exit(1)
    # Build the recipe once per discovered Python version.
    build_conda_packages(folder_name, python_versions)
package parser
import (
"github.com/jensneuse/graphql-go-tools/pkg/document"
"github.com/jensneuse/graphql-go-tools/pkg/lexing/position"
"testing"
)
// TestParser_parseInputObjectTypeDefinition exercises the parser on GraphQL
// `input` type definitions: valid shapes (fields, defaults, directives),
// the optional-body form, and inputs that must make the parser panic.
func TestParser_parseInputObjectTypeDefinition(t *testing.T) {
	t.Run("simple", func(t *testing.T) {
		run(`input Person {
					name: String
				}`,
			mustParseInputObjectTypeDefinition(
				node(
					hasName("Person"),
					hasInputFieldsDefinition(
						hasInputValueDefinitions(
							node(
								hasName("name"),
							),
						),
					),
					// Source span of the whole definition, 1-indexed.
					hasPosition(position.Position{
						LineStart: 1,
						CharStart: 1,
						LineEnd:   3,
						CharEnd:   6,
					}),
				),
			),
		)
	})
	t.Run("multiple fields", func(t *testing.T) {
		run(`input Person {
					name: [String]!
					age: [ Int ]
				}`,
			mustParseInputObjectTypeDefinition(
				node(
					hasName("Person"),
					hasInputFieldsDefinition(
						hasInputValueDefinitions(
							node(hasName("age")),
							node(hasName("name")),
						),
					),
				),
			),
		)
	})
	t.Run("with default value", func(t *testing.T) {
		run(`input Person {
					name: String = "Gophina"
				}`,
			mustParseInputObjectTypeDefinition(
				node(
					hasName("Person"),
					hasInputFieldsDefinition(
						hasInputValueDefinitions(
							node(
								hasName("name"),
								nodeType(
									hasTypeKind(document.TypeKindNAMED),
									hasTypeName("String"),
								),
							),
						),
					),
				),
			),
		)
	})
	// An input type may omit its field block entirely.
	t.Run("all optional", func(t *testing.T) {
		run("input Person", mustParseInputObjectTypeDefinition(
			node(
				hasName("Person"),
			),
		))
	})
	t.Run("complex", func(t *testing.T) {
		run(`input Person @fromTop(to: "bottom") @fromBottom(to: "top"){
					name: String
				}`,
			mustParseInputObjectTypeDefinition(
				node(
					hasName("Person"),
					hasDirectives(
						node(
							hasName("fromTop"),
						),
						node(
							hasName("fromBottom"),
						),
					),
					hasInputFieldsDefinition(
						hasInputValueDefinitions(
							node(
								hasName("name"),
							),
						),
					),
				),
			),
		)
	})
	// The "invalid" cases below must panic during parsing; the inner
	// hasName assertions are presumably never evaluated (copy-pasted
	// placeholder values) — TODO confirm against the test helpers.
	t.Run("invalid 1", func(t *testing.T) {
		run("input 1337 {}",
			mustPanic(
				mustParseInputObjectTypeDefinition(
					node(
						hasName("1337"),
					),
				)),
		)
	})
	t.Run("invalid 2", func(t *testing.T) {
		run("input Person @foo(bar: .) {}",
			mustPanic(
				mustParseInputObjectTypeDefinition(
					node(
						hasName("1337"),
					),
				)),
		)
	})
	t.Run("invalid 3", func(t *testing.T) {
		run("input Person { a: .}",
			mustPanic(
				mustParseInputObjectTypeDefinition(
					node(
						hasName("1337"),
					),
				)),
		)
	})
	t.Run("invalid 4", func(t *testing.T) {
		run("notinput Foo {}",
			mustPanic(
				mustParseInputObjectTypeDefinition(
					node(
						hasName("1337"),
					),
				)),
		)
	})
}
|
<reponame>markenwerk/java-commons-collections
/*
* Copyright (c) 2016 <NAME>, Markenwerk GmbH
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.markenwerk.commons.collections.sinks;
import org.junit.Assert;
import org.junit.Test;
import net.markenwerk.commons.collections.Nullity;
import net.markenwerk.commons.collections.sequences.ListSequence;
import net.markenwerk.commons.collections.sequences.Sequence;
@SuppressWarnings("javadoc")
public class SequenceSinkTests {

	@Test
	public void getSequence() {
		Sequence<Object> sequence = new ListSequence<Object>();
		SequenceSink<Object> sink = new SequenceSink<Object>(sequence);
		Assert.assertSame(sequence, sink.getSequence());
	}

	@Test
	public void getNullity() {
		SequenceSink<Object> sink = new SequenceSink<Object>(Nullity.IGNORE, new ListSequence<Object>());
		Assert.assertSame(Nullity.IGNORE, sink.getNullity());
	}

	@Test
	public void add() {
		Object object = new Object();
		SequenceSink<Object> sink = new SequenceSink<Object>(new ListSequence<Object>());
		sink.add(object);
		Assert.assertEquals(1, sink.getSequence().size());
		Assert.assertTrue(sink.getSequence().contains(object));
	}

	@Test
	public void add_nullAllow() {
		SequenceSink<Object> sink = new SequenceSink<Object>(Nullity.ALLOW, new ListSequence<Object>());
		sink.add(null);
		Assert.assertEquals(1, sink.getSequence().size());
		Assert.assertTrue(sink.getSequence().contains((Object) null));
	}

	@Test
	public void add_nullIgnore() {
		SequenceSink<Object> sink = new SequenceSink<Object>(Nullity.IGNORE, new ListSequence<Object>());
		sink.add(null);
		Assert.assertTrue(sink.getSequence().isEmpty());
	}

	@Test(expected = IllegalArgumentException.class)
	public void add_rejectNull() {
		SequenceSink<Object> sink = new SequenceSink<Object>(Nullity.REJECT, new ListSequence<Object>());
		sink.add(null);
	}

	@Test
	public void addAll() {
		Object first = new Object();
		Object second = new Object();
		SequenceSink<Object> sink = new SequenceSink<Object>(new ListSequence<Object>());
		sink.addAll(first, second);
		Assert.assertEquals(2, sink.getSequence().size());
		Assert.assertTrue(sink.getSequence().containsAll(first, second));
	}

	@Test(expected = IllegalArgumentException.class)
	public void addAll_nullArray() {
		SequenceSink<Object> sink = new SequenceSink<Object>(new ListSequence<Object>());
		sink.addAll((Object[]) null);
	}

	@Test(expected = IllegalArgumentException.class)
	public void addAll_nullIterable() {
		SequenceSink<Object> sink = new SequenceSink<Object>(new ListSequence<Object>());
		sink.addAll((Iterable<Object>) null);
	}

	// Sinks use identity semantics: two sinks over the same sequence are
	// still not equal to each other.
	@Test
	public void equals() {
		ListSequence<Object> sequence = new ListSequence<Object>();
		Assert.assertNotEquals(new SequenceSink<Object>(sequence), new SequenceSink<Object>(sequence));
	}

	// FIX: renamed from the misspelled "haschCode". Verifies that hash codes
	// are identity-based as well.
	@Test
	public void hashCodes() {
		ListSequence<Object> sequence = new ListSequence<Object>();
		Assert.assertNotEquals(new SequenceSink<Object>(sequence).hashCode(),
				new SequenceSink<Object>(sequence).hashCode());
	}
}
|
<gh_stars>10-100
import { ChakraProvider } from "@chakra-ui/react";
import React from "react";
import ReactDOM from "react-dom";
import theme from "../theme";
import Options from "./Options";
// Mount the Options page into the #options element, wrapped in the Chakra
// theme provider and React.StrictMode's extra development-time checks.
ReactDOM.render(
  <React.StrictMode>
    <ChakraProvider theme={theme}>
      <Options />
    </ChakraProvider>
  </React.StrictMode>,
  document.getElementById("options"),
);
|
#!/bin/bash -eu
# Open the GitHub pull-request page for the merge that brought commit $1
# into the repository's default branch.

# Default branch name as reported by the remote (e.g. "main").
DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch" | sed "s/.*: //")
# Intersect first-parent merges on the default branch with merges on the
# ancestry path from $1; the last common hash is the merge that introduced $1.
COMMIT_ID=$(grep -Fxf \
    <(git log --first-parent --merges --pretty=format:%H $DEFAULT_BRANCH; echo) \
    <(git log --ancestry-path --merges --pretty=format:%H $1..$DEFAULT_BRANCH; echo) \
    | tail -n 1)
COMMIT_MSG=$(git log $COMMIT_ID -n1 --pretty=format:%s)
# Extract the "#123" PR reference from the merge-commit subject.
# NOTE(review): '\d' is a PCRE class; GNU grep -E treats it literally, so this
# likely relies on BSD/macOS grep — '[0-9]' would be portable. Verify.
PR_NO=$(echo $COMMIT_MSG | egrep --only-matching '#\d+')
GIT_URL=$(git remote get-url origin)
# Strip the "git@github.com:" prefix and ".git" suffix -> "owner/repo".
REPO_PATH=${GIT_URL#git@github.com:}
REPO_SIGN=${REPO_PATH%.git}
open "https://github.com/$REPO_SIGN/pull/${PR_NO#\#}"
|
<gh_stars>0
// Redux action-type constants for the page-fetch lifecycle and post receipt.
export const FETCH_PAGE_BEGIN = "FETCH_PAGE_BEGIN";
export const FETCH_PAGE_SUCCESS = "FETCH_PAGE_SUCCESS";
export const FETCH_PAGE_ERROR = "FETCH_PAGE_ERROR";
export const RECEIVE_POSTS = "RECEIVE_POSTS";
|
<gh_stars>1-10
import WebSocket from 'ws';
import { validate, execute, specifiedRules } from 'graphql';
import ApolloClient from 'apollo-client';
import {
SubscriptionClient,
addGraphQLSubscriptions,
} from 'subscriptions-transport-ws';
// Execute all GraphQL requests directly without going over the network:
// validate against the local schema, then execute in-process.
class ServerInterface {
  constructor(optionsData) {
    this.schema = optionsData.schema;
    this.optionsData = optionsData;
  }
  /**
   * Validate and execute a GraphQL operation against the local schema.
   * Returns `{ errors: [...] }` on validation failure or thrown errors,
   * otherwise the execution result.
   */
  async query({ query, variables, operationName }) {
    try {
      // Start from the spec's standard rules; append any custom ones.
      let validationRules = specifiedRules;
      const customValidationRules = this.optionsData.validationRules;
      if (customValidationRules) {
        validationRules = validationRules.concat(customValidationRules);
      }
      const validationErrors = validate(this.schema, query, validationRules);
      if (validationErrors.length > 0) {
        return { errors: validationErrors };
      }
      const result = await execute(
        this.schema,
        query,
        this.optionsData.rootValue,
        this.optionsData.context,
        variables,
        operationName,
      );
      return result;
    } catch (contextError) {
      // Surface execution-setup errors in the same shape as validation errors.
      return { errors: [contextError] };
    }
  }
}
const wsClient = new SubscriptionClient(
`ws://localhost:8181/wgraphql`,
{
reconnect: true,
connectionParams: {},
},
WebSocket,
);
export { wsClient };
export default function createApolloClient(options) {
const networkInterface = new ServerInterface(options);
const networkInterfaceWithSubscriptions = addGraphQLSubscriptions(
networkInterface,
wsClient,
);
return new ApolloClient({
reduxRootSelector: state => state.apollo,
networkInterface: networkInterfaceWithSubscriptions,
queryDeduplication: true,
ssrMode: true,
});
}
|
#!/bin/sh
# Xcode build phase: embed, sign and strip CocoaPods frameworks.
set -e

# Ensure the app bundle's Frameworks directory exists before embedding.
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework named by $1 into the app's Frameworks folder, then
# strips invalid architectures, re-signs it, and (pre-Xcode 7) embeds the
# Swift runtime libraries it links against.
install_framework()
{
  # Resolve the source: a build product, its basename, or a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's main binary (framework layout or bare binary).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity, when code signing is both
# required and allowed by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    # BUG FIX: quote the identity so values containing spaces
    # (e.g. "iPhone Developer: ...") are passed as a single argument.
    /usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes from binary $1 every architecture slice not listed in VALID_ARCHS,
# so fat simulator/device builds pass App Store validation on device.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the test pod's framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "Pods-whawhawhatLib_Tests/whawhawhatLib.framework"
fi
|
<reponame>dciborow/cortana-pixel-tracker-java
/*
* Copyright (c) Microsoft. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root for full license information.
*/
package com.microsoft.azure.pixeltracker.server;
import com.microsoft.azure.eventhubs.spring.EventHubAutoConfiguration;
import com.microsoft.azure.eventhubs.spring.EventHubTemplate;
import com.microsoft.azure.server.pixeltracker.api.handlers.Handler;
import com.microsoft.azure.server.pixeltracker.api.handlers.impl.EventHubSendHandler;
import com.microsoft.azure.server.pixeltracker.api.handlers.impl.JsonQueryStringHandler;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
@SpringBootApplication
public class Application {

    private static Logger logger = LogManager.getLogger();

    /** Application entry point: boots the Spring context. */
    public static void main(String[] args) {
        logger.info("Application Start");
        SpringApplication.run(Application.class, args);
    }

    /**
     * Assembles all "enable"-qualified handler beans into a single chain and
     * exposes its head as the primary handler bean.
     */
    @Bean
    @Autowired
    @Qualifier(value = "enable")
    Handler handlers(Handler... handlers) {
        return chainHandlers(handlers);
    }

    @Bean
    @Qualifier(value = "enable")
    Handler jsonQueryStringHandler() {
        return new JsonQueryStringHandler();
    }

    @Bean
    @Qualifier(value = "enable")
    Handler eventHubSendHandler() throws Exception {
        return new EventHubSendHandler(eventHubAutoConfiguration());
    }

    @Bean
    EventHubAutoConfiguration eventHubAutoConfiguration() throws Exception {
        return new EventHubAutoConfiguration(eventHubClientProperties());
    }

    @Bean
    EventHubTemplate eventHubClientProperties() throws Exception {
        return new EventHubTemplate(eventHubName, serviceBusNamespaceName, sharedAccessSignatureKeyName, sharedAccessSignatureKey);
    }

    // Event Hub connection settings, injected from the process environment.
    @Value(value = "#{environment.EventHubServiceNamespace}")
    String serviceBusNamespaceName;

    @Value("#{environment.EventHub}")
    String eventHubName;

    @Value("#{environment.EventHubServicePolicy}")
    String sharedAccessSignatureKeyName;

    @Value("#{environment.EventHubServiceKey}")
    String sharedAccessSignatureKey;

    /**
     * Links the handlers into a chain (handler[0] -> handler[1] -> ...)
     * and returns the head of the chain.
     */
    private Handler chainHandlers(Handler[] handler) {
        int length = handler.length;
        if (length > 1) {
            for (int i = 1; i < length; i++) {
                // BUG FIX: previously this always linked handler[0] -> handler[1],
                // leaving every handler past index 1 unreachable.
                handler[i - 1].setNextOperation(handler[i]);
            }
        }
        return handler[0];
    }
}
|
package weixin.popular.bean.card.code.get;
import com.alibaba.fastjson.annotation.JSONField;
import weixin.popular.bean.BaseResult;
/**
 * Member info query API - response parameters.
 *
 * @author zhongmin
 *
 */
public class MemberGetResult extends BaseResult {
    /**
     * The user's openid.
     */
    private String openid;
    private String nickname;
    private String code;
    private String cardId;
    /*
     * Offline membership number.
     */
    @JSONField(name = "membership_number")
    private String membershipNumber;
    /**
     * Status of the card corresponding to the current code: <br>
     * NORMAL normal <br>
     * CONSUMED redeemed <br>
     * EXPIRE expired <br>
     * GIFTING being gifted<br>
     * GIFT_TIMEOUT gift transfer timed out<br>
     * DELETE deleted<br>
     * UNAVAILABLE invalidated <br>
     * If the code was never added, or was claimed through gifting, the API
     * uniformly reports the error: invalid serial code
     */
    @JSONField(name = "user_card_status")
    private String userCardStatus;
    /*
     * Bonus points balance.
     */
    private Double bonus;
    /*
     * Gender.
     */
    private String sex;
    /*
     * User-defined custom information.
     */
    @JSONField(name = "user_info")
    private UserCustomInfo userInfo;
    /**
     * @return the user's openid
     */
    public String getOpenid() {
        return openid;
    }
    /**
     * @param openid the user's openid
     */
    public void setOpenid(String openid) {
        this.openid = openid;
    }
    /**
     * Status of the card corresponding to the current code: <br>
     * NORMAL normal <br>
     * CONSUMED redeemed <br>
     * EXPIRE expired <br>
     * GIFTING being gifted<br>
     * GIFT_TIMEOUT gift transfer timed out<br>
     * DELETE deleted<br>
     * UNAVAILABLE invalidated <br>
     * If the code was never added, or was claimed through gifting, the API
     * uniformly reports the error: invalid serial code
     * @return the card status
     */
    public String getUserCardStatus() {
        return userCardStatus;
    }
    /**
     * Status of the card corresponding to the current code: <br>
     * NORMAL normal <br>
     * CONSUMED redeemed <br>
     * EXPIRE expired <br>
     * GIFTING being gifted<br>
     * GIFT_TIMEOUT gift transfer timed out<br>
     * DELETE deleted<br>
     * UNAVAILABLE invalidated <br>
     * If the code was never added, or was claimed through gifting, the API
     * uniformly reports the error: invalid serial code
     * @param userCardStatus the card status
     */
    public void setUserCardStatus(String userCardStatus) {
        this.userCardStatus = userCardStatus;
    }
    public String getNickname() {
        return nickname;
    }
    public void setNickname(String nickname) {
        this.nickname = nickname;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getCardId() {
        return cardId;
    }
    public void setCardId(String cardId) {
        this.cardId = cardId;
    }
    public String getMembershipNumber() {
        return membershipNumber;
    }
    public void setMembershipNumber(String membershipNumber) {
        this.membershipNumber = membershipNumber;
    }
    public Double getBonus() {
        return bonus;
    }
    public void setBonus(Double bonus) {
        this.bonus = bonus;
    }
    public String getSex() {
        return sex;
    }
    public void setSex(String sex) {
        this.sex = sex;
    }
    public UserCustomInfo getUserInfo() {
        return userInfo;
    }
    public void setUserInfo(UserCustomInfo userInfo) {
        this.userInfo = userInfo;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.