text stringlengths 1 1.05M |
|---|
<html>
<head>
<title>Report</title>
</head>
<body>
<h1>Report</h1>
<!--Table headings -->
<table>
<tr>
<th>Name</th>
<th>Age</th>
</tr>
<!--Loop through the data and populate the table. -->
<?php
// Emit one table row per entry. $name and $age are parallel arrays that
// the including script must populate before this template runs.
// Values are HTML-escaped with htmlspecialchars() so user-supplied data
// cannot inject markup/scripts (XSS) into the report page.
for ($i = 0; $i < count($name); $i++)
{
    echo "<tr>";
    echo "<td>" . htmlspecialchars($name[$i]) . "</td>";
    echo "<td>" . htmlspecialchars($age[$i]) . "</td>";
    echo "</tr>";
}
?>
</table>
</body>
</html>
// ConsolidatedWeather models one day's forecast entry.
// NOTE(review): the snake_case field names mirror the upstream API's JSON
// payload (looks like MetaWeather's "consolidated_weather") — confirm.
export interface ConsolidatedWeather {
id: number;
// Human-readable weather state plus its short code.
weather_state_name: string;
weather_state_abbr: string;
// Compass point for the wind direction, e.g. "NW".
wind_direction_compass: string;
// Timestamp/date values arrive serialized as strings.
created: string;
applicable_date: string;
// Temperature fields — units not evident from this file; presumably Celsius.
min_temp: number;
max_temp: number;
the_temp: number;
wind_speed: number;
// Numeric wind direction (presumably degrees) alongside the compass string.
wind_direction: number;
air_pressure: number;
humidity: number;
visibility: number;
// presumably a forecast-confidence score — TODO confirm against API docs
predictability: number;
}
|
// Copyright 2020 The Merlin Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"encoding/json"
"time"
"github.com/kelseyhightower/envconfig"
v1 "k8s.io/api/core/v1"
"github.com/gojek/merlin/pkg/transformer/feast"
"github.com/gojek/merlin/pkg/transformer/spec"
internalValidator "github.com/gojek/merlin/pkg/validator"
"github.com/gojek/mlp/api/pkg/instrumentation/newrelic"
"github.com/gojek/mlp/api/pkg/instrumentation/sentry"
)
const (
MaxDeployedVersion = 2
)
// Config is the top-level application configuration, populated from
// environment variables by envconfig (see InitConfigEnv).
type Config struct {
Environment string `envconfig:"ENVIRONMENT" default:"dev"`
Port int `envconfig:"PORT" default:"8080"`
LoggerDestinationURL string `envconfig:"LOGGER_DESTINATION_URL"`
Sentry sentry.Config `envconfig:"SENTRY" split_words:"false"`
NewRelic newrelic.Config `envconfig:"NEWRELIC" split_words:"false" `
// Path to the deployment environments file; startup fails if unset.
EnvironmentConfigPath string `envconfig:"DEPLOYMENT_CONFIG_PATH" required:"true"`
NumOfQueueWorkers int `envconfig:"NUM_OF_WORKERS" default:"2"`
SwaggerPath string `envconfig:"SWAGGER_PATH" default:"./swagger.yaml"`
// Nested sections, each populated from its own env variables.
DbConfig DatabaseConfig
VaultConfig VaultConfig
ImageBuilderConfig ImageBuilderConfig
// Loaded from the file at EnvironmentConfigPath, not from env vars
// (see InitConfigEnv).
EnvironmentConfigs []EnvironmentConfig
AuthorizationConfig AuthorizationConfig
MlpAPIConfig MlpAPIConfig
FeatureToggleConfig FeatureToggleConfig
ReactAppConfig ReactAppConfig
UI UIConfig
StandardTransformerConfig StandardTransformerConfig
MlflowConfig MlflowConfig
}
// UIConfig stores the configuration for the UI.
type UIConfig struct {
// Directory the compiled UI assets are served from, and the SPA entry file.
StaticPath string `envconfig:"UI_STATIC_PATH" default:"ui/build"`
IndexPath string `envconfig:"UI_INDEX_PATH" default:"index.html"`
}
// ReactAppConfig carries settings handed to the React frontend
// (every tag uses the REACT_APP_ prefix the frontend expects).
type ReactAppConfig struct {
OauthClientID string `envconfig:"REACT_APP_OAUTH_CLIENT_ID"`
Environment string `envconfig:"REACT_APP_ENVIRONMENT"`
SentryDSN string `envconfig:"REACT_APP_SENTRY_DSN"`
// JSON-encoded list of documentation links; see Documentations.Decode.
DocURL Documentations `envconfig:"REACT_APP_MERLIN_DOCS_URL"`
AlertEnabled bool `envconfig:"REACT_APP_ALERT_ENABLED"`
MonitoringEnabled bool `envconfig:"REACT_APP_MONITORING_DASHBOARD_ENABLED"`
HomePage string `envconfig:"REACT_APP_HOMEPAGE"`
MerlinURL string `envconfig:"REACT_APP_MERLIN_API"`
MlpURL string `envconfig:"REACT_APP_MLP_API"`
FeastCoreURL string `envconfig:"REACT_APP_FEAST_CORE_API"`
DockerRegistries string `envconfig:"REACT_APP_DOCKER_REGISTRIES"`
MaxAllowedReplica int `envconfig:"REACT_APP_MAX_ALLOWED_REPLICA" default:"20"`
}
// Documentations is the list of documentation links surfaced in the UI.
type Documentations []Documentation

// Documentation is one labelled hyperlink.
type Documentation struct {
	Label string `json:"label"`
	Href  string `json:"href"`
}

// Decode implements envconfig's Decoder interface: the environment value
// is a JSON array of {"label", "href"} objects. On parse failure the
// receiver is left unchanged and the error is returned.
func (docs *Documentations) Decode(value string) error {
	var parsed Documentations
	if unmarshalErr := json.Unmarshal([]byte(value), &parsed); unmarshalErr != nil {
		return unmarshalErr
	}
	*docs = parsed
	return nil
}
// DatabaseConfig holds database connection and migration settings.
// NOTE(review): default port 5432 suggests PostgreSQL — confirm where the
// DSN is assembled.
type DatabaseConfig struct {
Host string `envconfig:"DATABASE_HOST" required:"true"`
Port int `envconfig:"DATABASE_PORT" default:"5432"`
User string `envconfig:"DATABASE_USER" required:"true"`
Password string `envconfig:"DATABASE_PASSWORD" required:"true"`
Database string `envconfig:"DATABASE_NAME" default:"mlp"`
// Source URL for schema migrations.
MigrationPath string `envconfig:"DATABASE_MIGRATIONS_PATH" default:"file://db-migrations"`
}
// ImageBuilderConfig configures the in-cluster (Kaniko) image build jobs,
// with a parallel set of fields for prediction-job images.
type ImageBuilderConfig struct {
ClusterName string `envconfig:"IMG_BUILDER_CLUSTER_NAME"`
GcpProject string `envconfig:"IMG_BUILDER_GCP_PROJECT"`
BuildContextURI string `envconfig:"IMG_BUILDER_BUILD_CONTEXT_URI"`
ContextSubPath string `envconfig:"IMG_BUILDER_CONTEXT_SUB_PATH"`
DockerfilePath string `envconfig:"IMG_BUILDER_DOCKERFILE_PATH" default:"./Dockerfile"`
BaseImage string `envconfig:"IMG_BUILDER_BASE_IMAGE"`
PredictionJobBuildContextURI string `envconfig:"IMG_BUILDER_PREDICTION_JOB_BUILD_CONTEXT_URI"`
PredictionJobContextSubPath string `envconfig:"IMG_BUILDER_PREDICTION_JOB_CONTEXT_SUB_PATH"`
PredictionJobDockerfilePath string `envconfig:"IMG_BUILDER_PREDICTION_JOB_DOCKERFILE_PATH" default:"./Dockerfile"`
PredictionJobBaseImage string `envconfig:"IMG_BUILDER_PREDICTION_JOB_BASE_IMAGE"`
BuildNamespace string `envconfig:"IMG_BUILDER_NAMESPACE" default:"mlp"`
DockerRegistry string `envconfig:"IMG_BUILDER_DOCKER_REGISTRY"`
// Duration string, e.g. "10m" (kept as string, unlike Retention below).
BuildTimeout string `envconfig:"IMG_BUILDER_TIMEOUT" default:"10m"`
KanikoImage string `envconfig:"IMG_BUILDER_KANIKO_IMAGE" default:"gcr.io/kaniko-project/executor:v1.6.0"`
// How long to keep the image building job resource in the Kubernetes cluster. Default: 2 days (48 hours).
Retention time.Duration `envconfig:"IMG_BUILDER_RETENTION" default:"48h"`
// Scheduling constraints for build pods; both are JSON-encoded in env
// (see Tolerations.Decode / NodeSelectors.Decode).
Tolerations Tolerations `envconfig:"IMG_BUILDER_TOLERATIONS"`
NodeSelectors NodeSelectors `envconfig:"IMG_BUILDER_NODE_SELECTORS"`
MaximumRetry int32 `envconfig:"IMG_BUILDER_MAX_RETRY" default:"3"`
}
// Tolerations is the list of Kubernetes tolerations attached to
// image-building pods.
type Tolerations []v1.Toleration

// Decode implements envconfig's Decoder interface: the environment value
// is a JSON array of core/v1 Toleration objects. On parse failure the
// receiver is left unchanged and the error is returned.
func (t *Tolerations) Decode(value string) error {
	var parsed Tolerations
	if unmarshalErr := json.Unmarshal([]byte(value), &parsed); unmarshalErr != nil {
		return unmarshalErr
	}
	*t = parsed
	return nil
}
// NodeSelectors maps node labels to required values for scheduling
// image-building pods.
type NodeSelectors map[string]string

// Decode implements envconfig's Decoder interface: the environment value
// is a JSON object of label -> value pairs. On parse failure the receiver
// is left unchanged and the error is returned.
func (ns *NodeSelectors) Decode(value string) error {
	var parsed NodeSelectors
	if unmarshalErr := json.Unmarshal([]byte(value), &parsed); unmarshalErr != nil {
		return unmarshalErr
	}
	*ns = parsed
	return nil
}
// VaultConfig holds the secret-store (Vault) address and access token.
type VaultConfig struct {
Address string `envconfig:"VAULT_ADDRESS"`
Token string `envconfig:"VAULT_TOKEN"`
}
// AuthorizationConfig toggles authorization and points at the authz server.
type AuthorizationConfig struct {
AuthorizationEnabled bool `envconfig:"AUTHORIZATION_ENABLED" default:"true"`
AuthorizationServerURL string `envconfig:"AUTHORIZATION_SERVER_URL" default:"http://localhost:4466"`
}
// FeatureToggleConfig groups optional feature switches.
type FeatureToggleConfig struct {
MonitoringConfig MonitoringConfig
AlertConfig AlertConfig
}
// MonitoringConfig controls the monitoring dashboard feature and its URLs.
type MonitoringConfig struct {
MonitoringEnabled bool `envconfig:"MONITORING_DASHBOARD_ENABLED" default:"false"`
MonitoringBaseURL string `envconfig:"MONITORING_DASHBOARD_BASE_URL"`
MonitoringJobBaseURL string `envconfig:"MONITORING_DASHBOARD_JOB_BASE_URL"`
}
// AlertConfig controls alerting and its GitLab/Warden integrations.
type AlertConfig struct {
AlertEnabled bool `envconfig:"ALERT_ENABLED" default:"false"`
GitlabConfig GitlabConfig
WardenConfig WardenConfig
}
// GitlabConfig identifies the repositories/branches that store dashboard
// and alert definitions, plus the API credentials to reach them.
type GitlabConfig struct {
BaseURL string `envconfig:"GITLAB_BASE_URL"`
Token string `envconfig:"GITLAB_TOKEN"`
DashboardRepository string `envconfig:"GITLAB_DASHBOARD_REPOSITORY"`
DashboardBranch string `envconfig:"GITLAB_DASHBOARD_BRANCH" default:"master"`
AlertRepository string `envconfig:"GITLAB_ALERT_REPOSITORY"`
AlertBranch string `envconfig:"GITLAB_ALERT_BRANCH" default:"master"`
}
// WardenConfig points at the Warden API host.
type WardenConfig struct {
APIHost string `envconfig:"WARDEN_API_HOST"`
}
// MlpAPIConfig holds the MLP API endpoint and its encryption key;
// both are mandatory at startup.
type MlpAPIConfig struct {
APIHost string `envconfig:"MLP_API_HOST" required:"true"`
EncryptionKey string `envconfig:"MLP_API_ENCRYPTION_KEY" required:"true"`
}
// StandardTransformerConfig configures the standard transformer image and
// its Feast (feature store) integrations.
type StandardTransformerConfig struct {
ImageName string `envconfig:"STANDARD_TRANSFORMER_IMAGE_NAME" required:"true"`
// JSON-encoded list of serving endpoints; see FeastServingURLs.Decode.
FeastServingURLs FeastServingURLs `envconfig:"FEAST_SERVING_URLS" required:"true"`
FeastCoreURL string `envconfig:"FEAST_CORE_URL" required:"true"`
FeastCoreAuthAudience string `envconfig:"FEAST_CORE_AUTH_AUDIENCE" required:"true"`
EnableAuth bool `envconfig:"FEAST_AUTH_ENABLED" default:"false"`
// These pointers stay non-nil even when their env vars are unset
// (envconfig quirk) — see ToFeastStorageConfigs for the re-validation.
FeastRedisConfig *FeastRedisConfig `envconfig:"FEAST_REDIS_CONFIG"`
FeastBigtableConfig *FeastBigtableConfig `envconfig:"FEAST_BIG_TABLE_CONFIG"`
// Base64 Service Account
BigtableCredential string `envconfig:"FEAST_BIGTABLE_CREDENTIAL"`
DefaultFeastSource spec.ServingSource `envconfig:"DEFAULT_FEAST_SOURCE" default:"BIGTABLE"`
Jaeger JaegerConfig
}
// ToFeastStorageConfigs builds the Feast storage-config map from whichever
// of the Redis / Bigtable sections are present AND pass validation.
//
// Both pointers must be re-validated here because envconfig leaves them
// non-nil even when their environment variables were never set; this is a
// bug in the envconfig library
// (https://github.com/kelseyhightower/envconfig/issues/113).
func (stc *StandardTransformerConfig) ToFeastStorageConfigs() feast.FeastStorageConfig {
	storageConfigs := feast.FeastStorageConfig{}
	v := internalValidator.NewValidator()
	if redis := stc.FeastRedisConfig; redis != nil && v.Struct(redis) == nil {
		storageConfigs[spec.ServingSource_REDIS] = redis.ToFeastStorage()
	}
	if bigtable := stc.FeastBigtableConfig; bigtable != nil && v.Struct(bigtable) == nil {
		storageConfigs[spec.ServingSource_BIGTABLE] = bigtable.ToFeastStorageWithCredential(stc.BigtableCredential)
	}
	return storageConfigs
}
// FeastServingURLs is the set of selectable Feast serving endpoints.
type FeastServingURLs []FeastServingURL

// FeastServingURL describes one Feast serving endpoint as presented to the UI.
type FeastServingURL struct {
	Host       string `json:"host"`
	Label      string `json:"label"`
	Icon       string `json:"icon"`
	SourceType string `json:"source_type"`
}

// Decode implements envconfig's Decoder interface: the environment value
// is a JSON array of endpoint objects. On parse failure the receiver is
// left unchanged and the error is returned.
func (u *FeastServingURLs) Decode(value string) error {
	var parsed FeastServingURLs
	if unmarshalErr := json.Unmarshal([]byte(value), &parsed); unmarshalErr != nil {
		return unmarshalErr
	}
	*u = parsed
	return nil
}

// URLs returns the Host of every configured endpoint, preserving order.
func (u *FeastServingURLs) URLs() []string {
	hosts := []string{}
	for _, endpoint := range *u {
		hosts = append(hosts, endpoint.Host)
	}
	return hosts
}
// JaegerConfig configures distributed tracing. Tracing is off by default
// (Disabled defaults to "true").
// NOTE(review): SamplerParam and Disabled are kept as strings — presumably
// forwarded verbatim to the Jaeger client's config; confirm at the call site.
type JaegerConfig struct {
AgentHost string `envconfig:"JAEGER_AGENT_HOST"`
AgentPort string `envconfig:"JAEGER_AGENT_PORT"`
SamplerType string `envconfig:"JAEGER_SAMPLER_TYPE" default:"probabilistic"`
SamplerParam string `envconfig:"JAEGER_SAMPLER_PARAM" default:"0.01"`
Disabled string `envconfig:"JAEGER_DISABLED" default:"true"`
}
// MlflowConfig points at the MLflow tracking server; mandatory at startup.
type MlflowConfig struct {
TrackingURL string `envconfig:"MLFLOW_TRACKING_URL" required:"true"`
}
// InitConfigEnv loads the full application configuration: scalar fields
// come from environment variables (via envconfig), after which the
// per-environment deployment configs are read from the file named by
// EnvironmentConfigPath. Returns an error if any required variable is
// missing or fails to decode.
func InitConfigEnv() (*Config, error) {
	cfg := Config{}
	if err := envconfig.Process("", &cfg); err != nil {
		return nil, err
	}
	cfg.EnvironmentConfigs = initEnvironmentConfigs(cfg.EnvironmentConfigPath)
	return &cfg, nil
}
|
#!/bin/sh
# Container entrypoint: patch config.json with the MQTT settings from the
# environment, then exec the container's main command.
set -e
echo "Updating config"
echo "MQTT_HOST: ${MQTT_HOST}"
echo "MQTT_PORT: ${MQTT_PORT}"
echo "MQTT_USERNAME: ${MQTT_USERNAME}"
# Never print the real password: container logs are often aggregated and
# readable by people who must not see secrets.
echo "MQTT_PASSWORD: ********"
# 'c\' replaces the whole line matching each key with the runtime value.
sed -i "/mqtt_server_uri/c\ \"mqtt_server_uri\":\"tcp://${MQTT_HOST}:${MQTT_PORT}\"," config.json
sed -i "/mqtt_server_username/c\ \"mqtt_server_username\":\"${MQTT_USERNAME}\"," config.json
sed -i "/mqtt_server_password/c\ \"mqtt_server_password\":\"${MQTT_PASSWORD}\"," config.json
# Replace this shell with the requested command (standard entrypoint pattern).
exec "$@"
#!/bin/sh
# Copyright (c) 2017-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Install libdb5.3 (Berkeley DB).
export LC_ALL=C
set -e
# Require the base directory argument.
if [ -z "${1}" ]; then
echo "Usage: $0 <base-dir> [<extra-bdb-configure-flag> ...]"
echo
echo "Must specify a single argument: the directory in which db5 will be built."
echo "This is probably \`pwd\` if you're at the root of the groestlcoin repository."
exit 1
fi

# Resolve a directory to its absolute physical path (symlinks resolved).
expand_path() {
cd "${1}" && pwd -P
}

# Quote the base-dir argument so paths containing spaces or glob
# characters survive word splitting (was unquoted: expand_path ${1}).
BDB_PREFIX="$(expand_path "${1}")/db5"; shift;
BDB_VERSION='db-5.3.28.NC'
BDB_HASH='76a25560d9e52a198d37a31440fd07632b5f1f8f9f2b6d5438f4bc3e7c9013ef'
BDB_URL="https://www.groestlcoin.org/${BDB_VERSION}.tar.gz"

# True if the named command exists on PATH.
check_exists() {
command -v "$1" >/dev/null
}
sha256_check() {
# Args: <sha256_hash> <filename>
#
# Verify a file's SHA-256 using whichever tool this platform provides:
# GNU coreutils sha256sum, BSD sha256 (FreeBSD's variant takes the hash
# as an argument), or Perl shasum as a last resort. The checker exits
# non-zero on mismatch, which aborts the script under 'set -e'.
if check_exists sha256sum; then
echo "${1}  ${2}" | sha256sum -c
elif check_exists sha256; then
if [ "$(uname)" = "FreeBSD" ]; then
sha256 -c "${1}" "${2}"
else
echo "${1}  ${2}" | sha256 -c
fi
else
echo "${1}  ${2}" | shasum -a 256 -c
fi
}
http_get() {
# Args: <url> <filename> <sha256_hash>
#
# It's acceptable that we don't require SSL here because we manually verify
# content hashes below.
#
# Skips the download when the file already exists; prefers curl, falls
# back to wget. The checksum is always verified afterwards, so a stale or
# tampered file still fails the run.
if [ -f "${2}" ]; then
echo "File ${2} already exists; not downloading again"
elif check_exists curl; then
curl --insecure --retry 5 "${1}" -o "${2}"
else
wget --no-check-certificate "${1}" -O "${2}"
fi
sha256_check "${3}" "${2}"
}
# Download, verify, and unpack the Berkeley DB source tree.
mkdir -p "${BDB_PREFIX}"
http_get "${BDB_URL}" "${BDB_VERSION}.tar.gz" "${BDB_HASH}"
# Quote the tarball name (was unquoted) for consistency with the rest of
# the script and to guard against IFS/word-splitting surprises.
tar -xzvf "${BDB_VERSION}.tar.gz" -C "$BDB_PREFIX"
cd "${BDB_PREFIX}/${BDB_VERSION}/"
# Apply a patch necessary when building with clang and c++11 (see https://community.oracle.com/thread/3952592)
patch --ignore-whitespace -p2 << 'EOF'
--- original/src/dbinc/atomic.h 2013-09-09 17:35:08.000000000 +0200
+++ patched/src/dbinc/atomic.h 2020-12-15 17:47:20.535316800 +0100
@@ -70,7 +70,7 @@
* These have no memory barriers; the caller must include them when necessary.
*/
#define atomic_read(p) ((p)->value)
-#define atomic_init(p, val) ((p)->value = (val))
+#define atomic_init_db(p, val) ((p)->value = (val))
#ifdef HAVE_ATOMIC_SUPPORT
@@ -144,7 +144,7 @@
#define atomic_inc(env, p) __atomic_inc(p)
#define atomic_dec(env, p) __atomic_dec(p)
#define atomic_compare_exchange(env, p, o, n) \
- __atomic_compare_exchange((p), (o), (n))
+ __atomic_compare_exchange_db((p), (o), (n))
static inline int __atomic_inc(db_atomic_t *p)
{
int temp;
@@ -176,7 +176,7 @@
* http://gcc.gnu.org/onlinedocs/gcc-4.1.0/gcc/Atomic-Builtins.html
* which configure could be changed to use.
*/
-static inline int __atomic_compare_exchange(
+static inline int __atomic_compare_exchange_db(
db_atomic_t *p, atomic_value_t oldval, atomic_value_t newval)
{
atomic_value_t was;
@@ -206,7 +206,7 @@
#define atomic_dec(env, p) (--(p)->value)
#define atomic_compare_exchange(env, p, oldval, newval) \
(DB_ASSERT(env, atomic_read(p) == (oldval)), \
- atomic_init(p, (newval)), 1)
+ atomic_init_db(p, (newval)), 1)
#else
#define atomic_inc(env, p) __atomic_inc(env, p)
#define atomic_dec(env, p) __atomic_dec(env, p)
diff -Naur original/src/dbinc/win_db.h patched/src/dbinc/win_db.h
--- original/src/dbinc/win_db.h 2013-09-09 17:35:08.000000000 +0200
+++ patched/src/dbinc/win_db.h 2020-12-15 17:47:29.330049300 +0100
@@ -46,7 +46,7 @@
#include <windows.h>
#include <winsock2.h>
#ifndef DB_WINCE
-#include <WinIoCtl.h>
+#include <winioctl.h>
#endif
#ifdef HAVE_GETADDRINFO
diff -Naur original/src/mp/mp_fget.c patched/src/mp/mp_fget.c
--- original/src/mp/mp_fget.c 2013-09-09 17:35:09.000000000 +0200
+++ patched/src/mp/mp_fget.c 2020-12-15 17:47:20.618078700 +0100
@@ -649,7 +649,7 @@
/* Initialize enough so we can call __memp_bhfree. */
alloc_bhp->flags = 0;
- atomic_init(&alloc_bhp->ref, 1);
+ atomic_init_db(&alloc_bhp->ref, 1);
#ifdef DIAGNOSTIC
if ((uintptr_t)alloc_bhp->buf & (sizeof(size_t) - 1)) {
__db_errx(env, DB_STR("3025",
@@ -955,7 +955,7 @@
MVCC_MPROTECT(bhp->buf, mfp->pagesize,
PROT_READ);
- atomic_init(&alloc_bhp->ref, 1);
+ atomic_init_db(&alloc_bhp->ref, 1);
MUTEX_LOCK(env, alloc_bhp->mtx_buf);
alloc_bhp->priority = bhp->priority;
alloc_bhp->pgno = bhp->pgno;
diff -Naur original/src/mp/mp_mvcc.c patched/src/mp/mp_mvcc.c
--- original/src/mp/mp_mvcc.c 2013-09-09 17:35:09.000000000 +0200
+++ patched/src/mp/mp_mvcc.c 2020-12-15 17:47:20.593279600 +0100
@@ -276,7 +276,7 @@
#else
memcpy(frozen_bhp, bhp, SSZA(BH, buf));
#endif
- atomic_init(&frozen_bhp->ref, 0);
+ atomic_init_db(&frozen_bhp->ref, 0);
if (mutex != MUTEX_INVALID)
frozen_bhp->mtx_buf = mutex;
else if ((ret = __mutex_alloc(env, MTX_MPOOL_BH,
@@ -428,7 +428,7 @@
#endif
alloc_bhp->mtx_buf = mutex;
MUTEX_LOCK(env, alloc_bhp->mtx_buf);
- atomic_init(&alloc_bhp->ref, 1);
+ atomic_init_db(&alloc_bhp->ref, 1);
F_CLR(alloc_bhp, BH_FROZEN);
}
diff -Naur original/src/mp/mp_region.c patched/src/mp/mp_region.c
--- original/src/mp/mp_region.c 2013-09-09 17:35:09.000000000 +0200
+++ patched/src/mp/mp_region.c 2020-12-15 17:47:20.566905300 +0100
@@ -245,7 +245,7 @@
MTX_MPOOL_FILE_BUCKET, 0, &htab[i].mtx_hash)) != 0)
return (ret);
SH_TAILQ_INIT(&htab[i].hash_bucket);
- atomic_init(&htab[i].hash_page_dirty, 0);
+ atomic_init_db(&htab[i].hash_page_dirty, 0);
}
/*
@@ -302,7 +302,7 @@
} else
hp->mtx_hash = mtx_base + (i % dbenv->mp_mtxcount);
SH_TAILQ_INIT(&hp->hash_bucket);
- atomic_init(&hp->hash_page_dirty, 0);
+ atomic_init_db(&hp->hash_page_dirty, 0);
#ifdef HAVE_STATISTICS
hp->hash_io_wait = 0;
hp->hash_frozen = hp->hash_thawed = hp->hash_frozen_freed = 0;
diff -Naur original/src/mutex/mut_method.c patched/src/mutex/mut_method.c
--- original/src/mutex/mut_method.c 2013-09-09 17:35:09.000000000 +0200
+++ patched/src/mutex/mut_method.c 2020-12-15 17:47:20.642426300 +0100
@@ -474,7 +474,7 @@
MUTEX_LOCK(env, mtx);
ret = atomic_read(v) == oldval;
if (ret)
- atomic_init(v, newval);
+ atomic_init_db(v, newval);
MUTEX_UNLOCK(env, mtx);
return (ret);
diff -Naur original/src/mutex/mut_tas.c patched/src/mutex/mut_tas.c
--- original/src/mutex/mut_tas.c 2013-09-09 17:35:09.000000000 +0200
+++ patched/src/mutex/mut_tas.c 2020-12-15 17:47:20.662067500 +0100
@@ -47,7 +47,7 @@
#ifdef HAVE_SHARED_LATCHES
if (F_ISSET(mutexp, DB_MUTEX_SHARED))
- atomic_init(&mutexp->sharecount, 0);
+ atomic_init_db(&mutexp->sharecount, 0);
else
#endif
if (MUTEX_INIT(&mutexp->tas)) {
@@ -536,7 +536,7 @@
F_CLR(mutexp, DB_MUTEX_LOCKED);
/* Flush flag update before zeroing count */
MEMBAR_EXIT();
- atomic_init(&mutexp->sharecount, 0);
+ atomic_init_db(&mutexp->sharecount, 0);
} else {
DB_ASSERT(env, sharecount > 0);
MEMBAR_EXIT();
EOF
# The packaged config.guess and config.sub are ancient (2009) and can cause build issues.
# Replace them with modern versions.
# See https://github.com/bitcoin/bitcoin/issues/16064
CONFIG_GUESS_URL='https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=4550d2f15b3a7ce2451c1f29500b9339430c877f'
CONFIG_GUESS_HASH='c8f530e01840719871748a8071113435bdfdf75b74c57e78e47898edea8754ae'
CONFIG_SUB_URL='https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=4550d2f15b3a7ce2451c1f29500b9339430c877f'
CONFIG_SUB_HASH='3969f7d5f6967ccc6f792401b8ef3916a1d1b1d0f0de5a4e354c95addb8b800e'
rm -f "dist/config.guess"
rm -f "dist/config.sub"
http_get "${CONFIG_GUESS_URL}" dist/config.guess "${CONFIG_GUESS_HASH}"
http_get "${CONFIG_SUB_URL}" dist/config.sub "${CONFIG_SUB_HASH}"
# Configure from the canonical build_unix directory: static library only,
# PIC (required to link into the wallet), C++ API on, replication off.
# Extra user-supplied configure flags are forwarded via "${@}".
cd build_unix/
"${BDB_PREFIX}/${BDB_VERSION}/dist/configure" \
--enable-cxx --disable-shared --disable-replication --with-pic --prefix="${BDB_PREFIX}" \
"${@}"
make install
echo
echo "db5 build complete."
echo
# shellcheck disable=SC2016
echo 'When compiling groestlcoind, run `./configure` in the following way:'
echo
echo "  export BDB_PREFIX='${BDB_PREFIX}'"
# shellcheck disable=SC2016
echo '  ./configure BDB_LIBS="-L${BDB_PREFIX}/lib -ldb_cxx-5.3" BDB_CFLAGS="-I${BDB_PREFIX}/include" ...'
|
module Ror
  # Looks up canned description files for ActionController methods.
  class Actioncontroller
    METHOD_DESCRIPTION_DIR = 'ror/method_descriptions/actioncontroller'

    # Returns the path of the description file for the given method name.
    def self.show modus
      self.file_location "#{modus}.txt"
    end

    def self.file_location file_name
      # NOTE(review): FILE_DIR is not defined in this file — presumably a
      # top-level constant defined elsewhere; confirm it resolves at runtime.
      File.join FILE_DIR, METHOD_DESCRIPTION_DIR, file_name
    end

    # Bug fix: a bare `private` has no effect on singleton (def self.)
    # methods, so file_location was silently public. private_class_method
    # actually hides it.
    private_class_method :file_location
  end
end
|
#!/bin/bash -f
# Vivado (TM) v2016.2 (64-bit)
#
# Filename : dmem.sh
# Simulator : Synopsys Verilog Compiler Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Fri Jun 15 01:37:35 +0800 2018
# IP Build 1577682 on Fri Jun 3 12:00:54 MDT 2016
#
# usage: dmem.sh [-help]
# usage: dmem.sh [-lib_map_path]
# usage: dmem.sh [-noclean_files]
# usage: dmem.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'dmem.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
# ********************************************************************************************************
# Directory path for design sources and include directories (if any) wrt this path
ref_dir="."
# Override directory with 'export_sim_ref_dir' env path value if set in the shell
if [[ (! -z "$export_sim_ref_dir") && ($export_sim_ref_dir != "") ]]; then
ref_dir="$export_sim_ref_dir"
fi
# Command line options
vlogan_opts="-full64 -timescale=1ps/1ps"
vhdlan_opts="-full64"
vcs_elab_opts="-full64 -debug_pp -t ps -licqueue -l elaborate.log"
vcs_sim_opts="-ucli -licqueue -l simulate.log"
# Design libraries
design_libs=(dist_mem_gen_v8_0_10 xil_defaultlib)
# Simulation root library directory
sim_lib_dir="vcs"
# Script info
echo -e "dmem.sh - Script generated by export_simulation (Vivado v2016.2 (64-bit)-id)\n"
# Main steps
# Top-level driver: validates the command-line switch, prepares the
# simulator setup (library mappings + library dirs), then runs the
# compile -> elaborate -> simulate flow in order.
run()
{
check_args $# $1
setup $1 $2
compile
elaborate
simulate
}
# RUN_STEP: <compile>
# Analyze all Verilog sources into their design libraries with vlogan;
# each invocation's output is appended to vlogan.log.
compile()
{
# Compile design files
vlogan -work dist_mem_gen_v8_0_10 $vlogan_opts +v2k \
"$ref_dir/../../../ipstatic/dist_mem_gen_v8_0_10/simulation/dist_mem_gen_v8_0.v" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k \
"$ref_dir/../../../../Mips54_LoadBoard_vMDU.srcs/sources_1/ip/dmem/sim/dmem.v" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k \
glbl.v \
2>&1 | tee -a vlogan.log
}
# RUN_STEP: <elaborate>
# Link the analyzed design plus Xilinx glbl into the dmem_simv binary.
elaborate()
{
vcs $vcs_elab_opts xil_defaultlib.dmem xil_defaultlib.glbl -o dmem_simv
}
# RUN_STEP: <simulate>
# Run the elaborated binary under UCLI control with the generated do-file.
simulate()
{
./dmem_simv $vcs_sim_opts -do simulate.do
}
# STEP: setup
# Dispatch on the first command-line switch; always (re)creates the
# library directory tree afterwards. $2 is the optional compiled-library
# path forwarded to create_lib_mappings.
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./dmem.sh -help\" for more information)\n"
exit 1
fi
create_lib_mappings $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
create_lib_mappings $2
esac
create_lib_dir
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Define design library mappings
# Writes synopsys_sim.setup, mapping each design library to its
# sub-directory under $sim_lib_dir. If a compiled-library path was
# supplied, an OTHERS= line chains to that installation's setup file.
# An existing setup file is kept untouched unless a new path is given.
create_lib_mappings()
{
file="synopsys_sim.setup"
if [[ -e $file ]]; then
if [[ ($1 == "") ]]; then
return
else
rm -rf $file
fi
fi
touch $file
lib_map_path=""
if [[ ($1 != "") ]]; then
lib_map_path="$1"
fi
for (( i=0; i<${#design_libs[*]}; i++ )); do
lib="${design_libs[i]}"
mapping="$lib:$sim_lib_dir/$lib"
echo $mapping >> $file
done
if [[ ($lib_map_path != "") ]]; then
incl_ref="OTHERS=$lib_map_path/synopsys_sim.setup"
echo $incl_ref >> $file
fi
}
# Create design library directory paths
# Starts from a clean root library tree, then makes one sub-directory per
# design library listed in $design_libs.
create_lib_dir()
{
if [[ -e $sim_lib_dir ]]; then
rm -rf $sim_lib_dir
fi
for lib in "${design_libs[@]}"; do
lib_dir="$sim_lib_dir/$lib"
if [[ ! -e $lib_dir ]]; then
mkdir -p $lib_dir
fi
done
}
# Delete generated data from the previous run
# Removes simulator artifacts (logs, binaries, work dirs, lock files) and
# then recreates an empty library directory tree.
reset_run()
{
files_to_remove=(ucli.key dmem_simv vlogan.log vhdlan.log compile.log elaborate.log simulate.log .vlogansetup.env .vlogansetup.args .vcs_lib_lock scirocco_command.log 64 AN.DB csrc dmem_simv.daidir)
for (( i=0; i<${#files_to_remove[*]}; i++ )); do
file="${files_to_remove[i]}"
if [[ -e $file ]]; then
rm -rf $file
fi
done
create_lib_dir
}
# Check command line arguments
# $1 is the argument count, $2 the first switch. Rejects unknown switches
# and prints usage (which exits) for -help/-h.
check_args()
{
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./dmem.sh -help\" for more information)\n"
exit 1
fi
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Prints the help text and exits (non-zero, as generated by Vivado).
usage()
{
msg="Usage: dmem.sh [-help]\n\
Usage: dmem.sh [-lib_map_path]\n\
Usage: dmem.sh [-reset_run]\n\
Usage: dmem.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
run $1 $2
|
#!/bin/bash
# midplane <labelled-image> <output-image>
# Extracts the single-slab region at the middle x index (max_x / 2) of the
# input image's bounding box, keeping the full y/z extent.
midplane () {
  ltr="$1" ; shift
  lout="$1" ; shift
  # Write a shifted copy (+1) to tr.nii.gz — presumably so background
  # voxels participate in the bounding-box computation; TODO confirm.
  seg_maths "$ltr" -add 1 tr.nii.gz
  # seg_stats -B prints the bounding box as: minx maxx miny maxy minz maxz.
  read minx maxx miny maxy minz maxz <<< $(seg_stats tr -B)
  # POSIX arithmetic expansion; the old $[ ... ] form is deprecated in bash.
  n=$(( maxx / 2 ))
  mirtk extract-image-region "$ltr" "$lout" -Rx1 "$n" -Rx2 "$n" -Ry1 "$miny" -Ry2 "$maxy" -Rz1 "$minz" -Rz2 "$maxz" # -Rt1 $mint -Rt2 $maxt
}
|
// Bug fix: `describe`/`it` were pulled from eslint's private internal
// module 'eslint/lib/testers/event-generator-tester', which is not a
// public API and breaks across eslint versions. mocha already supplies
// all four hooks (before/after were taken from it below), so import the
// whole set from mocha instead.
const { describe, it, before, after } = require('mocha');
const expect = require('expect.js');
const sinon = require('sinon');
const request = require('supertest-as-promised');
const httpStatus = require('http-status');
const UptimeService = require('../../app/services/uptime.service');
const app = require('../../server').app;
const loginHelpers = require('../helpers/login');
const USER = require('../fixtures/user.json');
// Integration tests for the uptime HTTP endpoints. UptimeService methods
// are stubbed with sinon, so these tests pin the controller's behavior:
// auth handling, error mapping, response envelope, and that query-string
// parameters are forwarded (as strings) to the service untouched.
describe('UptimeController', () => {
let token = null;
// Create a fixture user once and cache a JWT for the authorized requests.
before((done) => {
loginHelpers.createUser(USER)
.then(user => loginHelpers.getJWT(user.username))
.then(jwt => {
token = jwt;
done();
});
});
// Remove the fixture user and drop the token after the suite.
after((done) => {
loginHelpers.deleteUser(USER.username)
.then(() => {
token = null;
done();
});
});
// Each endpoint gets the same trio of cases: 401 without a token,
// 500 when the service rejects, and 200 with params forwarded verbatim.
describe('getSLA()', () => {
it('should return unauthorized status', (done) => {
const stub = sinon.stub(UptimeService, 'getSLA').resolves(100);
request(app)
.get('/uptimes/sla')
.expect(httpStatus.UNAUTHORIZED)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return 500 status', (done) => {
const stub = sinon.stub(UptimeService, 'getSLA').rejects('error');
request(app)
.get('/uptimes/sla')
.set('token', token)
.expect(httpStatus.INTERNAL_SERVER_ERROR)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return values, and pass queryParams', (done) => {
const stub = sinon.stub(UptimeService, 'getSLA').resolves(100);
request(app)
.get('/uptimes/sla?uptimeId=UptimeId&category=Service&kind=Ingress&namespace=console-server&downStartDate=4&duration=12')
.set('token', token)
.expect(httpStatus.OK)
.then((res) => {
expect(res.body.result).to.equal(true);
expect(res.body.data).to.eql(100);
// Query params reach the service as strings, unconverted.
expect(stub.getCall(0).args[0]).to.eql({
uptimeId: 'UptimeId',
category: 'Service',
kind: 'Ingress',
namespace: 'console-server',
downStartDate: '4',
duration: '12'
});
stub.restore();
done();
})
.catch(done);
});
});
describe('getUptimes()', () => {
it('should return unauthorized status', (done) => {
const stub = sinon.stub(UptimeService, 'getUptimes').resolves(100);
request(app)
.get('/uptimes/uptimes')
.expect(httpStatus.UNAUTHORIZED)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return 500 status', (done) => {
const stub = sinon.stub(UptimeService, 'getUptimes').rejects('error');
request(app)
.get('/uptimes/uptimes')
.set('token', token)
.expect(httpStatus.INTERNAL_SERVER_ERROR)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return values, and pass queryParams', (done) => {
const stub = sinon.stub(UptimeService, 'getUptimes').resolves(100);
request(app)
.get('/uptimes/uptimes?uptimeId=UptimeId&category=Service&kind=Ingress&namespace=console-server&interval=daily&since=12&to=27')
.set('token', token)
.expect(httpStatus.OK)
.then((res) => {
expect(res.body.result).to.equal(true);
expect(res.body.data).to.eql(100);
expect(stub.getCall(0).args[0]).to.eql({
uptimeId: 'UptimeId',
category: 'Service',
kind: 'Ingress',
namespace: 'console-server',
interval: 'daily',
since: '12',
to: '27'
});
stub.restore();
done();
})
.catch(done);
});
});
describe('getDowntimes()', () => {
it('should return unauthorized status', (done) => {
const stub = sinon.stub(UptimeService, 'getDowntimes').resolves(100);
request(app)
.get('/uptimes/downtimes')
.expect(httpStatus.UNAUTHORIZED)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return 500 status', (done) => {
const stub = sinon.stub(UptimeService, 'getDowntimes').rejects('error');
request(app)
.get('/uptimes/downtimes')
.set('token', token)
.expect(httpStatus.INTERNAL_SERVER_ERROR)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return values, and pass queryParams', (done) => {
const stub = sinon.stub(UptimeService, 'getDowntimes').resolves(100);
request(app)
.get('/uptimes/downtimes?uptimeId=UptimeId&category=Service&kind=Ingress&namespace=console-server&downStartDate=12&duration=27')
.set('token', token)
.expect(httpStatus.OK)
.then((res) => {
expect(res.body.result).to.equal(true);
expect(res.body.data).to.eql(100);
expect(stub.getCall(0).args[0]).to.eql({
uptimeId: 'UptimeId',
category: 'Service',
kind: 'Ingress',
namespace: 'console-server',
downStartDate: '12',
duration: '27'
});
stub.restore();
done();
})
.catch(done);
});
});
describe('getInfras()', () => {
it('should return unauthorized status', (done) => {
const stub = sinon.stub(UptimeService, 'getInfras').resolves('infras');
request(app)
.get('/uptimes/infras')
.expect(httpStatus.UNAUTHORIZED)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return 500 status', (done) => {
const stub = sinon.stub(UptimeService, 'getInfras').rejects('error');
request(app)
.get('/uptimes/infras')
.set('token', token)
.expect(httpStatus.INTERNAL_SERVER_ERROR)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return values, and pass queryParams', (done) => {
const stub = sinon.stub(UptimeService, 'getInfras').resolves('infras');
request(app)
.get('/uptimes/infras?kind=Mongo')
.set('token', token)
.expect(httpStatus.OK)
.then((res) => {
expect(res.body.result).to.equal(true);
expect(res.body.data).to.eql('infras');
expect(stub.getCall(0).args[0]).to.eql({
kind: 'Mongo'
});
stub.restore();
done();
})
.catch(done);
});
});
describe('getInfrasUptimes()', () => {
it('should return unauthorized status', (done) => {
const stub = sinon.stub(UptimeService, 'getInfrasUptimes').resolves('uptimes');
request(app)
.get('/uptimes/infras/uptimes')
.expect(httpStatus.UNAUTHORIZED)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return 500 status', (done) => {
const stub = sinon.stub(UptimeService, 'getInfrasUptimes').rejects('error');
request(app)
.get('/uptimes/infras/uptimes')
.set('token', token)
.expect(httpStatus.INTERNAL_SERVER_ERROR)
.then(() => {
stub.restore();
done();
})
.catch(done);
});
it('should return values, and pass queryParams', (done) => {
const stub = sinon.stub(UptimeService, 'getInfrasUptimes').resolves('uptimes');
request(app)
.get('/uptimes/infras/uptimes?kind=Mongo')
.set('token', token)
.expect(httpStatus.OK)
.then((res) => {
expect(res.body.result).to.equal(true);
expect(res.body.data).to.eql('uptimes');
expect(stub.getCall(0).args[0]).to.eql({
kind: 'Mongo'
});
stub.restore();
done();
})
.catch(done);
});
});
});
|
#!/bin/bash -ex
#
# (Re)starts the local metrics stack: an InfluxDB container and a Grafana
# container attached to a dedicated "influxdb" docker network.
#
cd "$(dirname "$0")"

# Stop if already running
./stop.sh

# Images may be overridden via the environment.
: "${INFLUXDB_IMAGE:=influxdb:1.6}"
: "${GRAFANA_IMAGE:=grafana/grafana:5.2.3}"

# Quote expansions so the image names never word-split (ShellCheck SC2086).
docker pull "$INFLUXDB_IMAGE"
docker pull "$GRAFANA_IMAGE"

# Recreate the network from scratch; ignore failure when it doesn't exist yet.
docker network remove influxdb || true
docker network create influxdb

docker run \
  --detach \
  --name=influxdb \
  --net=influxdb \
  --publish 8086:8086 \
  --user "$(id -u):$(id -g)" \
  --volume "$PWD"/influxdb.conf:/etc/influxdb/influxdb.conf:ro \
  --volume "$PWD"/lib/influxdb:/var/lib/influxdb \
  --env INFLUXDB_DB=testnet \
  --env INFLUXDB_ADMIN_USER=admin \
  --env INFLUXDB_ADMIN_PASSWORD=admin \
  "$INFLUXDB_IMAGE" -config /etc/influxdb/influxdb.conf /init-influxdb.sh

docker run \
  --detach \
  --name=grafana \
  --net=influxdb \
  --publish 3000:3000 \
  --user "$(id -u):$(id -g)" \
  --env GF_PATHS_CONFIG=/grafana.ini \
  --volume "$PWD"/grafana.ini:/grafana.ini:ro \
  --volume "$PWD"/lib/grafana:/var/lib/grafana \
  --volume "$PWD"/grafana-provisioning/:/etc/grafana/provisioning \
  "$GRAFANA_IMAGE"

# Give the containers a moment to come up before reporting status.
sleep 5
./status.sh
exit 0
|
// app/scripts/services/exporter.js
'use strict';
var fs = require('fs');
var remote = require('electron').remote;
var dialog = remote.dialog;
/**
 * @ngdoc service
 * @name electroCrudApp.exporter
 * @description
 * # exporter
 * Service in the electroCrudApp.
 */
angular.module('electroCrudApp')
  .service('exporter', function () {
    // Supported export formats. XLSX is declared but only CSV is implemented.
    var exportTypes = {
      CSV: 'csv',
      XLSX: 'xlsx'
    };

    // Minimal SheetJS-style workbook container (currently unused; kept for
    // the eventual XLSX implementation).
    function Workbook() {
      if (!(this instanceof Workbook)) return new Workbook();
      this.SheetNames = [];
      this.Sheets = {};
    }

    /**
     * Appends one CSV record to `fileName`.
     * Accepts either an array of cell values or a plain object (its own
     * enumerable values are used, in key order). Null/undefined cells become
     * empty strings, and embedded double quotes are doubled per RFC 4180 so
     * the quoted output stays well-formed.
     */
    function processCSVRow(row, fileName) {
      var rowArray = [];
      if (!Array.isArray(row)) {
        for (var k in row) {
          if (row.hasOwnProperty(k)) {
            rowArray.push(row[k]);
          }
        }
      } else {
        rowArray = row;
      }
      var escaped = rowArray.map(function (value) {
        return value == null ? '' : String(value).replace(/"/g, '""');
      });
      fs.appendFileSync(fileName, '"' + escaped.join('","') + '"\n');
    }

    return {
      types: exportTypes,
      /**
       * Prompts the user for a destination file and writes `rows` (plus an
       * optional `columns` header row) in the requested format. Only CSV is
       * handled; other types are silently ignored.
       */
      exportAsFile: function (exportType, rows, columns) {
        var options = {
          filters: [
            {name: 'CSV', extensions: ['csv']}
          ]
        };
        dialog.showSaveDialog(options, function (filePath) {
          // filePath is undefined when the user cancels the dialog.
          if (!filePath) {
            return;
          }
          switch (exportType) {
            case exportTypes.CSV:
              if (columns) {
                processCSVRow(columns, filePath);
              }
              rows.forEach(function (item) {
                processCSVRow(item, filePath);
              });
              break;
          }
        });
      }
    };
  });
|
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection (needed on older matplotlib)

# Build a 100x100 coordinate grid over [-2, 2] x [-2, 2].
x = np.linspace(start=-2.0, stop=2.0, num=100)
y = np.linspace(start=-2.0, stop=2.0, num=100)
x, y = np.meshgrid(x, y)

# Surface height: a radially symmetric sine of the squared distance from
# the origin. (Despite the original naming, this is not a fractal.)
z = np.sin(x ** 2 + y ** 2)

# Render the surface with a diverging red-blue colormap.
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(111, projection='3d')
ax.plot_surface(x, y, z, cmap='RdBu_r')
plt.show()
/// <summary>
/// Assigns a random <see cref="PieceColor"/> (cast from a draw in 0..5) to
/// <paramref name="piece"/>. When <paramref name="safe"/> is true, keeps
/// sampling (up to 100 attempts) until a colorblind-safe color is drawn;
/// the piece is left untouched if no safe color appears within the budget.
/// </summary>
public static void SetRandomColor(ref Piece piece, bool safe)
{
    const int maxAttempts = 100;
    for (int attempt = 0; attempt < maxAttempts; attempt++)
    {
        // Draw one of the six concrete colors at random.
        PieceColor candidate = (PieceColor)Random.Range(0, 6);

        // Without the safety requirement the first draw always wins;
        // otherwise only a colorblind-safe draw is accepted.
        if (!safe || IsColorSafeForColorblind(candidate))
        {
            piece.Color = candidate;
            break;
        }
    }
}
// Returns whether a color is considered safe for colorblind individuals.
// NOTE(review): the original body was an empty stub with no return statement,
// which does not compile. Until the real distinguishability rules for the
// palette are supplied, accept every concrete color and reject only
// Undefined — confirm the intended colorblind-safe subset with design.
private static bool IsColorSafeForColorblind(PieceColor color)
{
    return color != PieceColor.Undefined;
}
public class NameChecker
{
    /// <summary>
    /// Validates a name: it must be non-null, non-empty, at most 10
    /// characters long, and consist solely of letters and digits.
    /// </summary>
    public static bool IsValidName(string name)
    {
        return !string.IsNullOrEmpty(name)
            && name.Length <= 10
            && name.All(char.IsLetterOrDigit);
    }
}
# app/revisioner/tests/e2e/inspected.py
# -*- coding: utf-8 -*-
tables_and_views = [
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16442,
"table_name": "customers",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16442/1",
"column_name": "customernumber",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16442/2",
"column_name": "customername",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/3",
"column_name": "contactlastname",
"column_description": None,
"ordinal_position": 3,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/4",
"column_name": "contactfirstname",
"column_description": None,
"ordinal_position": 4,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/5",
"column_name": "phone",
"column_description": None,
"ordinal_position": 5,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/6",
"column_name": "addressline1",
"column_description": None,
"ordinal_position": 6,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/7",
"column_name": "addressline2",
"column_description": None,
"ordinal_position": 7,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": "NULL::character varying"
},
{
"column_object_id": "16442/8",
"column_name": "city",
"column_description": None,
"ordinal_position": 8,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/9",
"column_name": "state",
"column_description": None,
"ordinal_position": 9,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": "NULL::character varying"
},
{
"column_object_id": "16442/10",
"column_name": "postalcode",
"column_description": None,
"ordinal_position": 10,
"data_type": "character varying",
"max_length": 15,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": "NULL::character varying"
},
{
"column_object_id": "16442/11",
"column_name": "country",
"column_description": None,
"ordinal_position": 11,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/12",
"column_name": "salesrepemployeenumber",
"column_description": None,
"ordinal_position": 12,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16442/13",
"column_name": "creditlimit",
"column_description": None,
"ordinal_position": 13,
"data_type": "numeric",
"max_length": 10,
"numeric_scale": 2,
"is_nullable": True,
"is_primary": False,
"default_value": "NULL::numeric"
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16522,
"table_name": "departments",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16522/1",
"column_name": "id",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": "nextval('app.departments_id_seq'::regclass)"
},
{
"column_object_id": "16522/2",
"column_name": "dept_name",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 40,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16501,
"table_name": "orderdetails",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16501/1",
"column_name": "ordernumber",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16501/2",
"column_name": "productcode",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 15,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16501/3",
"column_name": "quantityordered",
"column_description": None,
"ordinal_position": 3,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16501/4",
"column_name": "priceeach",
"column_description": None,
"ordinal_position": 4,
"data_type": "numeric",
"max_length": 10,
"numeric_scale": 2,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16501/5",
"column_name": "orderlinenumber",
"column_description": None,
"ordinal_position": 5,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16465,
"table_name": "orders",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16465/1",
"column_name": "ordernumber",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16465/2",
"column_name": "orderdate",
"column_description": None,
"ordinal_position": 2,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16465/3",
"column_name": "requireddate",
"column_description": None,
"ordinal_position": 3,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16465/4",
"column_name": "shippeddate",
"column_description": None,
"ordinal_position": 4,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16465/5",
"column_name": "status",
"column_description": None,
"ordinal_position": 5,
"data_type": "character varying",
"max_length": 15,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16465/6",
"column_name": "comments",
"column_description": None,
"ordinal_position": 6,
"data_type": "text",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16465/7",
"column_name": "customernumber",
"column_description": None,
"ordinal_position": 7,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16478,
"table_name": "payments",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16478/1",
"column_name": "customernumber",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16478/2",
"column_name": "checknumber",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16478/3",
"column_name": "paymentdate",
"column_description": None,
"ordinal_position": 3,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16478/4",
"column_name": "amount",
"column_description": None,
"ordinal_position": 4,
"data_type": "numeric",
"max_length": 10,
"numeric_scale": 2,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16456,
"table_name": "productlines",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16456/1",
"column_name": "productline",
"column_description": None,
"ordinal_position": 1,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16456/2",
"column_name": "textdescription",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 4000,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": "NULL::character varying"
},
{
"column_object_id": "16456/3",
"column_name": "htmldescription",
"column_description": None,
"ordinal_position": 3,
"data_type": "text",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16456/4",
"column_name": "image",
"column_description": None,
"ordinal_position": 4,
"data_type": "bytea",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16488,
"table_name": "products",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16488/1",
"column_name": "productcode",
"column_description": None,
"ordinal_position": 1,
"data_type": "character varying",
"max_length": 15,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16488/2",
"column_name": "productname",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 70,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/3",
"column_name": "productline",
"column_description": None,
"ordinal_position": 3,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/4",
"column_name": "productscale",
"column_description": None,
"ordinal_position": 4,
"data_type": "character varying",
"max_length": 10,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/5",
"column_name": "productvendor",
"column_description": None,
"ordinal_position": 5,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/6",
"column_name": "productdescription",
"column_description": None,
"ordinal_position": 6,
"data_type": "text",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/7",
"column_name": "quantityinstock",
"column_description": None,
"ordinal_position": 7,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/8",
"column_name": "buyprice",
"column_description": None,
"ordinal_position": 8,
"data_type": "numeric",
"max_length": 10,
"numeric_scale": 2,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16488/9",
"column_name": "msrp",
"column_description": None,
"ordinal_position": 9,
"data_type": "numeric",
"max_length": 10,
"numeric_scale": 2,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16441,
"table_schema": "app",
"table_object_id": 16516,
"table_name": "sales_representatives",
"table_type": "view",
"properties": {},
"columns": [
{
"column_object_id": "16516/1",
"column_name": "customernumber",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16516/2",
"column_name": "customername",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 50,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16516/3",
"column_name": "salesrepemployeenumber",
"column_description": None,
"ordinal_position": 3,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16516/4",
"column_name": "emp_no",
"column_description": None,
"ordinal_position": 4,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16516/5",
"column_name": "name",
"column_description": None,
"ordinal_position": 5,
"data_type": "text",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16386,
"table_schema": "employees",
"table_object_id": 16437,
"table_name": "current_dept_emp",
"table_type": "view",
"properties": {},
"columns": [
{
"column_object_id": "16437/1",
"column_name": "emp_no",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16437/2",
"column_name": "dept_no",
"column_description": None,
"ordinal_position": 2,
"data_type": "character",
"max_length": 4,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16437/3",
"column_name": "from_date",
"column_description": None,
"ordinal_position": 3,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16437/4",
"column_name": "to_date",
"column_description": None,
"ordinal_position": 4,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16386,
"table_schema": "employees",
"table_object_id": 16392,
"table_name": "departments",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16392/1",
"column_name": "dept_no",
"column_description": None,
"ordinal_position": 1,
"data_type": "character",
"max_length": 4,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16392/2",
"column_name": "dept_name",
"column_description": None,
"ordinal_position": 2,
"data_type": "character varying",
"max_length": 40,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16392/3",
"column_name": "started_on",
"column_description": None,
"ordinal_position": 3,
"data_type": "timestamp without time zone",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16386,
"table_schema": "employees",
"table_object_id": 16418,
"table_name": "dept_emp",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16418/1",
"column_name": "emp_no",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16418/2",
"column_name": "dept_no",
"column_description": None,
"ordinal_position": 2,
"data_type": "character",
"max_length": 4,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16418/3",
"column_name": "from_date",
"column_description": None,
"ordinal_position": 3,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16418/4",
"column_name": "to_date",
"column_description": None,
"ordinal_position": 4,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16386,
"table_schema": "employees",
"table_object_id": 16433,
"table_name": "dept_emp_latest_date",
"table_type": "view",
"properties": {},
"columns": [
{
"column_object_id": "16433/1",
"column_name": "emp_no",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16433/2",
"column_name": "from_date",
"column_description": None,
"ordinal_position": 2,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16433/3",
"column_name": "to_date",
"column_description": None,
"ordinal_position": 3,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
}
]
},
{
"schema_object_id": 16386,
"table_schema": "employees",
"table_object_id": 16399,
"table_name": "dept_manager",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16399/1",
"column_name": "dept_no",
"column_description": None,
"ordinal_position": 1,
"data_type": "character",
"max_length": 4,
"numeric_scale": None,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16399/2",
"column_name": "emp_no",
"column_description": None,
"ordinal_position": 2,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16399/3",
"column_name": "from_date",
"column_description": None,
"ordinal_position": 3,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16399/4",
"column_name": "to_date",
"column_description": None,
"ordinal_position": 4,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16399/5",
"column_name": "extras",
"column_description": None,
"ordinal_position": 5,
"data_type": "text",
"max_length": None,
"numeric_scale": None,
"is_nullable": True,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16399/6",
"column_name": "rating",
"column_description": "The NPS score for this manager",
"ordinal_position": 6,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": True,
"is_primary": False,
"default_value": "5"
}
]
},
{
"schema_object_id": 16386,
"table_schema": "employees",
"table_object_id": 16387,
"table_name": "employees",
"table_type": "base table",
"properties": {},
"columns": [
{
"column_object_id": "16387/1",
"column_name": "emp_no",
"column_description": None,
"ordinal_position": 1,
"data_type": "integer",
"max_length": 32,
"numeric_scale": 0,
"is_nullable": False,
"is_primary": True,
"default_value": ""
},
{
"column_object_id": "16387/2",
"column_name": "birth_date",
"column_description": None,
"ordinal_position": 2,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16387/3",
"column_name": "first_name",
"column_description": None,
"ordinal_position": 3,
"data_type": "character varying",
"max_length": 14,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16387/4",
"column_name": "last_name",
"column_description": None,
"ordinal_position": 4,
"data_type": "character varying",
"max_length": 16,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16387/5",
"column_name": "hire_date",
"column_description": None,
"ordinal_position": 5,
"data_type": "date",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
},
{
"column_object_id": "16387/6",
"column_name": "created_at",
"column_description": None,
"ordinal_position": 6,
"data_type": "timestamp without time zone",
"max_length": None,
"numeric_scale": None,
"is_nullable": False,
"is_primary": False,
"default_value": ""
}
]
}
]
indexes = [
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "customers",
"table_object_id": 16442,
"index_name": "customers_pkey",
"index_object_id": 16449,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX customers_pkey ON app.customers USING btree (customernumber)",
"columns": [
{
"column_name": "customernumber",
"ordinal_position": 1
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "departments",
"table_object_id": 16522,
"index_name": "departments_dept_name_key",
"index_object_id": 16528,
"is_unique": True,
"is_primary": False,
"definition": "CREATE UNIQUE INDEX departments_dept_name_key ON app.departments USING btree (dept_name)",
"columns": [
{
"column_name": "dept_name",
"ordinal_position": 1
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "departments",
"table_object_id": 16522,
"index_name": "app_departments_pkey",
"index_object_id": 16526,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX app_departments_pkey ON app.departments USING btree (id)",
"columns": [
{
"column_name": "id",
"ordinal_position": 1
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "orderdetails",
"table_object_id": 16501,
"index_name": "orderdetails_pkey",
"index_object_id": 16504,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX orderdetails_pkey ON app.orderdetails USING btree (ordernumber, productcode)",
"columns": [
{
"column_name": "ordernumber",
"ordinal_position": 1
},
{
"column_name": "productcode",
"ordinal_position": 2
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "orders",
"table_object_id": 16465,
"index_name": "orders_pkey",
"index_object_id": 16471,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX orders_pkey ON app.orders USING btree (ordernumber)",
"columns": [
{
"column_name": "ordernumber",
"ordinal_position": 1
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "payments",
"table_object_id": 16478,
"index_name": "payments_pkey",
"index_object_id": 16481,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX payments_pkey ON app.payments USING btree (customernumber, checknumber)",
"columns": [
{
"column_name": "customernumber",
"ordinal_position": 1
},
{
"column_name": "checknumber",
"ordinal_position": 2
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "productlines",
"table_object_id": 16456,
"index_name": "productlines_pkey",
"index_object_id": 16463,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX productlines_pkey ON app.productlines USING btree (productline)",
"columns": [
{
"column_name": "productline",
"ordinal_position": 1
}
]
},
{
"schema_name": "app",
"schema_object_id": 16441,
"table_name": "products",
"table_object_id": 16488,
"index_name": "products_pkey",
"index_object_id": 16494,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX products_pkey ON app.products USING btree (productcode)",
"columns": [
{
"column_name": "productcode",
"ordinal_position": 1
}
]
},
{
"schema_name": "employees",
"schema_object_id": 16386,
"table_name": "departments",
"table_object_id": 16392,
"index_name": "departments_dept_name_key",
"index_object_id": 16397,
"is_unique": True,
"is_primary": False,
"definition": "CREATE UNIQUE INDEX departments_dept_name_key ON employees.departments USING btree (dept_name)",
"columns": [
{
"column_name": "dept_name",
"ordinal_position": 1
}
]
},
{
"schema_name": "employees",
"schema_object_id": 16386,
"table_name": "departments",
"table_object_id": 16392,
"index_name": "employees_departments_pkey",
"index_object_id": 16395,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX employees_departments_pkey ON employees.departments USING btree (dept_no)",
"columns": [
{
"column_name": "dept_no",
"ordinal_position": 1
}
]
},
{
"schema_name": "employees",
"schema_object_id": 16386,
"table_name": "dept_emp",
"table_object_id": 16418,
"index_name": "dept_emp_pkey",
"index_object_id": 16421,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX dept_emp_pkey ON employees.dept_emp USING btree (emp_no, dept_no)",
"columns": [
{
"column_name": "emp_no",
"ordinal_position": 1
},
{
"column_name": "dept_no",
"ordinal_position": 2
}
]
},
{
"schema_name": "employees",
"schema_object_id": 16386,
"table_name": "dept_manager",
"table_object_id": 16399,
"index_name": "dept_manager_pkey",
"index_object_id": 16406,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX dept_manager_pkey ON employees.dept_manager USING btree (emp_no, dept_no)",
"columns": [
{
"column_name": "emp_no",
"ordinal_position": 1
},
{
"column_name": "dept_no",
"ordinal_position": 2
}
]
},
{
"schema_name": "employees",
"schema_object_id": 16386,
"table_name": "employees",
"table_object_id": 16387,
"index_name": "employees_pkey",
"index_object_id": 16390,
"is_unique": True,
"is_primary": True,
"definition": "CREATE UNIQUE INDEX employees_pkey ON employees.employees USING btree (emp_no)",
"columns": [
{
"column_name": "emp_no",
"ordinal_position": 1
}
]
}
]
|
import { Objects } from "./Objects";
/**
 * MongoDB-style comparison operators that can be attached to a field in a
 * query object. All members are optional; only the operators the caller
 * sets are meaningful. Extends the project's Objects base — presumably for
 * shared serialization behavior (TODO confirm against Objects).
 */
class Operator extends Objects {
  /** Matches values not equal to the given value. */
  $ne? : string | number;
  /** Matches values strictly less than the given value. */
  $lt? : string | number;
  /** Matches values less than or equal to the given value. */
  $lte?: string | number;
  /** Matches values strictly greater than the given value. */
  $gt? : string | number;
  /** Matches values greater than or equal to the given value. */
  $gte?: string | number;
}

/**
 * Map from field name to either a literal string value (exact match) or an
 * Operator describing how that field should be compared.
 */
type OperatorObjects = {
  [key: string]: string | Operator;
}

export { Operator, OperatorObjects };
|
#!/bin/bash
#
# Appends one sample of battery telemetry to ~/.battery_profile.csv.
# Saving order:
#   hh:mm:ss, dd.mm.yy, charge, capacity, counts, amperage, voltage
#
batterycsv="$HOME/.battery_profile.csv"

# Disable grep colouring so the extracted numbers stay clean.
export GREP_OPTIONS="--color=never"

# system_profiler is slow (seconds per invocation); query it once and let
# every extractor grep the cached output instead of re-running it 5 times.
powerdata="$(system_profiler SPPowerDataType)"

# Each extractor strips every non-digit character from its matching line.
# NOTE(review): this also removes a leading minus sign, so amperage is
# recorded unsigned even while discharging — confirm this is intended.
function charge(){
    grep 'Charge Remaining' <<< "$powerdata" | sed 's/[^0-9]*//g'
}
function capacity(){
    grep 'Full Charge Capacity' <<< "$powerdata" | sed 's/[^0-9]*//g'
}
function counts(){
    grep 'Cycle Count' <<< "$powerdata" | sed 's/[^0-9]*//g'
}
function amperage(){
    grep 'Amperage' <<< "$powerdata" | sed 's/[^0-9]*//g'
}
function voltage(){
    grep 'Voltage' <<< "$powerdata" | sed 's/[^0-9]*//g'
}

# Quote the command substitutions so an empty reading still occupies its
# CSV column instead of being dropped from printf's argument list.
printf "$(date +%H:%M:%S,%d.%m.%Y),%s,%s,%s,%s,%s\n" "$(charge)" "$(capacity)" "$(counts)" "$(amperage)" "$(voltage)" >> "$batterycsv"
package weka.attributeSelection;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.Range;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import java.util.Enumeration;
import java.util.Vector;
public class FCBFSearch extends ASSearch
    implements RankedOutputSearch, StartSetHandler, OptionHandler,
    TechnicalInformationHandler {

    /** for serialization */
    static final long serialVersionUID = 8209699587428369942L;

    /** Holds the starting set as an array of attributes */
    private int[] m_starting;

    /** Holds the start set for the search as a range */
    private Range m_startRange;

    /** Holds the ordered list of attributes */
    private int[] m_attributeList;

    /** Holds the list of attribute merit scores */
    private double[] m_attributeMerit;

    /** Data has class attribute---if unsupervised evaluator then no class */
    private boolean m_hasClass;

    /** Class index of the data if supervised evaluator */
    private int m_classIndex;

    /** The number of attributes */
    private int m_numAttribs;

    /**
     * A threshold by which to discard attributes---used by the
     * AttributeSelection module
     */
    private double m_threshold;

    /**
     * The number of attributes to select. -1 indicates that all attributes are
     * to be retained. Has precedence over m_threshold
     */
    private int m_numToSelect = -1;

    /** Used to compute the number to select */
    private int m_calculatedNumToSelect = -1;

    /*-----------------add begin 2004-11-15 by alan-----------------*/
    /**
     * Used to determine whether we create a new dataset according to the
     * selected features
     */
    private boolean m_generateOutput = false;

    /** Used to store the ref of the Evaluator we use */
    private ASEvaluation m_asEval;

    /** Holds the list of attribute merit scores generated by FCBF */
    private double[][] m_rankedFCBF;

    /** Hold the list of selected features */
    private double[][] m_selectedFeatures;
    /*-----------------add end 2004-11-15 by alan-----------------*/

    /**
     * Returns a string describing this search method
     *
     * @return a description of the search suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String globalInfo() {
        // FIX: trailing space added after "measure" so the concatenated help
        // text no longer reads "measureand relevance".
        return "FCBF : \n\nFeature selection method based on correlation measure "
            + "and relevance&redundancy analysis. "
            + "Use in conjunction with an attribute set evaluator (SymmetricalUncertAttributeEval).\n\n"
            + "For more information see:\n\n"
            + getTechnicalInformation().toString();
    }

    /**
     * Returns an instance of a TechnicalInformation object, containing detailed
     * information about the technical background of this class, e.g., paper
     * reference or book this class is based on.
     *
     * @return the technical information about this class
     */
    public TechnicalInformation getTechnicalInformation() {
        TechnicalInformation result;
        result = new TechnicalInformation(Type.INPROCEEDINGS);
        result.setValue(Field.AUTHOR, "<NAME> and <NAME>");
        result.setValue(Field.TITLE, "Feature Selection for High-Dimensional Data: A Fast Correlation-Based Filter Solution");
        result.setValue(Field.BOOKTITLE, "Proceedings of the Twentieth International Conference on Machine Learning");
        result.setValue(Field.YEAR, "2003");
        result.setValue(Field.PAGES, "856-863");
        result.setValue(Field.PUBLISHER, "AAAI Press");
        return result;
    }

    /**
     * Constructor
     */
    public FCBFSearch() {
        resetOptions();
    }

    /**
     * Returns the tip text for this property
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String numToSelectTipText() {
        return "Specify the number of attributes to retain. The default value "
            + "(-1) indicates that all attributes are to be retained. Use either "
            + "this option or a threshold to reduce the attribute set.";
    }

    /**
     * Specify the number of attributes to select from the ranked list. -1
     * indicates that all attributes are to be retained.
     *
     * @param n the number of attributes to retain
     */
    public void setNumToSelect(int n) {
        m_numToSelect = n;
    }

    /**
     * Gets the number of attributes to be retained.
     *
     * @return the number of attributes to retain
     */
    public int getNumToSelect() {
        return m_numToSelect;
    }

    /**
     * Gets the calculated number to select. This might be computed from a
     * threshold, or if < 0 is set as the number to select then it is set to the
     * number of attributes in the (transformed) data.
     *
     * @return the calculated number of attributes to select
     */
    public int getCalculatedNumToSelect() {
        if (m_numToSelect >= 0) {
            m_calculatedNumToSelect = m_numToSelect;
        }
        // FIX: guard against a NullPointerException when this is called
        // before search() has populated m_selectedFeatures.
        if (m_selectedFeatures != null && m_selectedFeatures.length > 0
            && m_selectedFeatures.length < m_calculatedNumToSelect) {
            m_calculatedNumToSelect = m_selectedFeatures.length;
        }
        return m_calculatedNumToSelect;
    }

    /**
     * Returns the tip text for this property
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String thresholdTipText() {
        return "Set threshold by which attributes can be discarded. Default value "
            + "results in no attributes being discarded. Use either this option or "
            + "numToSelect to reduce the attribute set.";
    }

    /**
     * Set the threshold by which the AttributeSelection module can discard
     * attributes.
     *
     * @param threshold the threshold.
     */
    public void setThreshold(double threshold) {
        m_threshold = threshold;
    }

    /**
     * Returns the threshold so that the AttributeSelection module can discard
     * attributes from the ranking.
     *
     * @return the threshold
     */
    public double getThreshold() {
        return m_threshold;
    }

    /**
     * Returns the tip text for this property
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String generateRankingTipText() {
        return "A constant option. FCBF is capable of generating"
            + " attribute rankings.";
    }

    /**
     * This is a dummy set method---Ranker is ONLY capable of producing a ranked
     * list of attributes for attribute evaluators.
     *
     * @param doRank this parameter is N/A and is ignored
     */
    public void setGenerateRanking(boolean doRank) {
    }

    /**
     * This is a dummy method. Ranker can ONLY be used with attribute evaluators
     * and as such can only produce a ranked list of attributes
     *
     * @return true all the time.
     */
    public boolean getGenerateRanking() {
        return true;
    }

    /**
     * Returns the tip text for this property
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String generateDataOutputTipText() {
        return "Generating new dataset according to the selected features."
            + " ";
    }

    /**
     * Sets the flag, by which the AttributeSelection module decide whether
     * create a new dataset according to the selected features.
     *
     * @param doGenerate the flag, by which the AttributeSelection module decide
     *        whether create a new dataset according to the selected features
     */
    public void setGenerateDataOutput(boolean doGenerate) {
        this.m_generateOutput = doGenerate;
    }

    /**
     * Returns the flag, by which the AttributeSelection module decide whether
     * create a new dataset according to the selected features.
     *
     * @return the flag, by which the AttributeSelection module decide whether
     *         create a new dataset according to the selected features.
     */
    public boolean getGenerateDataOutput() {
        return this.m_generateOutput;
    }

    /**
     * Returns the tip text for this property
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String startSetTipText() {
        // FIX: help-text typos ("seperated list off" -> "separated list of").
        return "Specify a set of attributes to ignore. "
            + " When generating the ranking, FCBF will not evaluate the attributes "
            + " in this list. "
            + "This is specified as a comma "
            + "separated list of attribute indexes starting at 1. It can include "
            + "ranges. Eg. 1,2,5-9,17.";
    }

    /**
     * Sets a starting set of attributes for the search. It is the search
     * method's responsibility to report this start set (if any) in its
     * toString() method.
     *
     * @param startSet a string containing a list of attributes (and or ranges),
     *        eg. 1,2,6,10-15.
     * @throws Exception if start set can't be set.
     */
    public void setStartSet(String startSet) throws Exception {
        m_startRange.setRanges(startSet);
    }

    /**
     * Returns a list of attributes (and or attribute ranges) as a String
     *
     * @return a list of attributes (and or attribute ranges)
     */
    public String getStartSet() {
        return m_startRange.getRanges();
    }

    /**
     * Returns an enumeration describing the available options.
     *
     * @return an enumeration of all the available options.
     */
    public Enumeration listOptions() {
        Vector newVector = new Vector(4);
        newVector.addElement(new Option(
            "\tSpecify Whether the selector generates a new dataset.",
            "D", 1, "-D <create dataset>"));
        newVector.addElement(new Option(
            "\tSpecify a starting set of attributes.\n"
                + "\t\tEg. 1,3,5-7.\n"
                + "\tAny starting attributes specified are\n"
                + "\tignored during the ranking.",
            "P", 1, "-P <start set>"));
        // FIX: "theshold" typo in the user-visible option description.
        newVector.addElement(new Option(
            "\tSpecify a threshold by which attributes\n"
                + "\tmay be discarded from the ranking.",
            "T", 1, "-T <threshold>"));
        newVector.addElement(new Option(
            "\tSpecify number of attributes to select",
            "N", 1, "-N <num to select>"));
        return newVector.elements();
    }

    /**
     * Parses a given list of options.
     * <p/>
     *
     * <!-- options-start -->
     * Valid options are:
     * <p/>
     *
     * <pre> -D &lt;create dataset&gt;
     * Specify Whether the selector generates a new dataset.</pre>
     *
     * <pre> -P &lt;start set&gt;
     * Specify a starting set of attributes.
     * Eg. 1,3,5-7.
     * Any starting attributes specified are
     * ignored during the ranking.</pre>
     *
     * <pre> -T &lt;threshold&gt;
     * Specify a threshold by which attributes
     * may be discarded from the ranking.</pre>
     *
     * <pre> -N &lt;num to select&gt;
     * Specify number of attributes to select</pre>
     *
     * <!-- options-end -->
     *
     * @param options the list of options as an array of strings
     * @throws Exception if an option is not supported
     */
    public void setOptions(String[] options) throws Exception {
        String optionString;
        resetOptions();
        optionString = Utils.getOption('D', options);
        if (optionString.length() != 0) {
            // BUG FIX: Boolean.getBoolean(s) looks up a SYSTEM PROPERTY named
            // s; it never parses the string itself, so "-D true" was silently
            // ignored. Boolean.parseBoolean parses the option value.
            setGenerateDataOutput(Boolean.parseBoolean(optionString));
        }
        optionString = Utils.getOption('P', options);
        if (optionString.length() != 0) {
            setStartSet(optionString);
        }
        optionString = Utils.getOption('T', options);
        if (optionString.length() != 0) {
            Double temp;
            temp = Double.valueOf(optionString);
            setThreshold(temp.doubleValue());
        }
        optionString = Utils.getOption('N', options);
        if (optionString.length() != 0) {
            setNumToSelect(Integer.parseInt(optionString));
        }
    }

    /**
     * Gets the current settings of ReliefFAttributeEval.
     *
     * @return an array of strings suitable for passing to setOptions()
     */
    public String[] getOptions() {
        String[] options = new String[8];
        int current = 0;
        options[current++] = "-D";
        options[current++] = "" + getGenerateDataOutput();
        if (!(getStartSet().equals(""))) {
            options[current++] = "-P";
            options[current++] = "" + startSetToString();
        }
        options[current++] = "-T";
        options[current++] = "" + getThreshold();
        options[current++] = "-N";
        options[current++] = "" + getNumToSelect();
        // pad remaining slots so setOptions never sees nulls
        while (current < options.length) {
            options[current++] = "";
        }
        return options;
    }

    /**
     * converts the array of starting attributes to a string. This is used by
     * getOptions to return the actual attributes specified as the starting set.
     * This is better than using m_startRanges.getRanges() as the same start set
     * can be specified in different ways from the command line---eg 1,2,3 ==
     * 1-3. This is to ensure that stuff that is stored in a database is
     * comparable.
     *
     * @return a comma separated list of individual attribute numbers as a
     *         String
     */
    private String startSetToString() {
        StringBuffer FString = new StringBuffer();
        boolean didPrint;
        if (m_starting == null) {
            return getStartSet();
        }
        for (int i = 0; i < m_starting.length; i++) {
            didPrint = false;
            if ((m_hasClass == false)
                || (m_hasClass == true && i != m_classIndex)) {
                FString.append((m_starting[i] + 1));
                didPrint = true;
            }
            if (i == (m_starting.length - 1)) {
                FString.append("");
            } else {
                if (didPrint) {
                    FString.append(",");
                }
            }
        }
        return FString.toString();
    }

    /**
     * Kind of a dummy search algorithm. Calls a Attribute evaluator to evaluate
     * each attribute not included in the startSet and then sorts them to
     * produce a ranked list of attributes.
     *
     * @param ASEval the attribute evaluator to guide the search
     * @param data the training instances.
     * @return an array (not necessarily ordered) of selected attribute indexes
     * @throws Exception if the search can't be completed
     */
    public int[] search(ASEvaluation ASEval, Instances data) throws Exception {
        int i, j;
        if (!(ASEval instanceof AttributeSetEvaluator)) {
            throw new Exception(ASEval.getClass().getName()
                + " is not an "
                + "Attribute Set evaluator!");
        }
        m_numAttribs = data.numAttributes();
        if (ASEval instanceof UnsupervisedAttributeEvaluator) {
            m_hasClass = false;
        } else {
            m_classIndex = data.classIndex();
            if (m_classIndex >= 0) {
                m_hasClass = true;
            } else {
                m_hasClass = false;
            }
        }
        // get the transformed data and check to see if the transformer
        // preserves a class index
        if (ASEval instanceof AttributeTransformer) {
            data = ((AttributeTransformer) ASEval).transformedHeader();
            if (m_classIndex >= 0 && data.classIndex() >= 0) {
                m_classIndex = data.classIndex();
                m_hasClass = true;
            }
        }
        m_startRange.setUpper(m_numAttribs - 1);
        if (!(getStartSet().equals(""))) {
            m_starting = m_startRange.getSelection();
        }
        // sl = number of attributes excluded from the ranking
        int sl = 0;
        if (m_starting != null) {
            sl = m_starting.length;
        }
        if ((m_starting != null) && (m_hasClass == true)) {
            // see if the supplied list contains the class index
            boolean ok = false;
            for (i = 0; i < sl; i++) {
                if (m_starting[i] == m_classIndex) {
                    ok = true;
                    break;
                }
            }
            if (ok == false) {
                sl++;
            }
        } else {
            if (m_hasClass == true) {
                sl++;
            }
        }
        m_attributeList = new int[m_numAttribs - sl];
        m_attributeMerit = new double[m_numAttribs - sl];
        // add in those attributes not in the starting (omit list)
        for (i = 0, j = 0; i < m_numAttribs; i++) {
            if (!inStarting(i)) {
                m_attributeList[j++] = i;
            }
        }
        this.m_asEval = ASEval;
        AttributeSetEvaluator ASEvaluator = (AttributeSetEvaluator) ASEval;
        for (i = 0; i < m_attributeList.length; i++) {
            m_attributeMerit[i] = ASEvaluator.evaluateAttribute(m_attributeList[i]);
        }
        double[][] tempRanked = rankedAttributes();
        int[] rankedAttributes = new int[m_selectedFeatures.length];
        for (i = 0; i < m_selectedFeatures.length; i++) {
            rankedAttributes[i] = (int) tempRanked[i][0];
        }
        return rankedAttributes;
    }

    /**
     * Sorts the evaluated attribute list
     *
     * @return an array of sorted (highest eval to lowest) attribute indexes
     * @throws Exception of sorting can't be done.
     */
    public double[][] rankedAttributes() throws Exception {
        int i, j;
        if (m_attributeList == null || m_attributeMerit == null) {
            throw new Exception("Search must be performed before a ranked "
                + "attribute list can be obtained");
        }
        int[] ranked = Utils.sort(m_attributeMerit);
        // reverse the order of the ranked indexes
        double[][] bestToWorst = new double[ranked.length][2];
        for (i = ranked.length - 1, j = 0; i >= 0; i--) {
            bestToWorst[j++][0] = ranked[i];
            //alan: means in the arrary ranked, varialbe is from ranked as from small to large
        }
        // convert the indexes to attribute indexes
        for (i = 0; i < bestToWorst.length; i++) {
            int temp = ((int) bestToWorst[i][0]);
            bestToWorst[i][0] = m_attributeList[temp]; //for the index
            bestToWorst[i][1] = m_attributeMerit[temp]; //for the value of the index
        }
        if (m_numToSelect > bestToWorst.length) {
            throw new Exception("More attributes requested than exist in the data");
        }
        this.FCBFElimination(bestToWorst);
        if (m_numToSelect <= 0) {
            if (m_threshold == -Double.MAX_VALUE) {
                m_calculatedNumToSelect = m_selectedFeatures.length;
            } else {
                determineNumToSelectFromThreshold(m_selectedFeatures);
            }
        }
        /* if (m_numToSelect > 0) {
           determineThreshFromNumToSelect(bestToWorst);
           } */
        return m_selectedFeatures;
    }

    /** Counts how many ranked entries score above the threshold. */
    private void determineNumToSelectFromThreshold(double[][] ranking) {
        int count = 0;
        for (int i = 0; i < ranking.length; i++) {
            if (ranking[i][1] > m_threshold) {
                count++;
            }
        }
        m_calculatedNumToSelect = count;
    }

    /** Derives a threshold as the midpoint around the numToSelect cut. */
    private void determineThreshFromNumToSelect(double[][] ranking) throws Exception {
        if (m_numToSelect > ranking.length) {
            throw new Exception("More attributes requested than exist in the data");
        }
        if (m_numToSelect == ranking.length) {
            return;
        }
        m_threshold = (ranking[m_numToSelect - 1][1]
            + ranking[m_numToSelect][1]) / 2.0;
    }

    /**
     * returns a description of the search as a String
     *
     * @return a description of the search
     */
    public String toString() {
        StringBuffer BfString = new StringBuffer();
        BfString.append("\tAttribute ranking.\n");
        if (m_starting != null) {
            BfString.append("\tIgnored attributes: ");
            BfString.append(startSetToString());
            BfString.append("\n");
        }
        if (m_threshold != -Double.MAX_VALUE) {
            BfString.append("\tThreshold for discarding attributes: "
                + Utils.doubleToString(m_threshold, 8, 4) + "\n");
        }
        BfString.append("\n\n");
        BfString.append(" J || SU(j,Class) || I || SU(i,j). \n");
        for (int i = 0; i < m_rankedFCBF.length; i++) {
            BfString.append(Utils.doubleToString(m_rankedFCBF[i][0] + 1, 6, 0) + " ; "
                + Utils.doubleToString(m_rankedFCBF[i][1], 12, 7) + " ; ");
            if (m_rankedFCBF[i][2] == m_rankedFCBF[i][0]) {
                BfString.append(" *\n");
            } else {
                BfString.append(Utils.doubleToString(m_rankedFCBF[i][2] + 1, 5, 0) + " ; "
                    + m_rankedFCBF[i][3] + "\n");
            }
        }
        return BfString.toString();
    }

    /**
     * Resets stuff to default values
     */
    protected void resetOptions() {
        m_starting = null;
        m_startRange = new Range();
        m_attributeList = null;
        m_attributeMerit = null;
        m_threshold = -Double.MAX_VALUE;
    }

    /** Returns true if the attribute is in the start (ignore) set or is the class. */
    private boolean inStarting(int feat) {
        // omit the class from the evaluation
        if ((m_hasClass == true) && (feat == m_classIndex)) {
            return true;
        }
        if (m_starting == null) {
            return false;
        }
        for (int i = 0; i < m_starting.length; i++) {
            if (m_starting[i] == feat) {
                return true;
            }
        }
        return false;
    }

    /**
     * Performs the FCBF redundancy elimination pass over the ranked features:
     * a feature is kept (marked as its own "predominant" feature) unless some
     * earlier kept feature j has SU(i,j) >= SU(i,class).
     */
    private void FCBFElimination(double[][] rankedFeatures) throws Exception {
        int i, j;
        m_rankedFCBF = new double[m_attributeList.length][4];
        int[] attributes = new int[1];
        int[] classAtrributes = new int[1];
        int numSelectedAttributes = 0;
        int startPoint = 0;
        double tempSUIJ = 0;
        AttributeSetEvaluator ASEvaluator = (AttributeSetEvaluator) m_asEval;
        for (i = 0; i < rankedFeatures.length; i++) {
            m_rankedFCBF[i][0] = rankedFeatures[i][0];
            m_rankedFCBF[i][1] = rankedFeatures[i][1];
            m_rankedFCBF[i][2] = -1; // -1 == not yet eliminated/selected
        }
        while (startPoint < rankedFeatures.length) {
            if (m_rankedFCBF[startPoint][2] != -1) {
                startPoint++;
                continue;
            }
            // mark this feature as selected (its own predominant feature)
            m_rankedFCBF[startPoint][2] = m_rankedFCBF[startPoint][0];
            numSelectedAttributes++;
            for (i = startPoint + 1; i < m_attributeList.length; i++) {
                if (m_rankedFCBF[i][2] != -1) {
                    continue;
                }
                attributes[0] = (int) m_rankedFCBF[startPoint][0];
                classAtrributes[0] = (int) m_rankedFCBF[i][0];
                tempSUIJ = ASEvaluator.evaluateAttribute(attributes, classAtrributes);
                // eliminate i if SU(i, selected) >= SU(i, class) (with epsilon)
                if (m_rankedFCBF[i][1] < tempSUIJ || Math.abs(tempSUIJ - m_rankedFCBF[i][1]) < 1E-8) {
                    m_rankedFCBF[i][2] = m_rankedFCBF[startPoint][0];
                    m_rankedFCBF[i][3] = tempSUIJ;
                }
            }
            startPoint++;
        }
        m_selectedFeatures = new double[numSelectedAttributes][2];
        for (i = 0, j = 0; i < m_attributeList.length; i++) {
            if (m_rankedFCBF[i][2] == m_rankedFCBF[i][0]) {
                m_selectedFeatures[j][0] = m_rankedFCBF[i][0];
                m_selectedFeatures[j][1] = m_rankedFCBF[i][1];
                j++;
            }
        }
    }

    /**
     * Returns the revision string.
     *
     * @return the revision
     */
    public String getRevision() {
        return RevisionUtils.extract("$Revision$");
    }
}
|
-- Fetch the names of all customers located in New York.
SELECT customer_name
FROM customers
WHERE city = 'New York';
import { NonEmpty, NonLone } from './utils'
// A scalar value expressible as a CQL literal.
export type CqlAtomicPrimitive = string | number | boolean | null
// A scalar or an array of scalars (for e.g. `in` lists and array columns).
export type CqlPrimitive = CqlAtomicPrimitive | CqlAtomicPrimitive[]
/** HTTP verbs an action endpoint can be mounted under. */
export enum HttpVerb {
	get = 'get', post = 'post', put = 'put', patch = 'patch', delete = 'delete',
}
/** A named, typed, positional argument to a generated action. */
export class Arg {
	readonly index: number
	readonly arg_name: string
	readonly arg_type: string
	readonly nullable: boolean
	readonly default_value: CqlPrimitive | undefined

	constructor(index: number, arg_name: string, arg_type: string, nullable: boolean, default_value: CqlPrimitive | undefined) {
		this.index = index
		this.arg_name = arg_name
		this.arg_type = arg_type
		this.nullable = nullable
		this.default_value = default_value
	}
}
/** Wrapper marking a string as a reference to a column by name. */
export class ColumnName {
	readonly column_name: string
	constructor(column_name: string) {
		this.column_name = column_name
	}
}
// A value usable in a directive: a column reference, a bound arg, or a literal.
export type DirectiveValue = ColumnName | Arg | CqlPrimitive
/** A delete action against a single table, guarded by where directives. */
export class Delete {
	readonly type: 'Delete' = 'Delete'
	constructor(
		readonly name: string,
		readonly table_name: string,
		readonly args: Arg[],
		readonly where_directives: NonEmpty<WhereDirective>,
	) {}
}
// Registry of action variants, keyed by their `type` discriminant.
export type ActionManifest = {
Delete: Delete,
Query: Query,
}
// Discriminated union of all action variants.
export type Action = ActionManifest[keyof ActionManifest]
// thinking about it now, it probably makes more sense
// to simply automatically generate bindings for all non-queryable
// functions that are executable by the current role
export namespace Action {
	// Verb per action variant. A lookup miss (a future, unhandled variant)
	// yields undefined, matching the original switch's fall-through.
	const VERBS: { [K in Action['type']]: HttpVerb } = {
		Query: HttpVerb.get,
		Delete: HttpVerb.delete,
	}
	// Candidate verbs for variants not yet implemented:
	// ImmutableFunction/StableFunction -> get
	// VolatileFunction/Function/Insert/InsertDeep/PutDeep/PatchDeep -> post
	// Put -> put; Patch/Update -> patch

	/** Map an action to the HTTP verb its endpoint should be mounted under. */
	export function http_verb(action: Action): HttpVerb {
		return VERBS[action.type]
	}
}
/** SQL boolean comparison operators usable in where directives. */
export enum BooleanOperator {
	Eq = '=',
	Ne = '!=',
	Lt = '<',
	Lte = '<=',
	Gt = '>',
	Gte = '>=',
	In = 'in',
	Nin = 'not in',
	Is = 'is',
	Nis = 'is not',
	Bet = 'between',
	Nbet = 'not between',
	Symbet = 'between symmetric',
	Nsymbet = 'not between symmetric',
	Dist = 'is distinct from',
	Ndist = 'is not distinct from',
}
/** Directive selecting rows by key values (optionally naming the key columns). */
export class GetDirective {
	readonly args: DirectiveValue[]
	readonly column_names?: string[]
	constructor(args: DirectiveValue[], column_names?: string[]) {
		this.args = args
		this.column_names = column_names
	}
}
/** A single `left <operator> right` filter condition. */
export class WhereDirective {
	constructor(
		readonly left: DirectiveValue,
		readonly right: DirectiveValue,
		readonly operator: BooleanOperator,
	) {}
}
/** Where nulls sort relative to non-null values in an order-by. */
export enum OrderByNullsPlacement {
	First = 'first',
	Last = 'last',
}
/** Ordering instruction for one column of a query block. */
export class OrderDirective {
	// TODO probably should be column_display_name: string
	constructor(
		readonly column: string,
		readonly ascending?: boolean,
		readonly nulls_placement?: OrderByNullsPlacement,
	) {}
}
/** A read-only query action: a named entry point wrapping a root block. */
export class Query {
	readonly type: 'Query' = 'Query'
	readonly name: string
	readonly args: Arg[]
	readonly block: QueryBlock
	constructor(name: string, args: Arg[], block: QueryBlock) {
		this.name = name
		this.args = args
		this.block = block
	}
}
// Anything that can appear inside a query block's entity list.
export type QueryObject = QueryBlock | QueryColumn | QueryRawColumn
/**
 * One level of a nested query: selects `entities` from `target_table_name`
 * (reached via `access_object`), scoped by where/order/limit/offset
 * directives. `is_many` controls array-vs-object embedding; `use_left`
 * requests a left join.
 */
export class QueryBlock {
readonly type: 'QueryBlock' = 'QueryBlock'
constructor(
readonly display_name: string,
readonly target_table_name: string,
readonly access_object: TableAccessor,
readonly is_many: boolean,
readonly entities: QueryObject[],
readonly where_directives: GetDirective | WhereDirective[],
readonly order_directives: OrderDirective[],
readonly limit: DirectiveValue | undefined,
readonly offset: DirectiveValue | undefined,
readonly use_left: boolean,
) {}
}
/** Selection of a single column, optionally renamed for display. */
export class QueryColumn {
	readonly type: 'QueryColumn' = 'QueryColumn'
	readonly column_name: string
	readonly display_name?: string
	constructor(column_name: string, display_name?: string) {
		this.column_name = column_name
		this.display_name = display_name
	}
}
/** A computed column produced by raw SQL text, exposed under a display name. */
export class QueryRawColumn {
	readonly type: 'QueryRawColumn' = 'QueryRawColumn'
	readonly display_name: string
	readonly sql_text: string
	constructor(display_name: string, sql_text: string) {
		this.display_name = display_name
		this.sql_text = sql_text
	}
}
// The ways a query block can name/reach its target table.
export type TableAccessor =
| SimpleTable
| TableChain
| ForeignKeyChain
// | ColumnKeyChain
// export type TableAccessor =
// | TableChain
// | ForeignKeyChain
// | ColumnKeyChain
// export class TableChain {
// readonly type: 'TableChain' = 'TableChain'
// constructor(readonly table_names: NonEmpty<string>) {}
// }
/** Direct access to a single table by name. */
export class SimpleTable {
	readonly type: 'SimpleTable' = 'SimpleTable'
	readonly table_name: string
	constructor(table_name: string) {
		this.table_name = table_name
	}
}
/** A join path expressed as a sequence of two or more table names. */
export class TableChain {
	readonly type: 'TableChain' = 'TableChain'
	readonly table_names: NonLone<string>
	constructor(table_names: NonLone<string>) {
		this.table_names = table_names
	}
}
// this is going to be a chain of only foreign_key's, not any column
// which means it will just be useful to disambiguate normal joins
// ~~some_key~~some_other~~table_name.key~~key->destination_table_name
// for composite keys, must give table_name and use parens
// ~~some_key~~some_other~~table_name(key, other_key)~~key->destination_table_name
/** A join path expressed purely as foreign-key hops ending at a table. */
export class ForeignKeyChain {
	readonly type: 'ForeignKeyChain' = 'ForeignKeyChain'
	readonly key_references: NonEmpty<KeyReference>
	readonly destination_table_name: string
	constructor(key_references: NonEmpty<KeyReference>, destination_table_name: string) {
		this.key_references = key_references
		this.destination_table_name = destination_table_name
	}
}
/** One hop in a foreign key chain: key column names, optionally table-qualified. */
export class KeyReference {
	readonly key_names: string[]
	readonly table_name?: string
	constructor(key_names: string[], table_name?: string) {
		this.key_names = key_names
		this.table_name = table_name
	}
}
// // this is for lining up arbitrary columns, no restrictions at all (except for column type)
// // ~local_col=some_col~same_table_col=qualified.other_col->destination_table_name
// export class ColumnKeyChain {
// readonly type: 'ColumnKeyChain' = 'ColumnKeyChain'
// constructor() {}
// }
// export class KeyEquality {
// constructor(readonly left: KeyReference, readonly right: KeyReference) {}
// }
// export enum MutationLevel { ASSOCIATION_ONLY, PUT, PATCH, PUT_FORCE, PATCH_FORCE }
// readonly mutation_level: MutationLevel = MutationLevel.ASSOCIATION_ONLY,
/** An insert action wrapping a root insert block. Takes no args. */
export class Insert {
	readonly type: 'Insert' = 'Insert'
	readonly args = [] as []
	readonly name: string
	readonly block: InsertBlock
	constructor(name: string, block: InsertBlock) {
		this.name = name
		this.block = block
	}
}
/** One level of a nested insert: a target table plus child insert blocks. */
export class InsertBlock {
	readonly given_name: string
	readonly target_table_name: string
	readonly is_many: boolean
	readonly blocks: InsertBlock[]
	constructor(given_name: string, target_table_name: string, is_many: boolean, blocks: InsertBlock[]) {
		this.given_name = given_name
		this.target_table_name = target_table_name
		this.is_many = is_many
		this.blocks = blocks
	}
}
|
import angr

# Symbolically execute the ./yakisoba binary from its entry point and dump
# the concrete stdin bytes of every path that ran to completion (deadended
# states); one of those inputs is the flag.
project = angr.Project('./yakisoba')
state = project.factory.entry_state()
manager = project.factory.simgr(state)
manager.explore()

for finished in manager.deadended:
    print(b"".join(finished.posix.stdin.concretize()))
|
import React, { createContext, ReactNode, useContext } from 'react';
// Props accepted by CustomProvider; `children` is the subtree to wrap.
export interface BaseProviderProps {
children: ReactNode;
// Add any additional props here
}
// Context exposing the provider's props; defaults to an empty object when no
// provider is mounted above the consumer (hence Partial<>).
export const CustomContext = createContext<Partial<BaseProviderProps>>({});
export const CustomProvider: React.FC<BaseProviderProps> = ({ children }) => {
return (
<CustomContext.Provider value={{ children }}>
{children}
</CustomContext.Provider>
);
}; |
<filename>atoi.js
"use strict"
/**
 * Convert a string to a 32-bit signed integer (LeetCode atoi semantics):
 * trim surrounding whitespace, read an optional sign and a run of digits,
 * and clamp the result into [-2^31, 2^31 - 1]. Non-numeric input yields 0.
 * @param {string} str
 * @return {number}
 */
var myAtoi = function (str) {
    const INT_MAX = 2147483647
    const INT_MIN = -2147483648

    // Strip leading and trailing whitespace.
    const trimmed = str.replace(/(^\s*)|(\s*$)/g, "")

    // Consume an optional single sign character.
    let pos = 0
    let negative = false
    if (trimmed[0] === '-') {
        negative = true
        pos++
    } else if (trimmed[0] === '+') {
        pos++
    }

    // Collect the contiguous run of digits that follows the sign.
    let digits = ""
    while (pos < trimmed.length) {
        const ch = trimmed[pos]
        if (ch < '0' || ch > '9') break
        digits += ch
        pos++
    }

    // Accumulate the numeric value digit by digit.
    let value = 0
    for (const ch of digits) {
        value = value * 10 + (ch.charCodeAt(0) - 48)
    }
    if (negative) value = -value

    // Clamp into the 32-bit signed range.
    if (value > INT_MAX) return INT_MAX
    if (value < INT_MIN) return INT_MIN
    return value
};
console.log(myAtoi(" -0012a42"))
package org.rzo.netty.ahessian.rpc.message;
/**
 * A message that carries a group number, allowing related RPC messages to be
 * associated with one another.
 */
public interface GroupedMessage
{
/** @return the group number this message belongs to */
public Integer getGroup();
}
|
package io.cattle.platform.engine.process.log;
import io.cattle.platform.util.type.Named;
/**
 * A named log that can spawn child logs, forming a process-log hierarchy.
 */
public interface ParentLog extends Named {
/** Creates and returns a new child log attached to this log. */
ProcessLog newChildLog();
}
|
package com.ilscipio.scipio.solr;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
/**
* High-level util to help dealing with SolrQuery object and high-level query ops.
* For low-level query expression parsing and string/term/phrase manipulation, see {@link SolrExprUtil}.
* @see SolrExprUtil
*/
public abstract class SolrQueryUtil {
    public static final String module = SolrQueryUtil.class.getName();

    protected SolrQueryUtil() {
    }

    /**
     * Adds each of the given filters to the query as a Solr filter query (fq).
     * A null collection is tolerated and treated as a no-op.
     */
    public static void addFilterQueries(SolrQuery solrQuery, Collection<String> queryFilters) {
        if (queryFilters != null) {
            for(String filter : queryFilters) {
                solrQuery.addFilterQuery(filter);
            }
        }
    }

    /**
     * Returns a mutable copy of the given filter list, or an empty list if null.
     */
    public static List<String> copyQueryFilters(List<String> queryFilters) {
        return queryFilters != null ? new ArrayList<String>(queryFilters) : new ArrayList<String>();
    }

    /**
     * Returns the closest whole viewIndex (zero-based page number for the
     * result offset), or null when results/viewSize are missing or invalid.
     */
    public static Integer calcResultViewIndex(SolrDocumentList results, Integer viewSize) {
        Integer viewIndex = null;
        if (results != null && viewSize != null && viewSize > 0) {
            long start = results.getStart();
            viewIndex = (int) (start / (long) viewSize);
        }
        return viewIndex;
    }

    /**
     * Checks if the exception extracted by {@link #getSolrNestedException} is a syntax error.
     * FIXME: AWFUL HEURISTIC
     */
    public static boolean isSolrQuerySyntaxError(Throwable t) {
        // exception message usually contains the string: "org.apache.solr.search.SyntaxError"
        // hopefully this is accurate enough... how else to check? cause is not set and
        // the root SyntaxError is from an inaccessible jar (CANNOT add it to classpath)
        if (!(t instanceof SolrException)) {
            return false;
        }
        // BUG FIX: SolrExceptions created without a message previously caused
        // an NPE on getMessage().toLowerCase(); treat a null message as "no".
        String msg = t.getMessage();
        return msg != null && msg.toLowerCase().contains("syntax");
    }
}
|
<filename>src/database/jsonhelpers/JSONToObjectConverter.java
package database.jsonhelpers;
import database.firebase.TrackableObject;
import org.json.JSONArray;
import org.json.JSONObject;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Class used for the conversion of JSON files into their appropriate TrackableObject
* representations
* @param <T> is the class of TrackableObject to be created from the JSON file
*
* @author <NAME>
*/
public class JSONToObjectConverter<T extends TrackableObject> {
/* Instance Variables */
private Class<T> myClass;
/**
* Creates a new JSONToObjectConverter that is capable of creating objects
* of the passed Class
* @param className is a {@code Class<T>} representing the class of objects to
* be created by the converter
*/
public JSONToObjectConverter(Class<T> className) {
// Remember the target class so reflection can instantiate it later.
myClass = className;
}
/**
* Converts the JSONObject into a map reflecting the parameters of the
* object being created from the JSON file
* @param json is a {@code JSONObject} that represents the JSON
* serialization of the object to be created by the converter
* @return A {@code Map<String, Object>} containing the
* Instance Variable -> Value pairs
*/
private Map<String, Object> parseParameters(JSONObject json) {
    // Walk every top-level key, converting nested JSON objects into nested
    // parameter maps and JSON arrays into plain Lists; scalars pass through.
    Map<String, Object> parsed = new HashMap<>();
    for (String key : json.keySet()) {
        Object value = json.get(key);
        if (value.getClass() == JSONObject.class) {
            // Nested object held by the overall object: recurse.
            parsed.put(key, parseParameters((JSONObject) value));
        } else if (value.getClass() == JSONArray.class) {
            parsed.put(key, ((JSONArray) value).toList());
        } else {
            parsed.put(key, value);
        }
    }
    return parsed;
}
/**
* Creates a new object instance for the class provided
* @param myClass is the {@code Class<G>} to be created
* @return A new {@code G} instance that DOES NOT HAVE any
* instance variables initialized
*/
private <G extends TrackableObject> G createNewInstance(Class<G> myClass) {
try {
G newObject;
// Retrieve the empty constructor
Constructor<G> constructor = myClass.getDeclaredConstructor();
if (!constructor.isAccessible()) {
// If private, then make public in order to create new instance
constructor.setAccessible(true);
newObject = constructor.newInstance();
// Restore original accessibility so the class is left untouched.
constructor.setAccessible(false);
} else {
newObject = constructor.newInstance();
}
return newObject;
} catch(Exception e) {
// NOTE(review): every reflection failure (no no-arg constructor, the
// constructor throwing, security restrictions) is swallowed and reported
// as null — callers must null-check. Consider logging the cause.
return null;
}
}
private <G extends TrackableObject> void setUIDField(Class<G> myClass, Map<String, Object> params, G newObject) {
// Writes TrackableObject's private "UID" field directly via reflection:
// uses the UID stored in the parsed JSON params when present, otherwise
// asks the object to generate one. The "UID" entry is then removed from
// params so it is not treated as a regular instance variable later.
try {
// Walk up the hierarchy to the class that actually declares "UID".
Class<?> trackableClass = newObject.getClass().getSuperclass();
while (trackableClass != TrackableObject.class)
trackableClass = trackableClass.getSuperclass();
Field UIDField = trackableClass.getDeclaredField("UID");
UIDField.setAccessible(true);
if(params.containsKey("UID")) {
UIDField.set(newObject, params.get("UID"));
} else {
UIDField.set(newObject, newObject.UIDforObject());
}
UIDField.setAccessible(false);
params.remove("UID");
} catch (Exception e) {
// NOTE(review): failures are silently ignored, leaving the UID unset —
// TODO confirm this is the intended behaviour for malformed input.
return;
}
}
/**
 * Instantiates the specified parameter for the TrackableObject passed in,
 * replacing the raw parsed value in {@code params} (mutated in place) with a
 * fully constructed object where the field type requires it: elements of a
 * {@code List<? extends TrackableObject>}, a nested TrackableObject, or a Map.
 * Plain values are left untouched.
 * @param instanceVar is a {@code Field} that represents the instance variable being
 * instantiated
 * @param param is a {@code String} representing the name of the field being instantiated
 * @param params is a {@code Map<String, Object>} of all the parsed in parameters for the object
 * @param <G> is a {@code G} representing the new instance created for the TrackableObject
 */
private <G extends TrackableObject> void instantiateParmeter(Field instanceVar, String param, Map<String,Object> params) {
// First need to check special case where you're storing a List<? extends TrackableObject> variable
if(List.class.isAssignableFrom(instanceVar.getType())) {
// Get generic parameter of list (assumes the field is declared with an
// explicit type argument — a raw List would throw here and be swallowed upstream)
Class listType = (Class<?>) ((ParameterizedType) instanceVar.getGenericType()).getActualTypeArguments()[0];
// Check to see if the list type extends TrackableObject
if(TrackableObject.class.isAssignableFrom(listType)){
// Case where List<? extends TrackableObject>
List<HashMap<String,Object>> trackableObjects = (List<HashMap<String,Object>>) params.get(param);
// Create new List<Objects> for each object
List<Object> objectsList = new ArrayList<>();
for(HashMap<String,Object> obj : trackableObjects) {
// Recursively rebuild each element from its parsed map
JSONObject heldObjectJSON = new JSONObject((HashMap<String, Object>) obj);
JSONObject m = new JSONObject(parseParameters(heldObjectJSON));
TrackableObject heldObject = (TrackableObject) createObjectFromJSON(listType, m);
objectsList.add(heldObject);
}
params.put(param, objectsList);
}
// Next need to check if the parameter is actually just a TrackableObject
} else if(TrackableObject.class.isAssignableFrom(instanceVar.getType())) {
JSONObject heldObjectJSON = new JSONObject((HashMap<String, Object>) params.get(param));
Object heldObject = createObjectFromJSON((Class<G>)instanceVar.getType(), heldObjectJSON);
params.put(param, heldObject);
// Finally check if the parameter is a Map
} else if(instanceVar.getType().isAssignableFrom(Map.class)) {
JSONObject objectForMap = new JSONObject((HashMap<String, Object>) params.get(param));
params.put(param, parseParameters(objectForMap));
}
}
/**
 * Assigns the parsed parameter value to the given field on the target object.
 * Non-public fields are made accessible for the duration of the write and
 * then restored. Any reflection failure is deliberately swallowed, matching
 * the best-effort contract of the surrounding deserializer.
 */
private <G extends TrackableObject> void setInstanceVariable(Field instanceVar, G newObject, Map<String, Object> params, String param) {
try {
Object value = params.get(param);
boolean alreadyAccessible = instanceVar.isAccessible();
if (alreadyAccessible) {
instanceVar.set(newObject, value);
} else {
instanceVar.setAccessible(true);
instanceVar.set(newObject, value);
instanceVar.setAccessible(false);
}
} catch (Exception e) {
// Best-effort: fields that cannot be set are silently skipped.
}
}
/**
 * Creates the appropriate initialized object corresponding to the passed in JSONObject.
 * Flow: parse the JSON into a parameter map, reflectively construct an empty
 * instance, set the UID, hydrate and assign each remaining field, register the
 * object for tracking, then run its custom {@code initialize()} hook.
 * @param myClass is the {@code Class} of the object being returned
 * @param json is the {@code JSONObject} holding all the object instance variable values
 * @param <G> is the type that is to be created
 * @return A new {@code G} object that has been initialized using the values in
 * the JSONObject, or {@code null} when construction or field assignment fails
 */
public <G extends TrackableObject> G createObjectFromJSON(Class<G> myClass, JSONObject json) {
Map<String, Object> params = parseParameters(json);
try {
// Create a new instance of the object class
// (may return null, in which case the NPE below is caught and null returned)
G newObject = createNewInstance(myClass);
// Set UID field of TrackableObject
if(TrackableObject.class.isAssignableFrom(myClass)) setUIDField(myClass,params, newObject);
// Set the instance variables of the object being created
for(String param : params.keySet()) {
// First get the instance variable
Field instanceVar = newObject.getClass().getDeclaredField(param);
// Then Instantiate properly the corresponding parameter if it is an object
instantiateParmeter(instanceVar, param, params);
// Set the instance variable in the newly created object
setInstanceVariable(instanceVar, newObject, params, param);
}
// Add object to tracking map
TrackableObject.trackTrackableObject(newObject);
// Call class defined extra initialization
newObject.initialize();
return newObject;
} catch (Exception e){
// Any failure (unknown field, reflection error, null instance) yields null.
return null; }
}
}
|
import { combineReducers } from '../common';
import counter from './counter';

// Root reducer combining all feature reducers.
// (Fixes the original misspelling `rooReducer`; the default export is unchanged.)
const rootReducer = combineReducers({ counter });

export default rootReducer;
|
#!/usr/bin/env bash
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# Abort on the first failing command.
set -e
# Clean previous build output, reinstall deps, and build the bundles.
yarn rimraf dist/
yarn
yarn build
yarn rollup -c --visualize --npm
# Use minified files for miniprogram
mkdir dist/miniprogram
cp dist/tf-backend-webgl.min.js dist/miniprogram/index.js
cp dist/tf-backend-webgl.min.js.map dist/miniprogram/index.js.map
echo "Stored standalone library at dist/tf-backend-webgl(.min).js"
|
# Capistrano deployment settings for the "conditions" app.
set :application, 'conditions'
set :repo_url, '<EMAIL>:alisdair/conditions.git'
set :branch, 'master'
set :deploy_to, '/home/deploy/conditions'
set :format, :pretty
set :log_level, :info
# set :pty, true
# set :linked_files, %w{.env}
# Directories shared across releases (logs, pids, bundler cache, uploads).
set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
set :keep_releases, 5
namespace :deploy do
desc 'Restart application'
task :restart do
on roles(:app), in: :sequence, wait: 5 do
# Passenger-style restart: touching restart.txt reloads the app.
execute :touch, release_path.join('tmp/restart.txt')
end
end
# Restart automatically after each successful publish.
after :publishing, :restart
end
|
from compas_ags.diagrams import FormGraph
from compas_ags.diagrams import FormDiagram
from compas_ags.diagrams import ForceDiagram
from compas_ags.viewers import Viewer
from compas_ags.ags import graphstatics
from compas_ags.ags import loadpath
# ------------------------------------------------------------------------------
# 1. create a planar truss structure, its applied loads and boundary conditions
# from nodes and edges
# make form and force diagrams
# ------------------------------------------------------------------------------
# Node coordinates: bottom chord (0-6), supports/reaction nodes (7-13),
# top chord (14-18).
nodes = [
[0.0, 0.0, 0],
[1.0, 0.0, 0],
[2.0, 0.0, 0],
[3.0, 0.0, 0],
[4.0, 0.0, 0],
[5.0, 0.0, 0],
[6.0, 0.0, 0],
[0.0, -1.0, 0],
[1.0, -1.0, 0],
[2.0, -1.0, 0],
[3.0, -1.0, 0],
[4.0, -1.0, 0],
[5.0, -1.0, 0],
[6.0, -1.0, 0],
[1.0, +1.0, 0],
[2.0, +1.0, 0],
[3.0, +1.0, 0],
[4.0, +1.0, 0],
[5.0, +1.0, 0]]
# Edges: bottom chord, verticals, top chord, diagonals/struts.
edges = [
(0, 1),
(1, 2),
(2, 3),
(3, 4),
(4, 5),
(5, 6),
(0, 7),
(1, 8),
(2, 9),
(3, 10),
(4, 11),
(5, 12),
(6, 13),
(0, 14),
(14, 15),
(15, 16),
(16, 17),
(17, 18),
(18, 6),
(1, 14),
(2, 15),
(3, 16),
(4, 17),
(5, 18)]
graph = FormGraph.from_nodes_and_edges(nodes, edges)
form = FormDiagram.from_graph(graph)
force = ForceDiagram.from_formdiagram(form)
# ------------------------------------------------------------------------------
# 2. assign applied loads to bottom chord
# ------------------------------------------------------------------------------
# Mark the load edges as independent and give them a unit force density.
edges = [(8, 1), (9, 2), (10, 3), (11, 4), (12, 5)]
for edge in edges:
form.edge_attribute(edge, 'is_ind', True)
form.edge_attribute(edge, 'q', 1.0)
# update force densities of form and force diagram
graphstatics.form_update_q_from_qind(form)
graphstatics.force_update_from_form(force, form)
# ------------------------------------------------------------------------------
# 3. optimize the loadpath
# ------------------------------------------------------------------------------
# modify force in the truss by updating vertex coordinates of the force diagram
# force in members of the top chord and bottom chord are set to be the same
# now the form is no longer in equilibrium
force.vertex_attributes(1, 'xy', [0, 2.5])
force.vertex_attributes(2, 'xy', [0, 1.5])
force.vertex_attributes(3, 'xy', [0, 0.5])
force.vertex_attributes(0, 'xy', [0, 0])
force.vertex_attributes(4, 'xy', [0, -0.5])
force.vertex_attributes(5, 'xy', [0, -1.5])
force.vertex_attributes(6, 'xy', [0, -2.5])
force.vertex_attributes(12, 'xy', [-2, 2.5])
force.vertex_attributes(11, 'xy', [-2, 1.5])
force.vertex_attributes(10, 'xy', [-2, 0.5])
force.vertex_attributes(9, 'xy', [-2, -0.5])
force.vertex_attributes(8, 'xy', [-2, -1.5])
force.vertex_attributes(7, 'xy', [-2, -2.5])
# forces in members of top chord and connecting struts are force domain parameters
force.vertices_attribute('is_param', True, keys=[7, 8, 9, 10, 11, 12])
# fix boundary vertices, the nodes of the bottom chord
form.vertices_attribute('is_fixed', True, keys=[0, 1, 2, 3, 4, 5, 6])
# optimize the loadpath and output the optimal distribution of forces that
# results in overall minimum-volume solution for given form diagram
loadpath.optimise_loadpath(form, force)
# ------------------------------------------------------------------------------
# 4. display force and form diagrams
# ------------------------------------------------------------------------------
viewer = Viewer(form, force, delay_setup=False, figsize=(12, 7.5))
viewer.draw_form(
forcescale=5,
vertexlabel={key: str(key) for key in form.vertices()},
vertexsize=0.2)
viewer.draw_force(
vertexlabel={key: str(key) for key in force.vertices()},
vertexsize=0.2)
viewer.show()
|
<reponame>harsh2602/reads-graphql
const fetch = require('node-fetch');
const util = require('util');
const parseXML = util.promisify(require('xml2js').parseString);
const express = require('express');
const graphqlHTTP = require('express-graphql');
const DataLoader = require('dataloader');
const app = express();
const keys = require('./keys');
// Schema is what contains type and information on how to get that data
const schema = require('./schema');
// Fetch a Goodreads author by id; the XML response is parsed into a JS object.
const fetchAuthor = id =>
fetch(`https://www.goodreads.com/author/show.xml?id=${id}&key=${keys.apiKey}`)
.then(response => response.text())
.then(parseXML);
// Fetch a Goodreads book by id, same XML-to-object pipeline.
const fetchBooks = id =>
fetch(`https://www.goodreads.com/book/show/${id}.xml?key=${keys.apiKey}`)
.then(response => response.text())
.then(parseXML);
app.use(
'/graphql',
graphqlHTTP(req => {
// Create loaders on every request
// (per-request loaders batch/dedupe fetches without caching across users)
// Author loader
const authorLoader = new DataLoader(keys =>
Promise.all(keys.map(key => fetchAuthor(key)))
);
// Books loader
const bookLoader = new DataLoader(keys =>
Promise.all(keys.map(key => fetchBooks(key)))
);
return {
schema,
context: {
authorLoader,
bookLoader
},
graphiql: true
};
})
);
app.listen(4000);
console.log('Listening on port: 4000');
|
<gh_stars>1-10
package org.cnt.ts.bean;
import org.springframework.stereotype.Component;
import lombok.Data;
/**
 * Simple Spring-managed bean; getters/setters/equals/hashCode/toString are
 * generated by Lombok's {@code @Data}.
 *
 * @author lixinjie
 * @since 2019-06-25
 */
@Component
@Data
public class Boss {
// Boss display name (accessors generated by Lombok).
private String name;
}
|
#!/bin/bash
# Generates snmp-exporter config files for one module (or "all") using the
# prometheus snmp generator, then post-processes metric names per module.
#set -x
if [[ $# -eq 0 ]] ; then
echo 'Please set the module name like "asr" or "all" to create snmp config'
exit 1
fi
if [ $1 = "all" ]; then
export modules=`ls ../_snmp-ntp-exporter-*|awk -F- '{ print $4}'|awk -F. '{ print $1}'`
else
# Build the full module list first so we can validate the requested name.
export modules=`ls ../_snmp-ntp-exporter-*|awk -F- '{ print $4}'|awk -F. '{ print $1}'`
echo $modules |grep -qw $1
if [ $? -eq 0 ] ; then
export modules=$1
else
echo "no module named $1"
exit 1
fi
fi
# cd
#git clone https://github.com/sapcc/helm-charts.git
# cd ./helm-charts/prometheus-exporters/snmp-ntp-exporter/generator/
mv /usr/share/snmp/mibs/CISCO-UNIFIED-COMPUTING-TC-MIB.mib /usr/share/snmp/ # This mib makes other generators fail...
for i in $modules;
do
# Stage this module's generator config under the name the tool expects.
cp -f ./${i}-generator.yaml ./generator.yml
echo "##############################################"
echo "############### config for ${i} ##############"
echo "##############################################"
if [ $i = "ucs" ]; then # This mib makes other generators fail...
mv /usr/share/snmp/CISCO-UNIFIED-COMPUTING-TC-MIB.mib /usr/share/snmp/mibs/
fi
/gopath/bin/generator generate || exit
if [ $i = "ucs" ]; then # This mib makes other generators fail...
mv /usr/share/snmp/mibs/CISCO-UNIFIED-COMPUTING-TC-MIB.mib /usr/share/snmp/
fi
mv -f ./snmp.yml ./_snmp-ntp-exporter-${i}.yaml.tmp
rm -d ./generator.yml
# Splice module-specific extra OIDs after each "walk:" key, if provided.
if test -f "${i}-additional-oids.yaml"; then
awk -v f=$i '{ print; } /walk:/ { system ( "cat "f"-additional-oids.yaml" ) } \' _snmp-ntp-exporter-${i}.yaml.tmp > ../_snmp-ntp-exporter-${i}.yaml
else
mv -f ./_snmp-ntp-exporter-${i}.yaml.tmp ../_snmp-ntp-exporter-${i}.yaml
fi
# Prefix metric names; the three f5 variants share a common "snmp_f5_" prefix.
if [[ "$i" =~ ^(f5mgmt|f5physical|f5customer)$ ]]; then
sed -i "s/- name: /- name: snmp_f5_/g" ../_snmp-ntp-exporter-${i}.yaml
else
sed -i "s/- name: /- name: snmp_${i}_/g" ../_snmp-ntp-exporter-${i}.yaml
fi
if test -f "${i}-additional-metrics.yaml"; then
cat ${i}-additional-metrics.yaml >> ../_snmp-ntp-exporter-${i}.yaml
rm -f ./_snmp-ntp-exporter-${i}.yaml.tmp
fi
done
# Drop the second line (module header) from configs that still carry it.
if grep -q "arista:" ../_snmp-ntp-exporter-arista.yaml; then
sed -i '2d' ../_snmp-ntp-exporter-arista.yaml
fi
if grep -q "n7k:" ../_snmp-ntp-exporter-n7k.yaml; then
sed -i '2d' ../_snmp-ntp-exporter-n7k.yaml
fi
if grep -q "n7k:" ../_snmp-ntp-exporter-n7kcontext.yaml; then
sed -i '2d' ../_snmp-ntp-exporter-n7kcontext.yaml
fi
if grep -q "asw:" ../_snmp-ntp-exporter-arista.yaml; then
sed -i '2d' ../_snmp-ntp-exporter-arista.yaml
fi
|
<reponame>Grasea/Grandroid2<filename>grandroid-ble/src/main/java/com/grasea/grandroid/ble/controller/BleDevice.java
package com.grasea.grandroid.ble.controller;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGattService;
import android.support.annotation.Nullable;
import com.grasea.grandroid.ble.BluetoothLeService;
import com.grasea.grandroid.ble.Config;
import com.grasea.grandroid.ble.data.GattServiceChannelHandler;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by <NAME> on 2016/5/13.
*/
/**
 * Concrete BLE device wrapper that pairs a system {@link BluetoothDevice}
 * with the app's {@link BluetoothLeService} and tracks a coarse connection
 * state. Instances are created via the nested {@link Builder}.
 */
public class BleDevice extends BaseBleDevice {
// Coarse connection state; starts Uninitialized until init() runs.
private ConnectionState state = ConnectionState.Uninitialized;
private BluetoothDevice bluetoothDevice;
private BluetoothLeService bluetoothLeService;
// Private: construction goes through Builder.build().
private BleDevice(BluetoothLeService bluetoothLeService, BluetoothDevice bluetoothDevice) {
this.bluetoothLeService = bluetoothLeService;
this.bluetoothDevice = bluetoothDevice;
init();
}
/**
 * Looks up a discovered GATT service handler by its UUID string.
 * Returns null until onGattServicesDiscovered() has populated the map.
 */
public
@Nullable
GattServiceChannelHandler findService(String serviceUUID) {
return serviceHandlerMap.get(serviceUUID);
}
private void init() {
state = ConnectionState.Disconnected;
}
@Override
public void onGattServicesDiscovered() {
// Show all the supported services and characteristics on the user interface.
serviceHandlerMap.clear();
Config.logd("on GattServicesDiscovered");
List<BluetoothGattService> supportedGattServices = bluetoothLeService.getSupportedGattServices(this);
if (supportedGattServices != null) {
for (BluetoothGattService service : supportedGattServices) {
Config.logi("Add a Service:" + service.getUuid().toString());
serviceHandlerMap.put(service.getUuid().toString(), new GattServiceChannelHandler(this, service));
}
// NOTE(review): state becomes Connecting *after* services are discovered,
// and no "Connected" value exists in ConnectionState — confirm intended.
state = ConnectionState.Connecting;
} else {
// NOTE(review): null service list is silently ignored here.
}
}
@Override
public void onGattServicesConnected() {
}
@Override
public void onGattServicesDisconnected() {
state = ConnectionState.Disconnected;
}
@Override
public ConnectionState getState() {
return state;
}
public boolean isConnecting() {
return state == ConnectionState.Connecting;
}
/** Requests a connection via the service; false when no service is bound. */
@Override
public boolean connect() {
if (bluetoothLeService != null) {
bluetoothLeService.connect(this);
return true;
}
return false;
}
/** Connect with an initial delay and Android autoConnect flag. */
public boolean connect(int delay, boolean autoConnect) {
if (bluetoothLeService != null) {
bluetoothLeService.connect(this, delay, autoConnect);
return true;
}
return false;
}
@Override
public void disconnect() {
if (bluetoothLeService != null) {
bluetoothLeService.disconnect(this);
}
}
// NOTE(review): both send() overloads NPE if the service or channel UUID is
// unknown — findService()/getChannel() may return null; confirm callers guard.
@Override
public boolean send(String serviceUUID, String channelUUID, String protocol) {
return findService(serviceUUID).getChannel(channelUUID).send(protocol);
}
@Override
public boolean send(String serviceUUID, String channelUUID, byte[] protocol) {
return findService(serviceUUID).getChannel(channelUUID).send(protocol);
}
@Override
public String getAddress() {
return bluetoothDevice.getAddress();
}
@Override
public String getName() {
return bluetoothDevice.getName();
}
@Override
public BluetoothDevice getBluetoothDevice() {
return bluetoothDevice;
}
@Override
public BluetoothLeService getBluetoothLeService() {
return bluetoothLeService;
}
/** Fluent builder tying a BluetoothDevice to the BLE service. */
public static class Builder {
private BluetoothDevice device;
private BluetoothLeService bluetoothLeService;
public Builder(BluetoothLeService bluetoothLeService) {
this.bluetoothLeService = bluetoothLeService;
}
public Builder setDevice(BluetoothDevice device) {
this.device = device;
return this;
}
public BleDevice build() {
return new BleDevice(bluetoothLeService, device);
}
}
public enum ConnectionState {
Uninitialized, Disconnected, Connecting
}
}
|
#!/usr/bin/bash
# Generates a WireGuard client config from 0client_template.conf, assigns the
# next free address from the "last_ip" counter file, and appends a matching
# [Peer] section to the server config.
# Usage: ./<script> "client_name"
set -e
HERE=$( dirname -- "$(readlink -f -- "${0}")" ) #"
mnet="10.0.0"
ip_file="last_ip"
# Generate a preshared key when the wg tool is available.
if type wg &>/dev/null ; then
pre=$(wg genpsk)
else
unset pre
fi
# Only emit a PresharedKey line when we actually have a key — the original
# unconditionally wrote "PresharedKey = $pre", producing an empty (invalid)
# "PresharedKey = " line in the server config when wg was missing.
if [ -n "${pre:-}" ]; then
pre_line="PresharedKey = $pre"
else
pre_line=""
fi
c_template="0client_template.conf"
srv_conf="../wg0-server.conf"
# Client Network - to be added into cluster config.
c_net="${mnet}.0/24"
# Client EndPoint - to be added into cluster config.
wg_srvip="1.1.1.1:5128"
function check_template(){
# Abort when the template still contains the unconfigured defaults.
set +e
grep 'Endpoint = $\|PublicKey = $\|Endpoint = 1.1.1.1:5128$' "${c_template}" &>/dev/null
ret=$?
if [ "$ret" -eq 0 ] ;then
echo "Client config not configured...exiting."
echo "Please fill-in server's: PublicKey and Endpoint, do not use default 1.1.1.1:5128"
echo "Note: to change Endpoint, change the variable srv_conf in this file, and $c_template"
exit 1
fi
set -e
}
function check_srv_conf(){
if [ ! -f "${srv_conf}" ] ;then
echo "Server config Not found exiting"
exit 1
fi
}
function valid_ip(){
## Check if the passed variable is a valid v4 IP.
## Returns 0 True or 1 False.
local ip=$1
local stat=1
if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
OIFS=$IFS
IFS='.'
ip=($ip)
IFS=$OIFS
[[ ${ip[0]} -le 255 && ${ip[1]} -le 255 \
&& ${ip[2]} -le 255 && ${ip[3]} -le 255 ]]
stat=$?
fi
return $stat
}
### MAIN ###
sed -i "s|@@@SRVEDNPOINT@@@|${wg_srvip}|" "${c_template}"
check_srv_conf
check_template
# Init the IP file, if not present.
if [ ! -f "$ip_file" ];then
echo $mnet.64 > "$ip_file"
fi
addr=$(cat "$ip_file" )
# Extract last octet of IP.
cn="${addr##*.}"
# We need last octet plus one for the next run, if this run succeeds.
(( cn++ ))
# Warn (but do not abort) if the stored address is not a valid IPv4 address.
if [ -n "${addr}" ];then
valid_ip "${addr}" || echo "Address is Not Valid, so we wont change config file - check manually"
fi
cd "$HERE"
# Require the client-name argument, or die.
# BUGFIX: the original used ( ...; exit 1 ) subshells here and below — "exit"
# only terminated the subshell, so the script kept running after the error.
# Brace groups { ...; } run in the current shell and really exit.
[ -z "$1" ] && { echo "You should add username" ; echo "Usage: $(basename "$0") \"client_name\"" ; echo; exit 1 ; }
musr="$1"
[ -f "${musr}.key" ] && { echo "this user: \"${musr}\" is already configured - please try another or clean-up" ; exit 1 ; }
# Generate the client key pair.
if [ -f ./go.sh ] ;then
./go.sh "${musr}" || { echo "Something went wrong. Check manually" ; exit 1 ; }
else
echo "Script for generating keys is not presented...quitting."
exit 1
fi
if [ -f "${musr}".key ];then
client_priv=$(cat "${musr}".key)
client_pub=$(cat "${musr}".pub)
else
echo "Client key not found"
fi
# Build the client config from the template.
if [ -f "${c_template}" ] ;then
cp "${c_template}" "${musr}".conf
else
echo "Client template not found"
exit 1
fi
sed -i "s|@@@client@|${musr}|" "${musr}".conf
sed -i "s|@@@ADDR@@@|${addr}|" "${musr}".conf
sed -i "s|@@@PRIVATEKEY@@@|${client_priv}|" "${musr}".conf
sed -i "s|@@@PRESHARED@@@|${pre}|" "${musr}".conf
# Append the new peer to the server config.
if [ -n "${srv_conf}" ] ;then
cat >> "${srv_conf}" <<EOF
[Peer] # ${musr}
PublicKey = ${client_pub}
${pre_line}
AllowedIPs = ${addr}
EOF
fi
# If we are here - then all is OK, and we can write the next calculated IP.
echo ${addr%.*}.${cn} > "$ip_file"
echo "Client config is ${musr}.conf. You can visualize it with:"
echo "qrencode -lL -t ANSIutf8 < ${musr}.conf"
echo "Or... qrencode -lL -t PNG < ${musr}.conf -o ${musr}.png"
[ -n "${srv_conf}" ] && echo "It should be added to server config in: ${srv_conf}"
|
import {
ApiOrganization,
ApiOrganizationRequest
} from '../Shared/httpClient/apiTypes';
import {
OrganizationEntity,
MemberEntity,
DEFAULT_ORGANIZATION
} from './state';
/** Converts an API organization payload into the local entity shape. */
export const mapApiOrganizationToOrganizationEntity = (
  apiOrganization: ApiOrganization
): OrganizationEntity => {
  const { id, name, slug, members } = apiOrganization;
  return { id, name, slug, members: members || [] };
};

/** Projects a member entity down to the fields the API accepts. */
export const mapMemberEntityToApiOrganizationMember = (
  member: MemberEntity
) => {
  return { username: member.username };
};

/** Wraps an organization entity in the request envelope the API expects. */
export const mapOrganizationEntityToApiOrganizationRequest = (
  organization: OrganizationEntity
): ApiOrganizationRequest => {
  const { id, name, slug, members } = organization;
  const apiMembers =
    members.length > 0
      ? members.map(mapMemberEntityToApiOrganizationMember)
      : undefined;
  return {
    organization: { id, name, slug, members: apiMembers }
  };
};

/** Builds a fresh organization pre-populated with the current user as member. */
export const buildNewOrganizationWithMember = () => {
  const currentUsername = localStorage.getItem('username') || '';
  return {
    ...DEFAULT_ORGANIZATION,
    members: [{ username: currentUsername }]
  };
};
|
<filename>src/br/com/papyrus/model/ModelAcervoDAO.java
package br.com.papyrus.model;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
/**
* Classe que acessa os dados, fazendo um CRUD para Acervo
*
* @author <NAME>
*/
/**
 * Data-access object (DAO) implementing CRUD operations for the "Acervo"
 * (library collection) table. Comments translated from Portuguese.
 *
 * @author <NAME>
 */
public class ModelAcervoDAO {
    public static String edVO;

    /**
     * Binds the 22 Acervo columns shared by the INSERT and UPDATE statements,
     * in the exact placeholder order used by both SQL strings.
     *
     * @param pstm   statement whose placeholders 1..22 are filled
     * @param acervo value object supplying the column values
     * @throws SQLException if any setter fails
     */
    private static void bindAcervo(PreparedStatement pstm, ModelAcervoVO acervo) throws SQLException {
        pstm.setString(1, acervo.getTitulo());
        pstm.setString(2, acervo.getSubTitulo());
        pstm.setString(3, acervo.getSerie());
        pstm.setString(4, acervo.getIdioma());
        pstm.setInt(5, acervo.getExemplar());
        pstm.setString(6, acervo.getEdicao());
        pstm.setInt(7, acervo.getPaginas());
        pstm.setInt(8, acervo.getVolume());
        pstm.setInt(9, acervo.getAno());
        pstm.setString(10, acervo.getAquisicao());
        pstm.setString(11, acervo.getLocal());
        pstm.setInt(12, acervo.getEditoras_Id());
        pstm.setInt(13, acervo.getClassificacao_Id());
        pstm.setInt(14, acervo.getTipos_Id());
        pstm.setString(15, acervo.getTombo());
        pstm.setString(16, acervo.getCDU());
        pstm.setString(17, acervo.getCDD());
        pstm.setString(18, acervo.getCUTTER());
        pstm.setString(19, acervo.getISBN());
        pstm.setString(20, acervo.getObservacoes());
        pstm.setString(21, acervo.getDisponivel());
        pstm.setInt(22, acervo.getAutores_Id());
    }

    /**
     * Inserts a new Acervo record.
     * BUGFIX: the original never closed the Connection or PreparedStatement;
     * try-with-resources now releases them on every path.
     * NOTE(review): assumes CriarConexao.abrirConexao() returns a fresh
     * connection per call (not a shared singleton) — verify before closing.
     *
     * @param Acervo the data to persist
     * @return true on success, false when the insert fails
     */
    public boolean inserirAcervo(ModelAcervoVO Acervo) {
        String SQL = "INSERT INTO Acervo (Titulo,SubTitulo,Serie,Idioma,"
                + "Exemplar,Edicao,Paginas,Volume,Ano,Aquisicao,Local,"
                + "Editoras_Id,Classificacao_Id,Tipos_Id,Tombo,CDU,CDD,"
                + "CUTTER,ISBN,Observacoes,Disponivel, Autores_Id) "
                + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
        try (Connection conn = CriarConexao.abrirConexao();
             PreparedStatement pstm = conn.prepareStatement(SQL)) {
            bindAcervo(pstm, Acervo);
            pstm.execute();
        } catch (ClassNotFoundException | SQLException ex) {
            ex.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Updates an existing Acervo record identified by its id.
     *
     * @param Acervo the data to update (id selects the row)
     * @return true on success, false when the update fails
     */
    public boolean alterarAcervo(ModelAcervoVO Acervo) {
        String SQL = "UPDATE Acervo SET Titulo=?,SubTitulo=?,Serie=?,"
                + "Idioma=?,Exemplar=?,Edicao=?,Paginas=?,Volume=?,Ano=?,"
                + "Aquisicao=?,Local=?,Editoras_Id=?,Classificacao_Id=?,"
                + "Tipos_Id=?,Tombo=?,CDU=?,CDD=?,CUTTER=?,ISBN=?,"
                + "Observacoes=?,Disponivel=?, Autores_Id=? WHERE id = ?";
        try (Connection conn = CriarConexao.abrirConexao();
             PreparedStatement pstm = conn.prepareStatement(SQL)) {
            bindAcervo(pstm, Acervo);
            // Placeholder 23 is the WHERE-clause id.
            pstm.setInt(23, Acervo.getId());
            pstm.execute();
        } catch (ClassNotFoundException | SQLException ex) {
            ex.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Deletes the Acervo record with the given id.
     *
     * @param Acervo value object whose id selects the row to delete
     * @return true on success, false when the delete fails
     */
    public boolean excluirAcervo(ModelAcervoVO Acervo) {
        String SQL = "DELETE FROM Acervo WHERE Id = ?";
        try (Connection conn = CriarConexao.abrirConexao();
             PreparedStatement pstm = conn.prepareStatement(SQL)) {
            pstm.setInt(1, Acervo.getId());
            pstm.execute();
        } catch (ClassNotFoundException | SQLException ex) {
            ex.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Lists every Acervo record joined with its publisher, classification,
     * type, and author names.
     *
     * @return the records as a list, or null when the query fails
     */
    public List<ModelAcervoVO> listarAcervo() {
        List<ModelAcervoVO> listaRetorno = new ArrayList<ModelAcervoVO>();
        String SQL = "SELECT a.Id, a.Titulo, a.Subtitulo, a.Serie, a.Idioma,"
                + " a.Exemplar, a.Edicao, a.Paginas, a.Volume, a.Ano, a.Aquisicao,"
                + " a.Local, a.Editoras_Id, a.Classificacao_Id, a.Tipos_Id,"
                + " a.Tombo, a.CDU, a.CDD, a.CUTTER, a.ISBN, a.Observacoes,"
                + " a.Disponivel, a.Autores_Id, e.Nome AS NomeEditoras,"
                + " c.Nome AS NomeClassificacoes, t.Nome AS NomeTipos,"
                + " au.Id, au.Nome AS AutoresNome "
                + " FROM acervo a"
                + " JOIN editoras e ON e.Id = a.Editoras_Id"
                + " JOIN tipos t ON t.Id = a.Tipos_Id"
                + " JOIN classificacoes c ON c.Id = a.Classificacao_Id"
                + " JOIN autores au ON au.Id = a.Autores_Id;";
        try (Connection conn = CriarConexao.abrirConexao();
             Statement stm = conn.createStatement();
             ResultSet rs = stm.executeQuery(SQL)) {
            while (rs.next()) {
                ModelAcervoVO acervoVO = new ModelAcervoVO(); // New row object
                acervoVO.setId(rs.getInt("Id"));
                acervoVO.setTitulo(rs.getString("Titulo"));
                acervoVO.setSubTitulo(rs.getString("SubTitulo"));
                acervoVO.setSerie(rs.getString("Serie"));
                acervoVO.setIdioma(rs.getString("Idioma"));
                acervoVO.setExemplar(rs.getInt("Exemplar"));
                acervoVO.setEdicao(rs.getString("Edicao"));
                acervoVO.setPaginas(rs.getInt("Paginas"));
                acervoVO.setVolume(rs.getInt("Volume"));
                acervoVO.setAno(rs.getInt("Ano"));
                acervoVO.setAquisicao(rs.getString("Aquisicao"));
                acervoVO.setLocal(rs.getString("Local"));
                acervoVO.setEditoras_Id(rs.getInt("Editoras_Id"));
                acervoVO.setClassificacao_Id(rs.getInt("Classificacao_Id"));
                acervoVO.setTipos_Id(rs.getInt("Tipos_Id"));
                acervoVO.setTombo(rs.getString("Tombo"));
                acervoVO.setCDU(rs.getString("CDU"));
                acervoVO.setCDD(rs.getString("CDD"));
                acervoVO.setCUTTER(rs.getString("CUTTER"));
                acervoVO.setISBN(rs.getString("ISBN"));
                acervoVO.setObservacoes(rs.getString("Observacoes"));
                acervoVO.setDisponivel(rs.getString("Disponivel"));
                acervoVO.setNomeEditoras(rs.getString("NomeEditoras"));
                acervoVO.setNomeClassificacoes(rs.getString("NomeClassificacoes"));
                acervoVO.setNomeTipos(rs.getString("NomeTipos"));
                acervoVO.setAutores_Id(rs.getInt("Autores_Id"));
                acervoVO.setAutoresNome(rs.getString("AutoresNome"));
                listaRetorno.add(acervoVO);
            }
        } catch (ClassNotFoundException | SQLException ex) {
            ex.printStackTrace();
            return null;
        }
        return listaRetorno;
    }
}
|
<gh_stars>1-10
// Barrel file: re-export the Welcome view and its controller.
export { default as Welcome } from './Welcome';
export { default as WelcomeCtrl } from './WelcomeCtrl';
|
<reponame>gfrodriguez/yii2-ckeditor<filename>assets/ckeditor/plugins/quicktable/Gruntfile.js<gh_stars>0
/* jshint node: true */
var fs = require('fs');
// Grunt build for the CKEditor "quicktable" plugin: lint (jshint + lint5),
// zip a release, and render markdown docs. Custom before/afterCompress tasks
// rewrite sample URLs for local vs CDN use.
module.exports = function (grunt) {
"use strict";
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
jshint: {
all: [
"lang/*.js",
"*.js",
"*.json"
],
options: {
jshintrc: '.jshintrc'
}
},
lint5: {
dirPath: "samples",
templates: [
"quicktable.html"
]
//,
// ignoreList: [
// ]
},
compress: {
main: {
options: {
archive: 'release/<%= pkg.name %>-<%= pkg.version %>.zip',
level: 9,
pretty: true
},
files: [
{
src: [
'**',
// Exclude files and folders
'!node_modules/**',
'!release/**',
'!.*',
'!*.log',
'!Gruntfile.js',
'!package.json',
'!LICENSE',
'!*.md',
'!template.jst',
'!*.zip'
],
dest: '<%= pkg.name %>/'
}
]
}
},
markdown: {
all: {
files: [
{
expand: true,
src: '*.md',
dest: 'release/docs/',
ext: '.html'
}
],
options: {
template: 'template.jst',
//preCompile: function(src, context) {},
//postCompile: function(src, context) {},
//templateContext: {},
markdownOptions: {
gfm: true,
highlight: 'manual'
}
}
}
}
});
// Applies each [pattern, replacement] pair to the file in place.
// Asynchronous and fire-and-forget: errors are only logged.
function replaceContent(file, searchArray) {
fs.readFile(file, 'utf8', function (err,data) {
if (err) {
return grunt.log.writeln(err);
}
var result = data;
for (var i = 0; i < searchArray.length;i++){
result = result.replace(searchArray[i][0], searchArray[i][1]);
}
fs.writeFile(file, result, 'utf8', function (err) {
if (err) {
return grunt.log.writeln(err);
}
});
});
}
grunt.loadNpmTasks('grunt-markdown');
grunt.loadNpmTasks('grunt-contrib-compress');
grunt.loadNpmTasks('grunt-lint5');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.registerTask('test', ['jshint', 'lint5']);
grunt.registerTask('build-only', ['beforeCompress', 'compress', 'afterCompress']);
grunt.registerTask('build', ['test', 'beforeCompress', 'compress', 'afterCompress', 'markdown']);
grunt.registerTask('default', ['test']);
//Custom tasks
// Rewrite the sample to use local (relative) editor paths before zipping.
grunt.registerTask('beforeCompress', 'Running before Compression', function() {
replaceContent('samples/quicktable.html', [
[/http\:\/\/cdn.ckeditor.com\/4.4.3\/full-all\//g, '../../../'],
[/language: 'en'/g, '// language: \'en\''],
[/<!-- REMOVE BEGIN -->/g, '<!-- REMOVE BEGIN --><!--']
]);
});
// Restore the CDN paths in the sample after the zip has been created.
grunt.registerTask('afterCompress', 'Running after Compression', function() {
replaceContent('samples/quicktable.html', [
[/\.\.\/\.\.\/\.\.\//g, 'http://cdn.ckeditor.com/4.4.3/full-all/'],
[/\/\/ language: 'en'/g, 'language: \'en\''],
[/<!-- REMOVE BEGIN --><!--/g, '<!-- REMOVE BEGIN -->']
]);
});
};
|
"use strict";
const fromPairs = require("lodash/fromPairs");
const pick = require("lodash/pick");
// eslint-disable-next-line no-restricted-modules
const prettier = require("../index");
const {
optionsModule,
optionsNormalizer: { normalizeCliOptions },
utils: { arrayify },
} = require("./prettier-internal");
const minimist = require("./minimist");
const constant = require("./constant");
const {
createDetailedOptionMap,
normalizeDetailedOptionMap,
} = require("./option-map");
const createMinimistOptions = require("./create-minimist-options");
/**
* @typedef {Object} Context
* @property logger
* @property {string[]} rawArguments
* @property argv
* @property {string[]} filePatterns
* @property {any[]} supportOptions
* @property detailedOptions
* @property detailedOptionMap
* @property apiDefaultOptions
* @property languages
* @property {Partial<Context>[]} stack
* @property pushContextPlugins
* @property popContextPlugins
*/
// CLI context: carries the raw argv, logger, and plugin-derived option
// metadata. Plugin option sets are layered via a stack so they can be
// pushed and popped around a run (pushContextPlugins/popContextPlugins).
class Context {
constructor({ rawArguments, logger }) {
this.rawArguments = rawArguments;
this.logger = logger;
this.stack = [];
// First parse only the plugin flags, so plugins can be loaded before the
// full option set (which plugins may extend) is parsed.
const {
plugin: plugins,
"plugin-search-dir": pluginSearchDirs,
} = parseArgvWithoutPlugins(
rawArguments,
["plugin", "plugin-search-dir"],
logger
);
this.pushContextPlugins(plugins, pluginSearchDirs);
// Now parse the complete argv against the plugin-aware option list.
const argv = parseArgv(
rawArguments,
this.detailedOptions,
undefined,
logger
);
this.argv = argv;
// Positional arguments are the file patterns to format.
this.filePatterns = argv._.map((file) => String(file));
}
/**
* Saves the current option metadata on the stack, then recomputes it with
* the given plugins applied.
* @param {string[]} plugins
* @param {string[]=} pluginSearchDirs
*/
pushContextPlugins(plugins, pluginSearchDirs) {
this.stack.push(
pick(this, [
"supportOptions",
"detailedOptions",
"detailedOptionMap",
"apiDefaultOptions",
"languages",
])
);
Object.assign(this, getContextOptions(plugins, pluginSearchDirs));
}
// Restores the option metadata saved by the matching pushContextPlugins.
popContextPlugins() {
Object.assign(this, this.stack.pop());
}
}
/**
 * Builds the option metadata (support info, detailed options, API default
 * values, languages) for the given set of plugins.
 */
function getContextOptions(plugins, pluginSearchDirs) {
  const supportInfo = prettier.getSupportInfo({
    showDeprecated: true,
    showUnreleased: true,
    showInternal: true,
    plugins,
    pluginSearchDirs,
  });
  const supportOptions = supportInfo.options;
  const languages = supportInfo.languages;

  // Merge the CLI-only options from `constant` over the core option map.
  const detailedOptionMap = normalizeDetailedOptionMap({
    ...createDetailedOptionMap(supportOptions),
    ...constant.options,
  });
  const detailedOptions = arrayify(detailedOptionMap, "name");

  // API defaults: hidden defaults overridden by each non-deprecated
  // option's declared default value.
  const defaultPairs = supportOptions
    .filter((option) => !option.deprecated)
    .map((option) => [option.name, option.default]);
  const apiDefaultOptions = {
    ...optionsModule.hiddenDefaults,
    ...fromPairs(defaultPairs),
  };

  return {
    supportOptions,
    detailedOptions,
    detailedOptionMap,
    apiDefaultOptions,
    languages,
  };
}
/**
 * Parses raw CLI arguments against the given detailed options, optionally
 * restricting both metadata and parsed values to a subset of option names,
 * then normalizes the result.
 */
function parseArgv(rawArguments, detailedOptions, keys, logger) {
  let parsed = minimist(rawArguments, createMinimistOptions(detailedOptions));
  if (keys) {
    // Keep only the requested options in both the metadata and the values.
    detailedOptions = detailedOptions.filter((option) =>
      keys.includes(option.name)
    );
    parsed = pick(parsed, keys);
  }
  return normalizeCliOptions(parsed, detailedOptions, { logger });
}
// Detailed options derived from prettier core alone (no plugins loaded).
const detailedOptionsWithoutPlugins = getContextOptions().detailedOptions;

/**
 * Parses raw CLI arguments using only the built-in option set. `keys` may be
 * a single option name or an array of names to restrict the result to.
 */
function parseArgvWithoutPlugins(rawArguments, keys, logger) {
  const keyList = typeof keys === "string" ? [keys] : keys;
  return parseArgv(rawArguments, detailedOptionsWithoutPlugins, keyList, logger);
}

module.exports = { Context, parseArgvWithoutPlugins };
|
#!/bin/bash
# SLURM batch job: runs one hyper-parameter configuration of the PE-my.py
# sequence-tagging experiment with the "cube" activation function.
#SBATCH -J Act_cube_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation=cube, then presumably seed/optimizer/depth and
# tuned learning-rate/regularization/init/dropout values from a sweep --
# TODO confirm against PE-my.py's argument parser.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py cube 122 Nadam 3 0.1939530587462125 0.0021589139413375645 runiform 0.3
|
import { buildRouteTree } from 'shared/helpers/buildRouteTree';
// Route tree for the account section: profile, security and API-key pages.
// Only 'api-keys' needs quoting; the other keys are valid identifiers.
export const routes = buildRouteTree({
  account: {
    profile: null,
    security: null,
    'api-keys': null,
  },
});
|
<gh_stars>1-10
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.selenium.page.qpool;
import java.util.List;
import org.olat.selenium.page.graphene.OOGraphene;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.Select;
/**
*
* Initial date: 23 sept. 2019<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Selenium page object for the question pool's metadata editor. The editor is
 * an accordion of panels ("general", "item analyse", ...); a panel must be
 * opened before its form fields can be filled. All methods return {@code this}
 * so calls can be chained.
 */
public class QuestionMetadataPage {

    private final WebDriver browser;

    private QuestionMetadataPage(WebDriver browser) {
        this.browser = browser;
    }

    /** Factory method wrapping the given driver. */
    public static QuestionMetadataPage getPage(WebDriver browser) {
        return new QuestionMetadataPage(browser);
    }

    /** Opens the "general" metadata panel. */
    public QuestionMetadataPage openGeneral() {
        return openMetadata("o_sel_qpool_metadata_general");
    }

    /** Opens the "item analyse" metadata panel. */
    public QuestionMetadataPage openItemAnalyse() {
        return openMetadata("o_sel_qpool_metadata_item_analyse");
    }

    /**
     * Expands the accordion panel identified by the given CSS class (if it is
     * not already expanded) and waits until its form becomes visible.
     */
    private QuestionMetadataPage openMetadata(String panelClass) {
        By hrefBy = By.cssSelector("div." + panelClass + ">div>h4>a");
        OOGraphene.waitElement(hrefBy, browser);
        // A ".panel-collapse.collapse.in" element is only present while the
        // panel is expanded.
        By panelInBy = By.cssSelector("div." + panelClass + " div.panel-collapse.collapse.in");
        List<WebElement> panelInEls = browser.findElements(panelInBy);
        if(panelInEls.isEmpty()) {
            browser.findElement(hrefBy).click();
            OOGraphene.waitElement(panelInBy, browser);
            OOGraphene.waitingALittleLonger();// wait the accordion opens up
        }
        By formBy = By.cssSelector("div." + panelClass + " div.panel-body fieldset.o_form");
        OOGraphene.waitElement(formBy, browser);
        return this;
    }

    /**
     * Fills the general metadata form. A {@code null} argument leaves the
     * corresponding field untouched.
     */
    public QuestionMetadataPage setGeneralMetadata(String topic, String taxonomy, String level,
            String keywords, String additionalInfos, String coverage, String assessmentType) {
        if(topic != null) {
            By topicBy = By.cssSelector("div.o_sel_qpool_metadata_topic input[type='text']");
            browser.findElement(topicBy).sendKeys(topic);
        }
        if(taxonomy != null) {
            By taxonomyBy = By.cssSelector("div.o_sel_qpool_metadata_taxonomy select");
            new Select(browser.findElement(taxonomyBy)).selectByVisibleText(taxonomy);
        }
        if(level != null) {
            By levelBy = By.cssSelector("div.o_sel_qpool_metadata_context select");
            new Select(browser.findElement(levelBy)).selectByVisibleText(level);
        }
        if(keywords != null) {
            By keywordsBy = By.cssSelector("div.o_sel_qpool_metadata_keywords input[type='text']");
            browser.findElement(keywordsBy).sendKeys(keywords);
        }
        if(additionalInfos != null) {
            By additionalInfosBy = By.cssSelector("div.o_sel_qpool_metadata_add_infos input[type='text']");
            browser.findElement(additionalInfosBy).sendKeys(additionalInfos);
        }
        if(coverage != null) {
            By coverageBy = By.cssSelector("div.o_sel_qpool_metadata_coverage input[type='text']");
            browser.findElement(coverageBy).sendKeys(coverage);
        }
        if(assessmentType != null) {
            By assessmentTypeBy = By.cssSelector("div.o_sel_qpool_metadata_assessment_type select");
            new Select(browser.findElement(assessmentTypeBy)).selectByValue(assessmentType);
        }
        return this;
    }

    /**
     * Sets the learning time fields. Existing values are cleared first;
     * {@code null} arguments leave the corresponding field untouched.
     */
    public QuestionMetadataPage setLearningTime(Integer days, Integer hours, Integer minutes, Integer seconds) {
        if(days != null) {
            By dayBy = By.cssSelector("div.o_sel_qpool_metadata_item_analyse input.o_sel_learning_time_d[type='text']");
            WebElement dayEl = browser.findElement(dayBy);
            dayEl.clear();
            dayEl.sendKeys(days.toString());
        }
        if(hours != null) {
            By hourBy = By.cssSelector("div.o_sel_qpool_metadata_item_analyse input.o_sel_learning_time_H[type='text']");
            WebElement hourEl = browser.findElement(hourBy);
            hourEl.clear();
            hourEl.sendKeys(hours.toString());
        }
        if(minutes != null) {
            By minuteBy = By.cssSelector("div.o_sel_qpool_metadata_item_analyse input.o_sel_learning_time_m[type='text']");
            WebElement minuteEl = browser.findElement(minuteBy);
            minuteEl.clear();
            minuteEl.sendKeys(minutes.toString());
        }
        if(seconds != null) {
            By secondBy = By.cssSelector("div.o_sel_qpool_metadata_item_analyse input.o_sel_learning_time_s[type='text']");
            WebElement secondEl = browser.findElement(secondBy);
            secondEl.clear();
            secondEl.sendKeys(seconds.toString());
        }
        return this;
    }

    /**
     * Fills the item-analyse form. {@code null} arguments are skipped.
     *
     * @param difficulty Value between 0.0 and 1.0
     * @param standardDeviation Value between 0.0 and 1.0
     * @param discriminationIndex Value between -1.0 and 1.0
     * @param distractors The number of distractors
     * @param usage Number of times this question is used
     * @param correctionTime Time in minutes to correct the question
     * @return Itself
     */
    public QuestionMetadataPage setItemAnalyse(Double difficulty, Double standardDeviation,
            Double discriminationIndex, Integer distractors, Integer usage, Integer correctionTime) {
        if(difficulty != null) {
            By difficultyBy = By.cssSelector(".o_sel_qpool_metadata_item_analyse .o_sel_difficulty input[type='text']");
            browser.findElement(difficultyBy).sendKeys(difficulty.toString());
        }
        if(standardDeviation != null) {
            By deviationBy = By.cssSelector(".o_sel_qpool_metadata_item_analyse .o_sel_std_dev_difficulty input[type='text']");
            browser.findElement(deviationBy).sendKeys(standardDeviation.toString());
        }
        if(discriminationIndex != null) {
            By discriminationBy = By.cssSelector(".o_sel_qpool_metadata_item_analyse .o_sel_std_differentation input[type='text']");
            browser.findElement(discriminationBy).sendKeys(discriminationIndex.toString());
        }
        if(distractors != null) {
            By distractorsBy = By.cssSelector(".o_sel_qpool_metadata_item_analyse .o_sel_distractors input[type='text']");
            WebElement distractorsEl = browser.findElement(distractorsBy);
            distractorsEl.clear();
            distractorsEl.sendKeys(distractors.toString());
        }
        if(usage != null) {
            By usageBy = By.cssSelector(".o_sel_qpool_metadata_item_analyse .o_sel_usage input[type='text']");
            WebElement usageEl = browser.findElement(usageBy);
            usageEl.clear();
            usageEl.sendKeys(usage.toString());
        }
        if(correctionTime != null) {
            By correctionTimeBy = By.cssSelector(".o_sel_qpool_metadata_item_analyse .o_sel_correction_time input[type='text']");
            browser.findElement(correctionTimeBy).sendKeys(correctionTime.toString());
        }
        return this;
    }

    // ---- Assertions: each waits until the form shows the expected value ----

    public QuestionMetadataPage assertTopic(String topic) {
        By topicBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_topic')]//input[@value='" + topic + "']");
        OOGraphene.waitElement(topicBy, browser);
        return this;
    }

    public QuestionMetadataPage assertTaxonomy(String taxonomy) {
        By taxonomyBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_taxonomy')]//select/option[@selected='selected'][text()[contains(.,'" + taxonomy + "')]]");
        OOGraphene.waitElement(taxonomyBy, browser);
        return this;
    }

    public QuestionMetadataPage assertLevel(String level) {
        By levelBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_context')]//select/option[@selected='selected'][text()[contains(.,'" + level + "')]]");
        OOGraphene.waitElement(levelBy, browser);
        return this;
    }

    public QuestionMetadataPage assertKeywords(String keywords) {
        By keywordsBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_keywords')]//input[@value='" + keywords + "']");
        OOGraphene.waitElement(keywordsBy, browser);
        return this;
    }

    public QuestionMetadataPage assertCoverage(String coverage) {
        By coverageBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_coverage')]//input[@value='" + coverage + "']");
        OOGraphene.waitElement(coverageBy, browser);
        return this;
    }

    public QuestionMetadataPage assertAdditionalInfos(String infos) {
        By infosBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_add_infos')]//input[@value='" + infos + "']");
        OOGraphene.waitElement(infosBy, browser);
        return this;
    }

    public QuestionMetadataPage assertAssessmentType(String assessmentType) {
        By assessmentTypeBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_assessment_type')]//select/option[@selected='selected'][@value='" + assessmentType + "']");
        OOGraphene.waitElement(assessmentTypeBy, browser);
        return this;
    }

    /** Asserts the learning time fields; {@code null} arguments are skipped. */
    public QuestionMetadataPage assertLearningTime(Integer days, Integer hours, Integer minutes, Integer seconds) {
        if(days != null) {
            By dayBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_item_analyse')]//input[@value='" + days + "'][contains(@class,'o_sel_learning_time_d')]");
            OOGraphene.waitElement(dayBy, browser);
        }
        if(hours != null) {
            By hourBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_item_analyse')]//input[@value='" + hours + "'][contains(@class,'o_sel_learning_time_H')]");
            OOGraphene.waitElement(hourBy, browser);
        }
        if(minutes != null) {
            By minuteBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_item_analyse')]//input[@value='" + minutes + "'][contains(@class,'o_sel_learning_time_m')]");
            OOGraphene.waitElement(minuteBy, browser);
        }
        if(seconds != null) {
            By secondBy = By.xpath("//div[contains(@class,'o_sel_qpool_metadata_item_analyse')]//input[@value='" + seconds + "'][contains(@class,'o_sel_learning_time_s')]");
            OOGraphene.waitElement(secondBy, browser);
        }
        return this;
    }

    // NOTE(review): parameter name "diffculty" is a typo (harmless; local only).
    public QuestionMetadataPage assertDifficulty(Double diffculty) {
        By difficultyBy = By.xpath("//div[contains(@class,'o_sel_difficulty')]//input[@value='" + diffculty + "']");
        OOGraphene.waitElement(difficultyBy, browser);
        return this;
    }

    public QuestionMetadataPage assertDiscriminationIndex(Double discriminationIndex) {
        By discriminationIndexBy = By.xpath("//div[contains(@class,'o_sel_std_differentation')]//input[@value='" + discriminationIndex + "']");
        OOGraphene.waitElement(discriminationIndexBy, browser);
        return this;
    }

    public QuestionMetadataPage assertStandardDeviation(Double stdDeviation) {
        By deviationBy = By.xpath("//div[contains(@class,'o_sel_std_dev_difficulty')]//input[@value='" + stdDeviation + "']");
        OOGraphene.waitElement(deviationBy, browser);
        return this;
    }

    public QuestionMetadataPage assertDistractors(Integer distractors) {
        By distractorsBy = By.xpath("//div[contains(@class,'o_sel_distractors')]//input[@value='" + distractors + "']");
        OOGraphene.waitElement(distractorsBy, browser);
        return this;
    }

    public QuestionMetadataPage assertUsage(Integer usage) {
        By usageBy = By.xpath("//div[contains(@class,'o_sel_usage')]//input[@value='" + usage + "']");
        OOGraphene.waitElement(usageBy, browser);
        return this;
    }

    public QuestionMetadataPage assertCorrectionTime(Integer timeInMinutes) {
        By correctionTimeBy = By.xpath("//div[contains(@class,'o_sel_correction_time')]//input[@value='" + timeInMinutes + "']");
        OOGraphene.waitElement(correctionTimeBy, browser);
        return this;
    }

    /** Saves the "general" panel. */
    public QuestionMetadataPage saveGeneralMetadata() {
        return saveMetadata("o_sel_qpool_metadata_general");
    }

    /** Saves the "item analyse" panel. */
    public QuestionMetadataPage saveItemAnalyse() {
        return saveMetadata("o_sel_qpool_metadata_item_analyse");
    }

    /**
     * Scrolls to the given panel's button bar, clicks its primary (save)
     * button and waits for the server round-trip to finish.
     */
    private QuestionMetadataPage saveMetadata(String panelClass) {
        By buttonsBy = By.cssSelector("div." + panelClass + " div.panel-body div.o_sel_qpool_metadata_buttons");
        OOGraphene.moveTo(buttonsBy, browser);
        By saveBy = By.cssSelector("div." + panelClass + " div.panel-body div.o_sel_qpool_metadata_buttons button.btn.btn-primary");
        browser.findElement(saveBy).click();
        OOGraphene.waitBusy(browser);
        OOGraphene.moveTop(browser);
        return this;
    }
}
|
import Command from '../../base'
import makeMigration from '../../helpers/makeMigration'
/**
 * `firelayer make:migration <name>` -- scaffolds a new migration file via
 * the makeMigration helper. The single required positional argument is the
 * migration name (e.g. `create_posts`).
 */
export default class MakeMigration extends Command {
  // Fixed typo in the CLI help text: "maker" -> "make".
  static description = 'make migration helper'

  static examples = ['$ firelayer make:migration create_posts']

  static args = [{ name: 'name', required: true }]

  async run() {
    const { args } = this.parse(MakeMigration)

    await makeMigration(args.name)
  }
}
|
#!/bin/sh
set -e

# Space-separated domain list; the first domain names the on-disk cert dir.
DOMAINS="kivy.org www.kivy.org wiki.kivy.org blog.kivy.org pw.kivy.org dba.kivy.org chat.kivy.org"
FIRST_DOMAIN=$(echo $DOMAINS | { read first _; echo $first; })

KEYPATH=/web/tls/data/certs/$FIRST_DOMAIN/domain.key
FULLCHAINPATH=/web/tls/data/certs/$FIRST_DOMAIN/chain.crt

if [ ! -f "$KEYPATH" ]; then
    # First run: issue a fresh certificate (acme.sh spins up its own
    # standalone server) and generate DH parameters for nginx.
    # Note: $DOMAINS inside printf is deliberately unquoted so every domain
    # becomes its own "-d <domain>" argument; all path expansions are quoted.
    mkdir -p "/web/tls/data/certs/$FIRST_DOMAIN"
    /usr/local/bin/acme.sh --home /web/tls/data $(printf -- " -d %s" $DOMAINS) \
        --standalone --issue --force --keypath "$KEYPATH" --fullchainpath "$FULLCHAINPATH"
    openssl dhparam -out "/web/tls/data/certs/$FIRST_DOMAIN/dhparam.pem" 2048
else
    # Renewal: use webroot mode when nginx is already serving (so port 80
    # stays free), otherwise fall back to standalone mode.
    if pgrep "nginx" > /dev/null; then
        /usr/local/bin/acme.sh --home /web/tls/data --webroot /web/tls --renewAll \
            --stopRenewOnError --keypath "$KEYPATH" --fullchainpath "$FULLCHAINPATH" \
            --reloadcmd "/usr/sbin/nginx -s reload"
    else
        /usr/local/bin/acme.sh --home /web/tls/data --standalone --renewAll \
            --stopRenewOnError --keypath "$KEYPATH" --fullchainpath "$FULLCHAINPATH"
    fi
fi
|
#!/bin/bash
# Get regions with zero coverage in the reference genomes.
# Usage: bash getholes.sh <genomeIDSlist> <fasta>
#   $1 database fasta id
#   $2 dataset fasta id

# 1. Map to reference genome (pre-computed elsewhere; kept for provenance)
#smalt index -k 10 -s 5 $1 $1.fasta
#smalt map -x -y 0.96 -n 20 -f samsoft -o map_$1_vs_$2.samsoft $1 $2.fasta

# 2. Create mpileup files with per-base coverage.
#    Expansions are quoted so ids containing shell metacharacters or spaces
#    do not word-split into multiple arguments.
samtools view -b -T "$1.fasta" "map_$1_vs_$2.samsoft" -o "map_$1_vs_$2.bam"
samtools sort -O bam -T toto "map_$1_vs_$2.bam" -o "sorted_map_$1_vs_$2.bam"
samtools mpileup -d 10000000 -a --reference "$1.fasta" "sorted_map_$1_vs_$2.bam" -o "mpileup_sorted_map_$1_vs_$2.tab"

# 3. Get table of regions with holes (TODO)
# 4. Get fasta of holes regions (TODO)
|
<reponame>npmcomponent/tower-resource
// Test suite for tower-resource: covers the `define` event, the
// validate/save/query pipeline, attribute get/set/typecasting/defaults and
// declarative validations. Runs both in node and in a component-based
// browser build (hence the conditional requires below).
// NOTE(review): `series` is required but never referenced in this file --
// possibly a leftover; confirm before removing.
var series = require('part-async-series');

if ('undefined' === typeof window) {
  var resource = require('..');
  var assert = require('assert');
} else {
  var resource = require('tower-resource');
  var assert = require('timoxley-assert');
}

require('tower-memory-adapter');

describe('resource', function(){
  // Reset all resource definitions between tests.
  beforeEach(resource.clear);

  it('should define', function(){
    var calls = 0;
    var DefinedResource;
    resource.on('define', function(m){
      calls++;
      DefinedResource = m;
    });
    var Post = resource('post')
      .attr('title')
      .attr('body');
    // The 'define' event fires exactly once, with the new constructor.
    assert(1 === calls);
    assert(Post === DefinedResource);
    // assert(2 === Post.attrs.length);
  });

  it('should validate/save/query', function(done){
    var calls = [];
    resource('user')
      .validate(function(context, next){
        calls.push('validate1');
        next();
      })
      .validate(function(context){
        calls.push('validate2');
      });

    resource('user').create(function(){
      // Both validators ran, in declaration order, before the save.
      assert(2 === calls.length);
      assert('validate1' === calls[0]);
      assert('validate2' === calls[1]);
      resource('user').find(function(err, records){
        assert(1 === records.length);
        done();
      });
    });
  });

  describe('attrs', function(){
    it('should get/set', function(){
      var calls = [];
      resource('user')
        .attr('email');
      var user = resource('user').init();
      assert(undefined === user.email());
      // Change events fire on both the record and the constructor.
      user.on('change email', function(curr, prev){
        calls.push([curr, prev]);
      });
      resource('user')
        .on('change email', function(record, curr, prev){
          calls.push([record, curr, prev]);
        });
      user.email('<EMAIL>');
      assert('<EMAIL>' === user.get('email'));
      assert('<EMAIL>' === user.email());
      assert.deepEqual([user, '<EMAIL>', undefined], calls[0]);
      assert.deepEqual(['<EMAIL>', undefined], calls[1]);
      assert.deepEqual(user.attrs, user.dirty);
    });

    it('should set default attributes on init', function(){
      // XXX: is there a more optimized way than this?
      // thinking that it's more optimized _not_ to do lazy
      // evaluation here, b/c everything (adapters, templates/scopes, etc.)
      // will constantly be lazily evaluating.
      // if we can assume that the attributes are set, then
      // in those cases it can just grab `.attrs`, which is much more optimized.
      resource('todo')
        .attr('title', 'string')
        .attr('completed', 'boolean', false);
      var todo = resource('todo').init();
      assert(false === todo.attrs.completed);
    });

    it('should not allow setting non-declared attrs', function(){
      resource('non-declared');
      var record = resource('non-declared').init();
      record.set('foo', 'bar');
      assert(undefined === record.get('foo'));
    });

    it('should sanitize/typcast', function(){
      resource('sanitized')
        .attr('integerAttr', 'integer')
        .attr('floatAttr', 'float')
        .attr('stringAttr', 'string')
        // XXX: for dates, need to figure out
        // hook into more robust lib.
        .attr('dateAttr', 'date')
        .attr('booleanAttr', 'boolean');
      var record = resource('sanitized').init();
      record.set('integerAttr', '61');
      assert(61 === record.get('integerAttr'));
      record.set('floatAttr', '6.1');
      assert(6.1 === record.get('floatAttr'));
      record.set('stringAttr', 100);
      assert('100' === record.get('stringAttr'));
      record.set('dateAttr', '1948-07-15');
      assert(Date.parse('1948-07-15') === record.get('dateAttr').getTime());
      record.set('booleanAttr', 0);
      assert(false === record.get('booleanAttr'));
      record.set('booleanAttr', 1);
      assert(true === record.get('booleanAttr'));
    });

    it('should coerce attr to default value if set to undefined', function(){
      resource('coerced')
        .attr('foo', 'string', 'bar');
      var record = resource('coerced').init();
      assert('bar' === record.get('foo'));
      // XXX: maybe b/c of this, we can get rid of `get` doing the check.
      record.set('foo', undefined);
      assert('bar' === record.get('foo'));
    });
  });

  describe('validations', function(){
    it('should validate', function(){
      resource('post')
        .attr('title')
        .validate('present')
        .attr('body', 'text')
        .attr('status', 'string')
        .validate('in', [ 'draft', 'published' ])
        .attr('tags', 'array')
        //.validate('lte', 5)

      var post = resource('post').init();
      post.validate();
      assert(2 === post.errors.length);
      assert('Invalid attribute: title' === post.errors[0]);
      assert('Invalid attribute: status' === post.errors[1]);
      var post = resource('post').init({ status: 'draft' });
      post.validate();
      assert(1 === post.errors.length);
      var post = resource('post').init({ status: 'draft', title: 'Hello World' });
      post.validate();
      assert(0 === post.errors.length);
    });
  });

  describe('query', function(){
    it('should have `all` method on constructor', function(){
      assert('function' === typeof resource('todo').all);
    });
  });
});
import numpy as np
import pandas as pd
from sklearn import preprocessing
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
# Load training and testing data.
# Input features have to be formatted as numerical values.
train_x = pd.read_csv("traindata.csv")
train_y = pd.read_csv("trainlabel.csv")
# NOTE(review): the "test" split is loaded from the SAME files as the
# training split, so the score below is training-set accuracy. Point these
# at the real held-out files if they exist (e.g. "testdata.csv").
test_x = pd.read_csv("traindata.csv")
test_y = pd.read_csv("trainlabel.csv")

# Normalize features: fit the scaler on the training data only, then apply
# the same transform to both splits (no test-statistics leakage).
scaler = preprocessing.StandardScaler().fit(train_x)
train_x = scaler.transform(train_x)
test_x = scaler.transform(test_x)

# Create and train the classifier. ravel() flattens the (n, 1) label frame
# into the 1-D array scikit-learn expects (avoids a DataConversionWarning).
classifier = LogisticRegression()
classifier.fit(train_x, train_y.values.ravel())

# Report accuracy on the evaluation split.
predictions = classifier.predict(test_x)
print("Accuracy:", accuracy_score(test_y, predictions))
/**
 * UI display package (UI 展示包): user-interface classes for the
 * RxGalleryFinal provider module.
 */
package cn.finalteam.rxgalleryfinalprovider.ui; |
<filename>src/main/java/org/rs2server/rs2/model/npc/MetalArmour.java
package org.rs2server.rs2.model.npc;
import org.rs2server.rs2.model.Animation;
import org.rs2server.rs2.model.Location;
import org.rs2server.rs2.model.Mob;
import org.rs2server.rs2.model.player.Player;
/**
 * Animated metal armour NPC. It is bound to the player who created it and
 * may only fight that player (presumably the Warriors' Guild armour
 * animator -- confirm against the content that spawns it).
 */
public class MetalArmour extends NPC {

	/**
	 * The minimum location this NPC can walk into.
	 */
	private static final Location minLocation = Location.create(2849,3534,0);

	/**
	 * The maximum location this NPC can walk into.
	 */
	private static final Location maxLocation = Location.create(2861,3545,0);

	// Rise animation; -1 means no animation id is known yet.
	private static final Animation RISE = Animation.create(-1); //No clue on this one.

	// The player who animated this armour; the only valid combat target.
	private Player owner;

	public MetalArmour(NPCDefinition def, Location location, Player owner) {
		super(def.getId(), location, minLocation, maxLocation, 1);
		this.playAnimation(RISE);
		this.forceChat("I'm ALIVE!");
		this.setAggressiveDistance(10);
		this.owner = owner;
	}

	/** Restricts combat: the armour can only hit its owner. */
	@Override
	public boolean canHit(Mob victim, boolean messages) {
		return super.canHit(victim, messages) && victim == this.owner;
	}
}
|
#!/usr/bin/env bash
# shellcheck disable=SC2128
# shellcheck source=/dev/null
set -x
set +e

source "./publicTest.sh"
source "./relayerPublic.sh"

# Resets the local cross2eth test environment: stops the running chain33
# relayer, wipes its data and logs, removes any leftover ganache test
# container and stray ebrelayer processes, then installs a fresh relayer.toml.
# shellcheck disable=SC2120
function mainTest() {
    kill_ebrelayer "chain33 -f"
    sleep 2

    # delete chain33 datadir
    rm ../../datadir ../../logs -rf

    local ganacheName=ganachetest
    # shellcheck disable=SC2155
    local isExit=$(docker inspect ${ganacheName} | jq ".[]" | jq ".Id")
    # A non-empty id means the container exists; stop and remove it.
    if [[ ${isExit} != "" ]]; then
        docker stop ${ganacheName}
        docker rm ${ganacheName}
    fi

    kill_all_ebrelayer
    cp ../../../plugin/dapp/cross2eth/ebrelayer/relayer.toml ./relayer.toml
}

mainTest "${1}"
|
/*
* Copyright 2016 California Institute of Technology ("Caltech").
* U.S. Government sponsorship acknowledged.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* License Terms
*/
package gov.nasa.jpl.imce.oti.magicdraw.dynamicScripts.json.magicDrawValidation
import play.api.libs.json._
import org.omg.oti.json.uml.OTIMOFElement
import scala.Predef.String
import scala.Option
/**
* OTI UML Json data structure for a MagicDraw Annotation
*
* @see com.nomagic.magicdraw.annotation.Annotation
* @see com.nomagic.magicdraw.validation.RuleViolationResult
*
* @param constraint OTI UML Json conversion of the RuleViolationResult.getRule
* @param severity OTI UML Json conversion of Annotation.getSeverity
* @param kind Annotation.getKind, if any
* @param text Annotation.getText, if any
*/
case class ElementAnnotation
( constraint: OTIMOFElement.OTIUMLConstraint,
  severity: OTIMOFElement.OTIUMLEnumerationLiteral,
  kind: Option[String],
  text: Option[String])

object ElementAnnotation {

  // Play-JSON (de)serializers, exposed as implicits so ElementAnnotation
  // can be read from / written to Json wherever it is in scope.

  implicit def reads
  : Reads[ElementAnnotation]
  = Json.reads[ElementAnnotation]

  implicit def writes
  : Writes[ElementAnnotation]
  = Json.writes[ElementAnnotation]

  implicit def formats
  : Format[ElementAnnotation]
  = Json.format[ElementAnnotation]

}
import numpy as np
import matplotlib.pyplot as plt
def plot_contours(poly, display_seconds=20):
    """Plot a list of contours and display the figure briefly.

    Generalized from the original hard-coded four-contour version: any
    number of contours is accepted, and the display duration is a parameter
    (default 20 s, matching the original behavior).

    Args:
        poly: sequence of (N, 2) arrays; column 0 is x, column 1 is y.
        display_seconds: seconds to keep the window open before closing.
    """
    for contour in poly:
        plt.plot(contour[:, 0], contour[:, 1])
    plt.show(block=False)       # display without blocking the caller
    plt.pause(display_seconds)  # keep the window visible
    plt.close()                 # then tear it down

# Example usage
poly = [np.array([[1, 2], [3, 4], [5, 6]]), np.array([[7, 8], [9, 10], [11, 12]]), np.array([[13, 14], [15, 16], [17, 18]]), np.array([[19, 20], [21, 22], [23, 24]])]
plot_contours(poly)
# Launches locust against a target host.
# Usage: ./run.sh [locustfile] [host]
FILE=locust_test.py
HOST=https://credit-stg.alphacario.com

# Optional overrides: arg 1 = locustfile, arg 2 = target host.
if [ -n "$1" ]; then
FILE=$1
fi
if [ -n "$2" ]; then
HOST=$2
fi

# Quote expansions so paths/URLs containing spaces or metacharacters survive.
locust --host="$HOST" -f "$FILE"
|
def perfect_numbers(n):
    """Return all perfect numbers in [1, n], in ascending order.

    A perfect number equals the sum of its proper divisors (e.g. 6 = 1+2+3).
    Divisors are accumulated in pairs up to sqrt(num), so the scan runs in
    O(n * sqrt(n)) instead of the original O(n^2). Also avoids shadowing the
    built-in `sum`.

    Args:
        n: inclusive upper bound of the search range.

    Returns:
        List of perfect numbers <= n.
    """
    import math  # local import keeps the function self-contained

    perfect_number_list = []
    # 1 has no proper divisors and is never perfect, so start at 2.
    for num in range(2, n + 1):
        divisor_sum = 1  # 1 divides every num > 1
        for i in range(2, math.isqrt(num) + 1):
            if num % i == 0:
                divisor_sum += i
                partner = num // i
                if partner != i:  # don't count a square-root divisor twice
                    divisor_sum += partner
        # A number equal to the sum of its proper divisors is perfect.
        if divisor_sum == num:
            perfect_number_list.append(num)
    return perfect_number_list

# Driver code
n = 1000
print(perfect_numbers(n))
package cn.stylefeng.guns.onlineaccess.modular.entity;
import cn.stylefeng.guns.core.pojo.base.entity.BaseEntity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Generated;
import org.springframework.web.multipart.MultipartFile;
import java.util.Date;
import java.util.Objects;
/**
 * MyBatis-Plus entity mapped to the "application" table: one subject/project
 * application submitted by an applicant belonging to an organisation.
 */
@TableName("application")
public class Application {

    /**
     * Primary key id
     */
    @TableId(type = IdType.ASSIGN_ID)
    @Generated
    private Long id;

    /**
     * Application status
     */
    private int status;

    /**
     * Applicant id
     */
    private Long applicantId;

    /**
     * Organisation id
     */
    private Long orgId;

    /**
     * Creation time
     */
    private Date createdTime;

    /**
     * Subject name
     */
    private String subjectName;

    /**
     * Subject keywords
     */
    private String subjectKeyword;

    /**
     * Main project participants
     */
    private String projectParticipants;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public int getStatus() {
        return status;
    }

    public void setStatus(int status) {
        this.status = status;
    }

    public Long getApplicantId() {
        return applicantId;
    }

    public void setApplicantId(Long applicantId) {
        this.applicantId = applicantId;
    }

    public Long getOrgId() {
        return orgId;
    }

    public void setOrgId(Long orgId) {
        this.orgId = orgId;
    }

    public Date getCreatedTime() {
        return createdTime;
    }

    public void setCreatedTime(Date createdTime) {
        this.createdTime = createdTime;
    }

    public String getSubjectName() {
        return subjectName;
    }

    public void setSubjectName(String subjectName) {
        this.subjectName = subjectName;
    }

    public String getSubjectKeyword() {
        return subjectKeyword;
    }

    public void setSubjectKeyword(String subjectKeyword) {
        this.subjectKeyword = subjectKeyword;
    }

    public String getProjectParticipants() {
        return projectParticipants;
    }

    public void setProjectParticipants(String projectParticipants) {
        this.projectParticipants = projectParticipants;
    }

    /** No-args constructor. */
    public Application() {
    }

    /** All-args constructor. */
    public Application(Long id, int status, Long applicantId, Long orgId, Date createdTime, String subjectName, String subjectKeyword, String projectParticipants) {
        this.id = id;
        this.status = status;
        this.applicantId = applicantId;
        this.orgId = orgId;
        this.createdTime = createdTime;
        this.subjectName = subjectName;
        this.subjectKeyword = subjectKeyword;
        this.projectParticipants = projectParticipants;
    }
}
|
<gh_stars>10-100
from scipy.integrate import odeint
# NOTE(review): this script uses sin, cos, pi, linspace, figure, plot,
# legend, xlabel, ylabel, title, xlim, ylim, draw, shape and xrange without
# importing them -- it appears to target a Python 2 "pylab" session
# (e.g. ipython --pylab). Confirm the intended environment before running.

def onejointarm(state,t):
    # Pendulum-style equation of motion for a single-link arm under gravity.
    theta = state[0] # joint angle (rad)
    theta_dot = state[1] # joint velocity (rad/s)
    l = 0.50 # link length (m)
    g = 9.81 # gravitational constant (m/s/s)
    theta_ddot = -g*sin(theta) / l
    return [theta_dot, theta_ddot]

t = linspace(0.0,10.0,1001) # 10 seconds sampled at 100 Hz (dt = 0.01 s)
state0 = [90.0*pi/180.0, 0.0] # 90 deg initial angle, 0 deg/sec initial velocity
state = odeint(onejointarm, state0, t)

figure()
plot(t,state*180/pi)
legend(('theta','thetadot'))
xlabel('TIME (sec)')
ylabel('THETA (deg) & THETA_DOT (deg/sec)')

def animate_arm(state,t):
    # Stick-figure animation of the simulated arm trajectory.
    l = 0.5
    figure(figsize=(12,6))
    plot(0,0,'r.')
    p, = plot((0,l*sin(state[0,0])),(0,-l*cos(state[0,0])),'b-')
    tt = title("%4.2f sec" % 0.00)
    xlim([-l-.05,l+.05])
    ylim([-l,.10])
    step = 3
    # NOTE(review): xrange is Python 2 only.
    for i in xrange(1,shape(state)[0]-10,step):
        p.set_xdata((0,l*sin(state[i,0])))
        p.set_ydata((0,-l*cos(state[i,0])))
        tt.set_text("%4.2f sec" % (i*0.01))
        draw()

animate_arm(state,t)
|
import re
from collections import Counter
def topNWords(filename, N):
    """Return the N most frequent non-stop-words in a text file.

    Words are matched case-insensitively with a ``\\b\\w+\\b`` pattern.

    Args:
        filename: path of the text file to scan.
        N: how many (word, count) pairs to return.

    Returns:
        List of (word, count) tuples, most frequent first.
    """
    stop_words = {'the', 'and', 'is', 'a', 'an', 'in', 'on', 'at', 'to', 'of', 'for', 'with', 'it', 'this', 'that', 'these', 'those', 'was', 'were', 'am', 'are', 'be', 'being', 'been', 'have', 'has', 'had', 'do', 'does', 'did', 'will', 'shall', 'can', 'could', 'may', 'might', 'must', 'should', 'would'}
    with open(filename, 'r') as file:
        text = file.read().lower()
    counts = Counter(
        word
        for word in re.findall(r'\b\w+\b', text)
        if word not in stop_words
    )
    return counts.most_common(N)
# Integration test for the echoAtTime service: schedules messages, kills
# redis while some are pending, and checks the service survives the restart.
killall node
brew services stop redis
sleep 2
brew services start redis
sleep 2

current_dir=$(pwd)

REDIRECT_CONSOLE=1 NODE_ENV=development NODE_PORT=5000 REDIS_HOST=localhost REDIS_PORT=6379 node index.js &
sleep 2

# NOTE(review): "date +%s%3" looks like it was meant to be "date +%s%3N"
# (epoch milliseconds); as written a literal "3" is appended to the epoch
# seconds. Confirm on the target platform (macOS/BSD date has no %N).
t2=`echo $(date +%s%3)/10 | bc -l`
t2=${t2%.*}
t2=`echo $t2 + 15 | bc -l`
curl -G "http://localhost:5000/echoAtTime?time=${t2}" --data-urlencode "message=Todo3"

t1=`echo $(date +%s%3)/10 | bc -l`
t1=${t1%.*}
t1=`echo $t1 + 5 | bc -l`
curl -G "http://localhost:5000/echoAtTime?time=${t1}" --data-urlencode "message=Todo1"
curl -G "http://localhost:5000/echoAtTime?time=${t1}" --data-urlencode "message=Todo2"

sleep 7
# Simulate a redis outage while messages are still pending.
brew services stop redis
sleep 20
brew services start redis
sleep 10
echo 'ok'
#!/bin/bash
############################################################################
#
# Title: Build Environment Script for macOS
# Authors: CR Oldham, Shane Lee
# Date: December 2015
#
# Description: This script sets up a build environment for Salt on macOS.
#
# Requirements:
# - Xcode Command Line Tools (xcode-select --install)
#
# Usage:
# This script can be passed 1 parameter
# $1 : <test mode> : if this script should be run in test mode, this
# disables the longer optimized compile time of python.
# Please DO NOT set to "true" when building a
# release version.
# (defaults to false)
#
# Example:
# The following will set up an optimized Python build environment for Salt
# on macOS
#
# ./dev_env.sh
#
############################################################################
############################################################################
# Make sure the script is launched with sudo
############################################################################
if [[ $(id -u) -ne 0 ]]
then
exec sudo /bin/bash -c "$(printf '%q ' "$BASH_SOURCE" "$@")"
fi
############################################################################
# Set to Exit on all Errors
############################################################################
trap 'quit_on_error $LINENO $BASH_COMMAND' ERR
quit_on_error() {
echo "$(basename $0) caught error on line : $1 command was: $2"
exit -1
}
############################################################################
# Parameters Required for the script to function properly
############################################################################
# The "\033]0;...\007" sequences only set the Terminal window title.
echo -n -e "\033]0;Build_Env: Variables\007"
MACOSX_DEPLOYMENT_TARGET=10.13
export MACOSX_DEPLOYMENT_TARGET
# This is needed to allow the some test suites (zmq) to pass
# taken from https://github.com/zeromq/libzmq/issues/1878
SET_ULIMIT=200000
sysctl -w kern.maxfiles=$SET_ULIMIT
sysctl -w kern.maxfilesperproc=$SET_ULIMIT
launchctl limit maxfiles $SET_ULIMIT $SET_ULIMIT
ulimit -n $SET_ULIMIT
# Repo root, script dir, checksum dir and the /opt/salt install layout used
# by every build step below.
SRCDIR=`git rev-parse --show-toplevel`
SCRIPTDIR=`pwd`
SHADIR=$SCRIPTDIR/shasums
INSTALL_DIR=/opt/salt
PKG_CONFIG=$INSTALL_DIR/bin/pkg-config
PKG_CONFIG_PATH=$INSTALL_DIR/lib/pkgconfig
PYDIR=$INSTALL_DIR/lib/python3.7
PYTHON=$INSTALL_DIR/bin/python3
PIP=$INSTALL_DIR/bin/pip3
# needed for python to find pkg-config and have pkg-config properly link
# the python install to the compiled openssl below.
export PKG_CONFIG
export PKG_CONFIG_PATH
############################################################################
# Determine Which XCode is being used (XCode or XCode Command Line Tools)
############################################################################
# Prefer Xcode command line tools over any other gcc installed (e.g. MacPorts,
# Fink, Brew)
# Check for Xcode Command Line Tools first
if [ -d '/Library/Developer/CommandLineTools/usr/bin' ]; then
    MAKE=/Library/Developer/CommandLineTools/usr/bin/make
elif [ -d '/Applications/Xcode.app/Contents/Developer/usr/bin' ]; then
    MAKE=/Applications/Xcode.app/Contents/Developer/usr/bin/make
else
    echo "No installation of XCode found. This script requires XCode."
    echo "Try running: xcode-select --install"
    # 'exit -1' is undefined by POSIX; use a plain non-zero status.
    exit 1
fi
############################################################################
# Download Function
# - Downloads and verifies the MD5
############################################################################
# Download a tarball, verify its SHA512 against $SHADIR, and unpack it into
# $BUILDDIR.
# $1 : URL of the tarball to fetch
# Returns non-zero on a missing argument, hash mismatch, or extract failure.
download(){
    if [ -z "$1" ]; then
        echo "Must pass a URL to the download function"
        # Bug fix: the original fell through here and ran curl with an
        # empty URL; bail out instead.
        return 1
    fi
    URL="$1"
    PKGNAME=${URL##*/}
    cd "$BUILDDIR"
    echo "################################################################################"
    echo "Retrieving $PKGNAME"
    echo "################################################################################"
    curl -LO# "$URL"
    echo "################################################################################"
    echo "Comparing Sha512 Hash"
    echo "################################################################################"
    # Word-split output into arrays; element 0 (the default expansion used
    # in the comparison below) is the hash itself.
    FILESHA=($(shasum -a 512 "$PKGNAME"))
    EXPECTEDSHA=($(cat "$SHADIR/$PKGNAME.sha512"))
    if [ "$FILESHA" != "$EXPECTEDSHA" ]; then
        echo "ERROR: Sha Check Failed for $PKGNAME"
        return 1
    fi
    echo "################################################################################"
    echo "Unpacking $PKGNAME"
    echo "################################################################################"
    tar -zxvf "$PKGNAME"
    return $?
}
############################################################################
# Ensure Paths are present and clean
############################################################################
echo "################################################################################"
echo "Ensure Paths are present and clean"
echo "################################################################################"
echo -n -e "\033]0;Build_Env: Clean\007"
# Make sure $INSTALL_DIR is clean
# (wipes any previous /opt/salt install entirely)
rm -rf $INSTALL_DIR
mkdir -p $INSTALL_DIR
# Hand the install dir to the invoking user.
# NOTE(review): the script re-execs under sudo, so $USER may already be
# root here — verify this chown targets the intended account.
chown $USER:staff $INSTALL_DIR
# Make sure build staging is clean
rm -rf build
mkdir -p build
BUILDDIR=$SCRIPTDIR/build
############################################################################
# Download and install pkg-config
############################################################################
echo -n -e "\033]0;Build_Env: pkg-config: download\007"
# Fetch over https: the payload is SHA512-checked by download(), but there
# is no reason to pull it over plain http.
PKGURL="https://pkgconfig.freedesktop.org/releases/pkg-config-0.29.2.tar.gz"
PKGDIR="pkg-config-0.29.2"
download $PKGURL
echo "################################################################################"
echo "Building pkg-config"
echo "################################################################################"
# download() leaves the CWD in $BUILDDIR, so this relative cd works.
cd $PKGDIR
echo -n -e "\033]0;Build_Env: pkg-config: configure\007"
env LDFLAGS="-framework CoreFoundation -framework Carbon" ./configure --prefix=$INSTALL_DIR --with-internal-glib
echo -n -e "\033]0;Build_Env: pkg-config: make\007"
$MAKE
echo -n -e "\033]0;Build_Env: pkg-config: make check\007"
$MAKE check
echo -n -e "\033]0;Build_Env: pkg-config: make install\007"
$MAKE install
############################################################################
# Download and install libsodium
############################################################################
# download() leaves the CWD in $BUILDDIR, so the relative cd's below work.
echo -n -e "\033]0;Build_Env: libsodium: download\007"
PKGURL="https://download.libsodium.org/libsodium/releases/libsodium-1.0.18.tar.gz"
PKGDIR="libsodium-1.0.18"
download $PKGURL
echo "################################################################################"
echo "Building libsodium"
echo "################################################################################"
cd $PKGDIR
echo -n -e "\033]0;Build_Env: libsodium: configure\007"
./configure --prefix=$INSTALL_DIR
echo -n -e "\033]0;Build_Env: libsodium: make\007"
$MAKE
echo -n -e "\033]0;Build_Env: libsodium: make check\007"
$MAKE check
echo -n -e "\033]0;Build_Env: libsodium: make install\007"
$MAKE install
############################################################################
# Download and install zeromq
############################################################################
echo -n -e "\033]0;Build_Env: zeromq: download\007"
PKGURL="https://github.com/zeromq/zeromq4-1/releases/download/v4.1.7/zeromq-4.1.7.tar.gz"
PKGDIR="zeromq-4.1.7"
download $PKGURL
echo "################################################################################"
echo "Building zeromq"
echo "################################################################################"
cd $PKGDIR
echo -n -e "\033]0;Build_Env: zeromq: configure\007"
./configure --prefix=$INSTALL_DIR
echo -n -e "\033]0;Build_Env: zeromq: make\007"
$MAKE
echo -n -e "\033]0;Build_Env: zeromq: make check\007"
# some tests fail occasionally.
$MAKE check
echo -n -e "\033]0;Build_Env: zeromq: make install\007"
$MAKE install
############################################################################
# Download and install OpenSSL
############################################################################
echo -n -e "\033]0;Build_Env: OpenSSL: download\007"
# Fetch over https rather than plain http (integrity is still backstopped
# by download()'s SHA512 check).
# NOTE(review): OpenSSL 1.0.2 is end-of-life; consider moving to a
# supported branch when the rest of the build allows it.
PKGURL="https://www.openssl.org/source/openssl-1.0.2u.tar.gz"
PKGDIR="openssl-1.0.2u"
download $PKGURL
echo "################################################################################"
echo "Building OpenSSL"
echo "################################################################################"
cd $PKGDIR
echo -n -e "\033]0;Build_Env: OpenSSL: configure\007"
./Configure darwin64-x86_64-cc shared --prefix=$INSTALL_DIR --openssldir=$INSTALL_DIR/openssl
echo -n -e "\033]0;Build_Env: OpenSSL: make\007"
$MAKE
echo -n -e "\033]0;Build_Env: OpenSSL: make test\007"
$MAKE test
echo -n -e "\033]0;Build_Env: OpenSSL: make install\007"
$MAKE install
############################################################################
# Download and install Python
############################################################################
echo -n -e "\033]0;Build_Env: Python: download\007"
# if $1 is true the we should remove the --enable-optimizations flag to get a quicker
# build if testing other functions of this script
if [ "$1" == "true" ]; then
    PY_CONF="--prefix=$INSTALL_DIR --enable-shared --with-ensurepip=install"
else
    PY_CONF="--prefix=$INSTALL_DIR --enable-shared --with-ensurepip=install --enable-optimizations"
fi
PKGURL="https://www.python.org/ftp/python/3.7.4/Python-3.7.4.tar.xz"
PKGDIR="Python-3.7.4"
download $PKGURL
echo "################################################################################"
echo "Building Python"
echo "################################################################################"
echo "Note there are some test failures"
cd $PKGDIR
echo -n -e "\033]0;Build_Env: Python: configure\007"
# removed --enable-toolbox-glue as no longer a config option
# PY_CONF is intentionally unquoted so each flag word-splits into its own
# configure argument.
./configure $PY_CONF
echo -n -e "\033]0;Build_Env: Python: make\007"
$MAKE
echo -n -e "\033]0;Build_Env: Python: make install\007"
$MAKE install
############################################################################
# upgrade pip
############################################################################
$PIP install --upgrade pip wheel
############################################################################
# Download and install salt python dependencies
############################################################################
echo -n -e "\033]0;Build_Env: PIP Dependencies\007"
cd $BUILDDIR
echo "################################################################################"
echo "Installing Salt Dependencies with pip (normal)"
echo "################################################################################"
# Install straight into the target site-packages, ignoring anything already
# present and bypassing the wheel cache.
$PIP install -r $SRCDIR/pkg/osx/req.txt -r $SRCDIR/pkg/osx/req_pyobjc.txt \
    --target=$PYDIR/site-packages \
    --ignore-installed \
    --no-cache-dir
echo "--------------------------------------------------------------------------------"
echo "Create Symlink to certifi for openssl"
echo "--------------------------------------------------------------------------------"
# Point OpenSSL's default cert file at the CA bundle shipped with certifi.
ln -s $PYDIR/site-packages/certifi/cacert.pem $INSTALL_DIR/openssl/cert.pem
echo -n -e "\033]0;Build_Env: Finished\007"
cd $BUILDDIR
echo "################################################################################"
echo "Build Environment Script Completed"
echo "################################################################################"
|
#!/bin/bash
# Note is setup script currently does four things:
#
# 1. It creates a binaryninja.desktop file in ${HOME}/.local/share/applications and
# copies it to the desktop
# 2. It creates a .xml file to add a mime type for .bndb files.
# 3. It adds a binaryninja: url handler.
# 4. Creates .pth python file to add binary ninja to your python path
# Populate the global names/paths used by every other step.  $ROOT must be
# set ("root" or "user") before this is called; it selects system-wide
# /usr/share + sudo versus per-user ~/.local/share with no sudo.
setvars()
{
    APP="binaryninja"
    FILECOMMENT="Binary Ninja Analysis Database"
    APPCOMMENT="Binary Ninja: A Reverse Engineering Platform"
    # Resolve the install root from this script's real location (scripts/..).
    BNPATH=$(realpath "$(dirname "$(readlink -f "$0")")/..")
    EXEC="${BNPATH}/binaryninja"
    PNG="${BNPATH}/docs/img/logo.png"
    EXT="bndb"
    if [ "$ROOT" == "root" ]
    then
        SHARE="/usr/share" #For system
        SUDO="sudo " #For system
    else
        SHARE="${HOME}/.local/share" #For user only
        SUDO="" #For user only
    fi
    DESKTOPFILE="${SHARE}/applications/${APP}.desktop"
    MIMEFILE="${SHARE}/mime/packages/application-x-${APP}.xml"
    IMAGEFILE="${SHARE}/pixmaps/application-x-${APP}.png"
}
# Print the option summary to stderr and exit non-zero; also serves as the
# handler for -h and any unknown flag.
usage()
{
    echo "Usage: $0 -[ulpdmrsh]
-u: For uninstall, removes all associations (does NOT remove ${HOME}/.binaryninja)
-l: Disable creation ${HOME}/.binaryninja/lastrun file
-p: Disable adding python path .pth file
-d: Disable adding desktop launcher
-m: Disable adding mime associations
-r: Run as root to set system wide preferences (requires sudo permissions)
-s: Run in headless mode (equivalent to -d -m)
-h: Display this help
" 1>&2
    exit 1
}
lastrun()
{
    #Contains the last run location, but on systems without a UI this ensures
    #the UI doesn't have to run once for the core to be available.
    if [ -f "${HOME}/.binaryninja/lastrun" ]
    then
        echo lastrun already exists, remove to create a new one
    else
        # mkdir -p is a no-op when the directory already exists, so the
        # original's separate existence test is unnecessary.
        mkdir -p "${HOME}/.binaryninja"
        echo ${BNPATH} > "${HOME}/.binaryninja/lastrun"
    fi
}
# Install Binary Ninja's .pth file into python3's site path by running the
# bundled install_api.py, passing $ROOT and -s when non-interactive.
pythonpath()
{
    echo Configuring python path
    if [ "$USERINTERACTIVE" == "true" ]
    then
        SILENT=""
    else
        SILENT="-s"
    fi
    if [[ -x "`which python3`" ]]
    then
        # The leading python3 -V probe skips the install if python3 is broken.
        python3 -V >/dev/null 2>&1 && ${SUDO}python3 "${BNPATH}/scripts/install_api.py" ${ROOT} ${SILENT}
    else
        echo "Python3 not found. Not installing BN PTH file."
    fi
}
# Write the .desktop launcher and [Added Associations] entries, mark the
# launcher trusted for GNOME, and refresh the desktop database.
createdesktopfile()
{
    mkdir -p ${SHARE}/{mime/packages,applications,pixmaps}
    echo Creating .desktop file
    # Desktop File
    # NOTE(review): Exec= escapes spaces as "\ " while Icon= substitutes
    # "\s"; the Icon substitution looks inconsistent for paths containing
    # spaces — verify which form the spec expects.
    read -d '' DESKTOP << EOF
[Desktop Entry]
Name=${APP}
Exec=${EXEC// /\\\\ } %u
MimeType=application/x-${APP};x-scheme-handler/${APP};
Icon=${PNG// /\\\\s}
Terminal=false
Type=Application
Categories=Utility;
Comment=${APPCOMMENT}
EOF
    read -d '' MIMEAPPS << EOF
[Added Associations]
application/x-executable=${APP}.desktop
application/x-elf=${APP}.desktop
application/x-sharedlib=${APP}.desktop
EOF
    # tee (rather than plain redirection) keeps the write working when
    # $SUDO is non-empty; associations are appended, launcher overwritten.
    echo "${DESKTOP}" | $SUDO tee ${DESKTOPFILE} >/dev/null
    echo "${MIMEAPPS}" | $SUDO tee -a ${MIMEFILE} >/dev/null
    $SUDO chmod +x ${DESKTOPFILE}
    GNOMEVERSION=`gnome-shell --version|awk '{print $3}'`
    MINVERSION=3.36
    # This check is dumb. Thanks Gnome for not only imitating the worst
    # permission models of MacOS and Windows but doing it in a way that isn't
    # even consistent between adjacent LTS versions :facepalm: Note that a
    # reboot or reload of Gnome is required but I'm not going to do it here
    # because the experience is poor.
    # (The sort picks the larger of GNOMEVERSION/MINVERSION; newer shells
    # get metadata::trusted "true", older ones "yes".)
    if [ $(echo -en "$GNOMEVERSION\n$MINVERSION" | sort -t '.' -k 1,1 -k 2,2 -k 3,3 -g | tail -n1) != $MINVERSION ]
    then
        DBFLAG="true"
    else
        DBFLAG="yes"
    fi
    echo -e "\n\nWARNING: Note that the desktop icon that was created may not be usable until you login again or reboot depending on your GNOME version.\n"
    $SUDO dbus-launch gio set "${DESKTOPFILE}" "metadata::trusted" $DBFLAG
    $SUDO update-desktop-database ${SHARE}/applications
}
# Register the application/x-binaryninja MIME type for *.bndb, install the
# icon, and refresh the MIME database.
createmime()
{
    echo Creating MIME settings
    # Ensure a launcher exists somewhere before registering the MIME type.
    if [ ! -f ${DESKTOPFILE} -a ! -f ${HOME}/Desktop/${APP}.desktop ]
    then
        createdesktopfile
    fi
    echo "<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<mime-info xmlns=\"http://www.freedesktop.org/standards/shared-mime-info\">
<mime-type type=\"application/x-${APP}\">
<comment>${FILECOMMENT}</comment>
<icon name=\"application-x-${APP}\"/>
<magic-deleteall/>
<glob pattern=\"*.${EXT}\"/>
<sub-class-of type=\"application/x-sqlite3\" />
</mime-type>
</mime-info>"| $SUDO tee ${MIMEFILE} >/dev/null
    #echo Copying icon
    #$SUDO cp "$PNG" "$IMAGEFILE"
    $SUDO cp "${PNG}" "${IMAGEFILE}"
    $SUDO update-mime-database ${SHARE}/mime
}
# Copy the launcher onto the user's desktop.
# NOTE(review): assumes ${HOME}/Desktop exists and that $DESKTOPFILE was
# already created — callers run createdesktopfile first.
addtodesktop()
{
    cp "$DESKTOPFILE" "${HOME}/Desktop"
}
# Remove launcher, MIME entry, icon and desktop copy (rm -i prompts per
# file), refresh the MIME database, remove the python .pth, then exit.
uninstall()
{
    rm -i -r "$DESKTOPFILE" "$MIMEFILE" "$IMAGEFILE" "${HOME}/Desktop/${APP}.desktop"
    $SUDO update-mime-database ${SHARE}/mime
    python3 -V >/dev/null 2>&1 && ${SUDO}python3 "${BNPATH}/scripts/install_api.py" -u
    exit 0
}
# Defaults: perform every step, as the current user.
ROOT=user
CREATEDESKTOP=true
CREATEMIME=true
ADDTODESKTOP=true
CREATELASTRUN=true
USERINTERACTIVE=true
PYTHONPATH=true
UNINSTALL=false
# Flags only disable individual steps (or switch to root/uninstall mode);
# -h and any unknown flag fall through to usage(), which exits.
while [[ $# -ge 1 ]]
do
    flag="$1"
    case $flag in
        -u)
            UNINSTALL=true
            ;;
        -l)
            CREATELASTRUN=false
            ;;
        -p)
            PYTHONPATH=false
            ;;
        -d)
            ADDTODESKTOP=false
            CREATEDESKTOP=false
            ;;
        -m)
            CREATEMIME=false
            ;;
        -r)
            ROOT=root
            ;;
        -s)
            # Headless: skip every UI-facing step.
            ADDTODESKTOP=false
            CREATEMIME=false
            CREATEDESKTOP=false
            USERINTERACTIVE=false
            ;;
        -h|*)
            usage
            ;;
    esac
    shift
done
setvars
# Uninstall short-circuits everything else (uninstall() exits).
if [ "$UNINSTALL" == "true" ]
then
    uninstall
fi
if [ "$CREATEDESKTOP" == "true" ]
then
    createdesktopfile
fi
if [ "$CREATEMIME" == "true" ]
then
    createmime
fi
if [ "$ADDTODESKTOP" == "true" ]
then
    addtodesktop
fi
if [ "$CREATELASTRUN" == "true" ]
then
    lastrun
fi
if [ "$PYTHONPATH" == "true" ]
then
    pythonpath
fi
|
# nasm -f elf32 src/boot.s -o boot.o
# Abort immediately if any step fails; previously the script carried on and
# still printed "Built kernel.bin." after a failed assemble/compile/link.
set -e
yasm -p gas -f elf32 src/boot.s -o boot.o
gcc -m32 -c src/main.c -o kernel.o -ffreestanding -O2 -Wall -Wextra -fno-exceptions
ld -m elf_i386 -T src/linker.ld -o kernel.bin boot.o kernel.o
echo "Built kernel.bin."
# Optionally wrap the kernel in a GRUB rescue ISO: pass "iso" as $1.
if test "$1" = "iso"; then
    echo "Building kernel.iso, please wait..."
    mkdir -p iso/boot/grub
    cp kernel.bin iso/boot
    cp src/grub.cfg iso/boot/grub
    grub-mkrescue -o kernel.iso iso/
    echo "Built kernel.iso."
fi
|
<gh_stars>0
/*jshint node: true */
'use strict';
var
net = require('net'),
generate = require('mqtt-packet').generate,
ParserBuild = require('mqtt-packet').parser;
/**
 * Outbound adapter for one client connection: serialises MQTT packets onto
 * the underlying TCP socket.
 *
 * @param {net.Socket} socket connected client socket
 */
function MqttSender(socket) {
    // Encode the packet with mqtt-packet and push the raw bytes to the peer.
    this.send = (msg) => {
        socket.write(generate(msg));
    };
    // `code` and `reason` are accepted for interface parity but ignored;
    // closing simply half-closes the TCP stream.
    this.close = (code, reason) => {
        socket.end();
    };
}
/**
 * TCP front-end that turns raw MQTT connections into SessionClass sessions.
 *
 * @param {object} gate          application gateway; supplies makeSessionId()
 * @param {Function} SessionClass constructor invoked per connection
 * @param {object|number} options passed straight to net's listen()
 */
function MqttServer(gate, SessionClass, options) {
    let sessionList = undefined;

    // Wire up one freshly accepted client socket.
    const handleConnection = (socket) => {
        const sender = new MqttSender(socket);
        const session = new SessionClass(gate, sender, gate.makeSessionId());
        if (sessionList) {
            sessionList.registerSession(session);
        }
        const parser = ParserBuild();
        parser.on('packet', (msg) => {
            console.log('PACKET ARRIVED', msg);
            session.handle(msg);
        });
        socket.on('data', (chunk) => {
            parser.parse(chunk);
        });
        socket.on('end', () => {
        });
        socket.on('close', () => {
            // Deregister (when tracking is enabled) and release session state.
            if (sessionList) {
                sessionList.removeSession(session);
            }
            session.cleanup();
        });
    };

    const _server = net.Server(handleConnection);
    _server.listen(options);

    // Optional registry used to track live sessions.
    this.setSessionList = (list) => {
        sessionList = list;
    };
}
module.exports = MqttServer;
|
def add_chars(string, num):
    """Return *string* right-padded with *num* spaces.

    A non-positive *num* leaves the string unchanged, mirroring the
    behaviour of ``string + ' ' * num`` for ``num <= 0``.
    """
    return string.ljust(len(string) + num)
#!/usr/bin/env bash
# (Fixed: the original shebang was "#/bin/env bash" — missing the "!" and
# pointing at /bin/env — so the kernel never interpreted it as a shebang.)
# Compile C to assembly with mynqdcc, then call "gcc -w" to generate an executable.
# This is useful for running the nqcc test suite, e.g.:
# ```
# cd write_a_c_compiler
# ./test_compiler.sh "../mycc.sh /path/to/mynqcc-exe" 1
# ```
# $cmd is intentionally unquoted below: $1 may contain the compiler path
# plus its own arguments, which must word-split.
cmd="$1"
srcf="$2"
outf="${srcf%.*}"
asmf="${outf}.s"
$cmd -o "$asmf" "$srcf" && gcc -m32 -w -o "$outf" "$asmf"
|
<filename>night.py<gh_stars>0
"""
Defines the main night rng and the subsequent function that is the random action
As well as moon functions and "wolfing out"
night.py
<NAME>
"""
from main import *
def moon(day):
    """
    Given the total number of days in the session it works out if their is a full moon or not.
    :param day: Total number of days in the session
    :return: True when ``day`` falls on a 29-day lunar-cycle boundary.
    """
    # The comparison already yields a bool; the original if/else ladder
    # returning True/False was redundant.
    return day % 29 == 0
def fullMoon(day):
    """
    Based on the month it buffs/debuffs you based on the moon for the month
    :param day: days in the year
    :return: None
    """
    # Table lookup replaces the 12-way elif ladder.  Any month not listed
    # (December, or an unexpected value from month()) gets the original
    # else-branch text, "The Cold Moon".
    names = {
        "January": "The Wolf Moon",  # In later version this will change stats
        "February": "The Snow Moon",
        "March": "The Worm Moon",
        "April": "The Pink Moon",
        "May": "The Flower Moon",
        "June": "The Strawberry Moon",
        "July": "The Buck Moon",
        "August": "The Sturgeon Moon",
        "September": "The Blood Moon",
        "October": "The Hunter's Moon",
        "November": "The Beaver Moon",
    }
    print(names.get(month(day), "The Cold Moon"))
def howlTime(howl):
    """
    Decides if the howls should have normal tutorial dialogue or random events.
    :param howl: dict mapping "<month>Howl" keys to booleans
    :return: None
    """
    # One loop replaces twelve copy-pasted if/else blocks.  Direct indexing
    # (howl[key]) preserves the original behaviour of raising KeyError when
    # a month key is missing.
    for key in ("janHowl", "febHowl", "marHowl", "aprHowl", "mayHowl",
                "juneHowl", "julyHowl", "augHowl", "septHowl", "octHowl",
                "novHowl", "decHowl"):
        if howl[key] is False:
            # Tutorial Dialogue
            pass
        else:
            # random related event
            pass
def night(day):
    """
    Prompt for and dispatch a night action, offering an extra option on
    full-moon nights.
    :param day: total days in the current session
    :return: never returns normally -- see NOTE below
    """
    input("As the sun takes it's daily nap and the moon starts to rise, night approaches. What will you do?")
    # NOTE(review): this loop has no exit path -- every action branch is a
    # placeholder `pass`.  Presumably a return/break arrives once actions
    # are implemented.
    # (Fixed: the "Am I tried?" prompts were typos for "Am I tired?".)
    while True:
        if moon(day) is False:
            WWYD = input("1: Go clubbing, 2: Stay home, 3: Go to sleep early")
            if WWYD.isdigit() is True:
                if int(WWYD) == 1:
                    pass
                elif int(WWYD) == 2:
                    pass
                elif int(WWYD) == 3:
                    pass
                else:
                    input("Am I tired? Because I have no idea what I'm thinking.")
            else:
                input("Am I tired? Because I have no idea what I'm thinking.")
        else:
            WWYD = input("1: Go clubbing, 2: Stay home, 3: Go to sleep early, 4: Chain yourself up.")
            if WWYD.isdigit() is True:
                if int(WWYD) == 1:
                    pass
                elif int(WWYD) == 2:
                    pass
                elif int(WWYD) == 3:
                    pass
                elif int(WWYD) == 4:
                    pass
                else:
                    input("Am I tired? Because I have no idea what I'm thinking.")
            else:
                input("Am I tired? Because I have no idea what I'm thinking.")
def wolfOut():
    """Placeholder for the werewolf transformation; not yet implemented."""
    pass
<filename>ldaptor/test/test_ldaperrors.py
"""
Test cases for ldaptor.protocols.ldap.ldaperrors module.
"""
from twisted.trial import unittest
from ldaptor.protocols.ldap import ldaperrors
class UnnamedException(ldaperrors.LDAPException):
    """LDAP exception with undefined name"""
    # Deliberately defines no extra attributes: the missing `name` exercises
    # toWire()'s "Unknown LDAP error <repr>" fallback path.
class GetTests(unittest.TestCase):
    """Getting LDAP exception implementation by error code"""
    def test_get_success(self):
        """Getting OK message"""
        # Result code 0 maps to the Success class with bytes name b'success'.
        success = ldaperrors.get(0, 'Some message')
        self.assertEqual(success.__class__, ldaperrors.Success)
        self.assertEqual(success.resultCode, 0)
        self.assertEqual(success.name, b'success')
    def test_get_existing_exception(self):
        """Getting existing LDAPException subclass"""
        # Code 49 maps to LDAPInvalidCredentials and carries the message.
        exception = ldaperrors.get(49, 'Error message')
        self.assertEqual(exception.__class__, ldaperrors.LDAPInvalidCredentials)
        self.assertEqual(exception.resultCode, 49)
        self.assertEqual(exception.name, b'invalidCredentials')
        self.assertEqual(exception.message, 'Error message')
    def test_get_nonexisting_exception(self):
        """Getting non-existing LDAP error"""
        # Unmapped codes fall back to LDAPUnknownError, exposing `code`
        # rather than `resultCode`.
        exception = ldaperrors.get(55, 'Error message')
        self.assertEqual(exception.__class__, ldaperrors.LDAPUnknownError)
        self.assertEqual(exception.code, 55)
        self.assertEqual(exception.message, 'Error message')
class LDAPExceptionTests(unittest.TestCase):
    """Getting bytes representations of LDAP exceptions"""
    def test_exception_with_message(self):
        """Exception with a text message"""
        # toWire() renders "name: message" as bytes.
        exception = ldaperrors.LDAPProtocolError('Error message')
        self.assertEqual(exception.toWire(), b'protocolError: Error message')
    def test_empty_exception(self):
        """Exception with no message"""
        # Without a message only the name is rendered.
        exception = ldaperrors.LDAPCompareFalse()
        self.assertEqual(exception.toWire(), b'compareFalse')
    def test_unnamed_exception(self):
        """Exception with no name"""
        # UnnamedException (defined above) has no `name`, triggering the
        # repr-based fallback.
        exception = UnnamedException()
        self.assertEqual(exception.toWire(), b'Unknown LDAP error UnnamedException()')
    def test_unknown_exception_with_message(self):
        """Unknown exception with a text message"""
        exception = ldaperrors.LDAPUnknownError(56, 'Error message')
        self.assertEqual(exception.toWire(), b'unknownError(56): Error message')
    def test_unknown_empty_exception(self):
        """Unknown exception with no message"""
        exception = ldaperrors.LDAPUnknownError(57)
        self.assertEqual(exception.toWire(), b'unknownError(57)')
|
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppress TensorFlow logging (1)
import pathlib
import tensorflow as tf
import sys
tf.get_logger().setLevel('ERROR') # Suppress TensorFlow logging (2)
import time
from object_detection.utils import label_map_util
from object_detection.utils import visualization_utils as viz_utils
from object_detection.builders import model_builder
from object_detection.utils import config_util
import numpy as np
from PIL import Image
import matplotlib
import matplotlib.pyplot as plt
import warnings
import cv2
warnings.filterwarnings('ignore') # Suppress Matplotlib warnings
matplotlib.use('TKAgg', force=True)
# Enable GPU dynamic memory allocation
gpus = tf.config.experimental.list_physical_devices('GPU')
for gpu in gpus:
    tf.config.experimental.set_memory_growth(gpu, True)
# NOTE(review): despite the name, this directory holds a pipeline.config and
# a checkpoint/ subdir (restored below), not a SavedModel export — confirm
# the layout of my_model_2.
PATH_TO_SAVED_MODEL = os.path.join(os.getcwd(),"my_model_2")
print('Loading model...', end='')
start_time = time.time()
# Map class ids to display names from the label map next to the script.
category_index = label_map_util.create_category_index_from_labelmap(os.path.join(os.getcwd(),"labelmap.pbtxt"),
                                                                    use_display_name=True)
# Load pipeline config and build a detection model
configs = config_util.get_configs_from_pipeline_file(PATH_TO_SAVED_MODEL+"/pipeline.config")
model_config = configs['model']
detection_model = model_builder.build(model_config=model_config, is_training=False)
# Load saved model and build the detection function
# expect_partial() silences warnings about optimizer slots absent at inference.
ckpt = tf.compat.v2.train.Checkpoint(model=detection_model)
ckpt.restore(os.path.join(PATH_TO_SAVED_MODEL+"/checkpoint", 'ckpt-0')).expect_partial()
end_time = time.time()
elapsed_time = end_time - start_time
print('Done! Took {} seconds'.format(elapsed_time))
def detect_fn(image):
    """Detect objects in image.

    :param image: batched image tensor accepted by the model's preprocess()
    :return: (postprocessed detections, raw prediction dict, shapes
        flattened to 1-D via tf.reshape)
    """
    image, shapes = detection_model.preprocess(image)
    prediction_dict = detection_model.predict(image, shapes)
    detections = detection_model.postprocess(prediction_dict, shapes)
    return detections, prediction_dict, tf.reshape(shapes, [-1])
def load_image_into_numpy_array(path):
    """Load an image from file into a numpy array.
    Puts image into numpy array to feed into tensorflow graph.
    Note that by convention we put it into a numpy array with shape
    (height, width, channels), where channels=3 for RGB.
    Args:
    path: the file path to the image
    Returns:
    uint8 numpy array with shape (img_height, img_width, 3)
    """
    # Image.open() is lazy and keeps the file handle open; the context
    # manager closes it once the pixel data has been copied into the array.
    with Image.open(path) as img:
        return np.array(img)
print('Running inference for video source... ', end='')
video = cv2.VideoCapture(0)  # 0 selects the default webcam/video source
while True:
    ret, image_np = video.read()
    # Guard against a failed grab (camera unplugged / end of stream):
    # without this check, np.expand_dims(None, ...) crashes below.
    if not ret:
        break
    image_np_expanded = np.expand_dims(image_np, axis=0)
    # The input needs to be a tensor, convert it using `tf.convert_to_tensor`.
    input_tensor = tf.convert_to_tensor(np.expand_dims(image_np, 0), dtype=tf.float32)
    # The model expects a batch of images, so add an axis with `tf.newaxis`.
    detections, predictions_dict, shapes = detect_fn(input_tensor)
    # input_tensor = np.expand_dims(image_np, 0)
    label_id_offset = 1
    image_np_with_detections = image_np.copy()
    # Draw boxes/labels/scores for detections above the 0.30 threshold.
    viz_utils.visualize_boxes_and_labels_on_image_array(
        image_np_with_detections,
        detections['detection_boxes'][0].numpy(),
        (detections['detection_classes'][0].numpy() + label_id_offset).astype(int),
        detections['detection_scores'][0].numpy(),
        category_index,
        use_normalized_coordinates=True,
        max_boxes_to_draw=200,
        min_score_thresh=.30,
        agnostic_mode=False)
    cv2.imshow('object detection', cv2.resize(image_np_with_detections, (800, 600)))
    # Exit on 'q'.
    if cv2.waitKey(25) & 0xFF == ord('q'):
        break
video.release()
cv2.destroyAllWindows()
|
<reponame>bizmaercq/eda-reporting
--Global Statistics on FlexPay
-- Per-branch totals for FlexPay transactions (trn_ref_no like '%TTIW%')
-- between &Start_Date and &End_Date, excluding two internal GL accounts.
-- Inner query buckets LCY amounts/counts by TRN_CODE: TTI = received,
-- TTC = commission, TAX = tax, TTL = paid out; outer query rolls the
-- per-code rows up to one row per branch.
select aa.ac_branch,sum(Number_Received) Number_Received,sum(aa.Received) Received,sum(aa.commission) Commission, sum(aa.tax) Tax,sum(Number_Paid) Number_Paid ,sum(aa.paid) paid
from
(
select a.AC_BRANCH,
case when a.TRN_CODE = 'TTI' then count(*) else 0 end as Number_Received,
case when a.TRN_CODE = 'TTI' then sum(a.LCY_AMOUNT) else 0 end as Received,
case when a.TRN_CODE = 'TTC' then sum(a.LCY_AMOUNT) else 0 end as Commission,
case when a.TRN_CODE = 'TAX' then sum(a.LCY_AMOUNT) else 0 end as Tax,
case when a.TRN_CODE = 'TTL' then count(*) else 0 end as Number_Paid,
case when a.TRN_CODE = 'TTL' then sum(a.LCY_AMOUNT) else 0 end as Paid
from acvw_all_ac_entries a
where a.trn_ref_no like '%TTIW%'
and a.AC_NO not in ('571110000','452101000')
and a.TRN_DT between '&Start_Date' and '&End_Date'
group by a.AC_BRANCH,a.TRN_CODE
) aa
group by aa.ac_branch
order by aa.ac_branch;
-- Detail Statistics on FlexPay
-- Same TRN_CODE bucketing as the global query above, but joined to
-- smtb_user and broken down per transaction reference, date and user
-- (branch here is the user's home branch, not the account branch).
select aa.home_branch TRN_BRANCH,aa.TRN_DT TRN_DATE,aa.TRN_REF_NO,aa.USER_ID,sum(aa.Received) Received,sum(aa.commission) Commission, sum(aa.tax) Tax, sum(aa.Paid) Paid
from
(
select u.home_branch,a.TRN_DT, a.TRN_REF_NO,u.USER_ID,
case when a.TRN_CODE = 'TTI' then sum(a.LCY_AMOUNT) else 0 end as Received,
case when a.TRN_CODE = 'TTC' then sum(a.LCY_AMOUNT) else 0 end as Commission,
case when a.TRN_CODE = 'TAX' then sum(a.LCY_AMOUNT) else 0 end as Tax,
case when a.TRN_CODE = 'TTL' then sum(a.LCY_AMOUNT) else 0 end as Paid
from acvw_all_ac_entries a , smtb_user u
where a.USER_ID = u.user_id
and a.trn_ref_no like '%TTIW%'
and a.AC_NO not in ('571110000','452101000')
and a.TRN_DT between '&Start_Date' and '&End_Date'
group by u.home_branch,a.TRN_CODE,a.TRN_DT,a.TRN_REF_NO,u.USER_ID
) aa
group by aa.home_branch,aa.TRN_DT,aa.TRN_REF_NO,aa.USER_ID
order by aa.TRN_REF_NO,aa.trn_dt,aa.USER_ID;
|
-- NOTE(review): 'customerID' is a literal placeholder, not a bind variable;
-- parameterize it (e.g. :customer_id / ?) before real use.
SELECT COUNT(*) FROM orders WHERE customer_id='customerID';
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.RequestParam;
@RestController
public class FamilyPlanningController {
@DeleteMapping("/family-planning")
@ApiOperation(value = "删除计生人员信息", notes = "传参:Integer id 记录id")
public Result delete(@RequestParam Integer id) {
// Implement logic to delete family planning personnel information
boolean deletionSuccessful = // Your logic to delete the personnel information based on the provided id
if (deletionSuccessful) {
return new Result("Personnel information deleted successfully");
} else {
return new Result("Failed to delete personnel information");
}
}
} |
<reponame>VincentLefevre/3D-parallax
#!/usr/bin/env python
#
# This file is part of libigl, a simple c++ geometry processing library.
#
# Copyright (C) 2017 <NAME> <<EMAIL>> and <NAME> <<EMAIL>>
#
# This Source Code Form is subject to the terms of the Mozilla Public License
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import sys, os
# Add the igl library to the modules search path
sys.path.insert(0, os.getcwd() + "/../")
import pyigl as igl
from shared import TUTORIAL_SHARED_PATH, check_dependencies
dependencies = ["glfw"]
check_dependencies(dependencies)
# Load the tutorial mesh into vertex/face matrices.
V = igl.eigen.MatrixXd()
F = igl.eigen.MatrixXi()
igl.readOFF(TUTORIAL_SHARED_PATH + "cheburashka.off", V, F)
# Two fixed points
# Left hand, left foot
b = igl.eigen.MatrixXd([[4331], [5957]]).castint()
bc = igl.eigen.MatrixXd([[1], [-1]])
# Construct Laplacian and mass matrix
L = igl.eigen.SparseMatrixd()
M = igl.eigen.SparseMatrixd()
Minv = igl.eigen.SparseMatrixd()
Q = igl.eigen.SparseMatrixd()
igl.cotmatrix(V, F, L)
igl.massmatrix(V, F, igl.MASSMATRIX_TYPE_VORONOI, M)
igl.invert_diag(M, Minv)
# Bi-Laplacian
Q = L * (Minv * L)
# Zero linear term
B = igl.eigen.MatrixXd.Zero(V.rows(), 1)
Z = igl.eigen.MatrixXd()
Z_const = igl.eigen.MatrixXd()
# Alternative, short hand
mqwf = igl.min_quad_with_fixed_data()
# Empty constraints
# First solve: minimize the biharmonic energy with only the two fixed
# values in b/bc and no linear equality constraints.
Beq = igl.eigen.MatrixXd()
Aeq = igl.eigen.SparseMatrixd()
igl.min_quad_with_fixed_precompute(Q, b, Aeq, True, mqwf)
igl.min_quad_with_fixed_solve(mqwf, B, bc, Beq, Z)
# Constraint forcing difference of two points to be 0
Aeq = igl.eigen.SparseMatrixd(1, V.rows())
# Right hand, right foot
Aeq.insert(0, 6074, 1)
Aeq.insert(0, 6523, -1)
Aeq.makeCompressed()
Beq = igl.eigen.MatrixXd([[0]])
# Second solve: same energy, plus the single linear constraint Z[6074]=Z[6523].
igl.min_quad_with_fixed_precompute(Q, b, Aeq, True, mqwf)
igl.min_quad_with_fixed_solve(mqwf, B, bc, Beq, Z_const)
# Global definitions for viewer
# Pseudo-color based on solution
C = igl.eigen.MatrixXd()
C_const = igl.eigen.MatrixXd()
toggle = True
# Use same color axes
min_z = min(Z.minCoeff(), Z_const.minCoeff())
max_z = max(Z.maxCoeff(), Z_const.maxCoeff())
igl.jet(Z, min_z, max_z, C)
igl.jet(Z_const, min_z, max_z, C_const)
# Plot the mesh with pseudocolors
viewer = igl.glfw.Viewer()
viewer.data().set_mesh(V, F)
viewer.data().show_lines = False
viewer.data().set_colors(C)
def key_down(viewer, key, mode):
    """Viewer key handler: on spacebar, apply the current color set
    (C when `toggle` is truthy, else C_const) and flip `toggle`.
    Returns True when the key was consumed, False otherwise."""
    if key == ord(' '):
        global toggle, C, C_const
        if toggle:
            viewer.data().set_colors(C)
        else:
            viewer.data().set_colors(C_const)
        toggle = not toggle
        return True
    return False
viewer.callback_key_down = key_down
print("Press [space] to toggle between unconstrained and constrained.")
viewer.launch()
|
<filename>chartilo/drawers/candleChartDrawer.py
from . import Drawer
from ..positioners import Limiter, ChartPositioner
from PyQt5.QtGui import QColor, QFont, QFontMetrics, QPainter, QPen
class CandleChartDrawer(Drawer):
    """Paints candlestick bodies and wicks onto the chart via the painter
    supplied by the Drawer base class."""

    def __init__(self, painter) -> None:
        super().__init__(painter)

    def draw(self, data):
        # NOTE(review): the loop bound comes from Limiter.drawableData while
        # the elements come from `data`; this assumes the two have the same
        # length — verify at the call site.
        for i in range(0, len(Limiter.drawableData)):
            # Teal for bullish candles; red when the candle closed below
            # its open.
            pen = QPen(QColor("#26a69a"), data[i].width - 3)
            if (data[i].openPrice > data[i].closePrice):
                pen = QPen(QColor("#ef5350"), data[i].width - 3)
            self.painter.setPen(pen)
            # Candle body: thick vertical line from open to close price,
            # laid out right-to-left from the chart's right padding edge.
            self.painter.drawLine(
                self.width - ChartPositioner.paddingHorizontal -
                i * data[i].width,
                self.getVerticalPosition(data[i].openPrice),
                self.width - ChartPositioner.paddingHorizontal -
                (i) * data[i].width,
                self.getVerticalPosition(data[i].closePrice),
            )
            # Wick: 1px line spanning the candle's low-to-high range.
            pen.setWidth(1)
            self.painter.setPen(pen)
            self.painter.drawLine(
                self.width - ChartPositioner.paddingHorizontal -
                i * data[i].width,
                self.getVerticalPosition(data[i].minimalPrice),
                self.width - ChartPositioner.paddingHorizontal -
                i * data[i].width,
                self.getVerticalPosition(data[i].maximalPrice),
            )
|
<filename>src/app.module.ts
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { TypeOrmModule } from '@nestjs/typeorm';
import { TypeOrmConfigService } from './persistence/typeorm-config.service';
import { PostsModule } from './posts/posts.module';
/**
 * Root application module.
 * Wires global configuration (read from `.development.env`), the TypeORM
 * connection (options resolved at runtime by TypeOrmConfigService) and the
 * posts feature module.
 */
@Module({
  imports: [
    // isGlobal makes ConfigService injectable everywhere without
    // re-importing ConfigModule in each feature module.
    ConfigModule.forRoot({
      isGlobal: true,
      envFilePath: '.development.env',
    }),
    // Async registration so DB options can depend on the loaded config.
    TypeOrmModule.forRootAsync({
      useClass: TypeOrmConfigService,
    }),
    PostsModule,
  ],
})
export class AppModule {}
|
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
/**
 * Generic container that stores items and prints them to standard output,
 * optionally through a user-supplied to-string converter.
 */
class ItemManager<T> {
    // Items in insertion order.
    private List<T> items;
    // Optional formatter used by displayItems(); null means use toString().
    private Function<T, String> converter;

    public ItemManager() {
        items = new ArrayList<>();
    }

    /** Sets the formatter applied when displaying items. */
    public void setConverter(Function<T, String> converter) {
        this.converter = converter;
    }

    /** Appends an item to the collection. */
    public void addItem(T item) {
        items.add(item);
    }

    /** Removes the first occurrence of the item, if present. */
    public void removeItem(T item) {
        items.remove(item);
    }

    /** Prints each item on its own line, converted if a converter is set. */
    public void displayItems() {
        for (T item : items) {
            String rendered = (converter != null)
                    ? converter.apply(item)
                    : item.toString();
            System.out.println(rendered);
        }
    }
}
/** Demo entry point exercising ItemManager with two different converters. */
public class Main {
    public static void main(String[] args) {
        ItemManager<Integer> integerManager = new ItemManager<>();
        integerManager.addItem(10);
        integerManager.addItem(20);
        integerManager.addItem(30);
        // Setting a converter function to display items as strings
        integerManager.setConverter(Object::toString);
        integerManager.displayItems();
        // Setting a custom converter function to display items as their squares
        integerManager.setConverter(x -> String.valueOf(x * x));
        integerManager.displayItems();
    }
}
<filename>__test__/reverse-mapping/undefined/string/output.js<gh_stars>0
// Compiled output of a TypeScript string enum:
//   enum Color { Red = "red", Yellow = "yellow" }
// String enums emit only a forward mapping (name -> value); there is no
// reverse mapping, hence a single assignment per member.
var Color;
(function (Color) {
    Color["Red"] = "red";
    Color["Yellow"] = "yellow";
})(Color || (Color = {}));
<filename>src/SalesforceLeadWidgetEditingConfig.js
import * as Scrivito from "scrivito";
// Editor-side configuration for the SalesforceLeadWidget: dialog title,
// per-attribute labels/descriptions, the order of properties shown in the
// widget dialog, and initial attribute values for newly added widgets.
Scrivito.provideEditingConfig("SalesforceLeadWidget", {
  title: "Salesforce Lead Form",
  attributes: {
    agreementText: {
      title: "Agreement text",
      description: "Optional",
    },
    buttonText: {
      title: "Submit button text",
      description: "Default: send message",
    },
    backgroundColor: {
      title: "Background color",
      description: "Default: White",
      values: [
        { value: "white", title: "White" },
        { value: "transparent", title: "Transparent" },
      ],
    },
    orgID: {
      title: "Organization ID",
      description: "Provided in Salesforce Web-to-lead form snippet.",
    },
    returnURL: {
      title: "Return URL",
      description: "Page you want the visitor to go to after form submission.",
    },
  },
  properties: ["agreementText", "buttonText", "backgroundColor", "orgID", "returnURL"],
  initialContent: {
    buttonText: "send message",
    backgroundColor: "white",
  },
});
|
// Adds a Google Fonts <link> element to the document head.
// `url` is the link target, `rel` its relationship ("preconnect" or
// "stylesheet").
function AddFont(url, rel) {
    let link = document.createElement("link");
    link.rel = rel;
    link.href = url;
    // Font files are served cross-origin, so the preconnect to the static
    // font host must be made in CORS mode.
    // Fixes: the original used `if (url = "...")` -- an assignment that
    // always took the branch and clobbered `url` -- and then read
    // `link.crossOrigin` without assigning it, which was a no-op.
    if (url === "https://fonts.gstatic.com") {
        link.crossOrigin = "anonymous";
    }
    document.getElementsByTagName("head")[0].appendChild(link);
}
// Preconnect to both Google Fonts origins, then load the Urbanist stylesheet.
AddFont("https://fonts.googleapis.com", "preconnect");
AddFont("https://fonts.gstatic.com", "preconnect");
AddFont("https://fonts.googleapis.com/css2?family=Urbanist&display=swap", "stylesheet");
// Functions that save coding space
function CreateBasics(htmlName, classTitle, el) {
el.className = classTitle;
el.innerHTML = htmlName;
}
function AddToEl(generalEl, listOfEl) {
for (let i = 0; i < listOfEl.length; i++) {
generalEl.appendChild(listOfEl[i]);
}
}
// Builds the on-page Snibbit popup (save button, title, logo, hint text and
// a title input) and injects it into the page's first <header> element.
function establishElements() {
    // Create html elements for the on-page popup
    let btn = document.createElement("button");
    btn.addEventListener("click", HandleButtonPress);
    CreateBasics("Snibbit", "gradient-button s-button", btn)
    // NOTE(review): a <body> element is created and appended inside the
    // page <header>; a <div> is probably intended here -- confirm.
    let box = document.createElement("body");
    box.className = "s-box"
    let bigText = document.createElement("h1");
    CreateBasics("Snibbit Code", "s-title", bigText)
    let logo = document.createElement("img")
    // Extension-packaged logo asset.
    logo.src = chrome.runtime.getURL("Images/logo-25x25.png")
    var boxHead = document.createElement("header")
    AddToEl(boxHead, [bigText, logo])
    AddToEl(box, [boxHead])
    let subHeader = document.createElement("small")
    subHeader.className = "infotext"
    subHeader.innerHTML = "Highlight a snippet"
    AddToEl(box, [subHeader])
    let inline = document.createElement("div");
    inline.className = 'parent inline'
    let search = document.createElement('input')
    search.type = "text"
    search.placeholder = "<title snippet>"
    search.name = "search"
    search.className = "child s-searchbox"
    AddToEl(inline, [btn, search])
    AddToEl(box, [inline])
    document.getElementsByTagName("header")[0].appendChild(box)
}
// Build the popup immediately when the content script loads.
establishElements()
// Stores URL, snippet content and an optional assigned title.
// Triggered by the popup's button: validates the current text selection,
// persists it to chrome.storage.sync keyed by the snippet text, then
// updates per-keyword occurrence counters ("SKey - <keyword>").
function HandleButtonPress() {
    const selectedSnippet = GetSelectionText();
    if (selectedSnippet == "") {
        alert("Please highlight a snippet of text");
    } else if (selectedSnippet.length > 150) {
        alert("Please try to save a smaller snippet");
    } else {
        let siteLink = window.location.href;
        let snippetTitle = document.getElementsByClassName("s-searchbox")[0].value;
        let obj = {};
        // Snippet text is the storage key; value is [title, url, marker].
        obj[selectedSnippet] = [snippetTitle, siteLink, "Snibbit"];
        try {
            chrome.storage.sync.set(obj);
            alert("Snippet Saved");
        } catch (err) {
            // Fix: user-facing message previously read "refesh".
            alert("Please refresh the page and try again.");
        }
        // Increment stored counters for each code-like keyword found in the
        // snippet. `let i` keeps the async get callback bound per-iteration.
        let grabbedKeys = findKeywords(selectedSnippet);
        let allKeys = Object.keys(grabbedKeys);
        for (let i = 0; i < allKeys.length; i++) {
            chrome.storage.sync.get("SKey - " + allKeys[i], function (items) {
                var keyObj = {};
                console.log(items);
                if (Object.keys(items).length == 0) {
                    // First sighting of this keyword.
                    keyObj["SKey - " + allKeys[i]] = grabbedKeys[allKeys[i]];
                } else {
                    console.log("SKey - " + allKeys[i]);
                    keyObj["SKey - " + allKeys[i]] = grabbedKeys[allKeys[i]] + items["SKey - " + allKeys[i]];
                }
                chrome.storage.sync.set(keyObj);
                console.log(keyObj);
            });
        }
    }
    // Always clear the title input, whether or not the save succeeded.
    document.getElementsByClassName("s-searchbox")[0].value = "";
}
// Extracts code-like tokens from a snippet and counts occurrences of each.
// A token counts as code when it looks like a call or member access at the
// end of the word, e.g. "foo()", "obj.method(1)" or "a.b;".
function findKeywords(section) {
    const keywordsTemplate = new RegExp('[a-zA-Z_]+(([(][a-zA-Z_]*[)]){1}|([.](([a-zA-Z_*]+)([(][a-zA-Z_0-9",]+[)])?){1}))[;]?$');
    const keywords = {};
    for (const word of section.split(" ")) {
        if (!keywordsTemplate.test(word)) {
            continue;
        }
        keywords[word] = (keywords[word] || 0) + 1;
    }
    return keywords;
}
// Placeholder: presumably intended to seed keyword counters; not yet
// implemented and currently never called from this file.
function initializeKeywords(){
}
// Function to get and store highlighted text
function GetSelectionText() {
let text = "";
if (window.getSelection) {
text = window.getSelection().toString();
} else if (document.selection && document.selection.type != "Control") {
text = document.selection.createRange().text;
}
return text;
}
// Fades the "Highlight a snippet" hint out while text is selected and back
// in when the selection is cleared. Bound to mouseup below.
function addSubheader(e) {
    const t = GetSelectionText();
    var element = document.getElementsByClassName("infotext")[0];
    element.style.WebkitTransition = 'opacity 1s';
    if (t != "") {
        element.style.opacity = "0";
        element.style.filter = 'alpha(opacity=0)';  // legacy IE opacity
    } else {
        element.style.opacity = "1";
        element.style.filter = 'alpha(opacity=80)';
    }
}
document.onmouseup = addSubheader;
// True when the element's visible text contains keyWord.
function checkForKeyWord(Text, keyWord) {
    const haystack = Text.textContent || Text.innerText;
    return haystack.indexOf(keyWord) > -1;
}
// importing the required libraries
const express = require('express');
const app = express();
const Gpio = require('onoff').Gpio;

// setting the LED pin (BCM numbering) as an output
const LED_PIN = 4;
const led = new Gpio(LED_PIN, 'out');

// Route handler: GET /api/led?status=on|off switches the LED; any other
// status value leaves the LED unchanged but still responds 200.
app.get('/api/led', (req, res) => {
    let status = req.query.status;
    if (status === 'on') {
        led.writeSync(1);
    } else if (status === 'off') {
        led.writeSync(0);
    }
    res.send({ message: 'LED Toggled' });
});

// starting the server
// Fix: the original app.listen call was missing its closing parenthesis,
// which is a syntax error that prevented the script from running at all.
const port = 3000;
app.listen(port, () => console.log(`LED server running on port ${port}`));
#!/bin/bash
# SLURM batch job: runs one hyper-parameter configuration of the Act_func
# meta experiment (tanh activation, run 1, Adagrad optimizer).
#SBATCH -J Act_tanh_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation, seed/run, optimizer, and tuned hyper-params.
python3 /home/se55gyhe/Act_func/progs/meta.py tanh 1 Adagrad 3 0.21429489256858142 385 0.011822902836239288 rnormal PE-infersent
# Google Drive file id and local filename for the model checkpoint.
fileId=1CSmiRO48iqBbAfouUwd2IBTPvrxjev1T
fileName=ckpt.pt
# Quote the -O argument as well, so the download still works if fileName
# ever contains spaces or glob characters.
gdown "https://drive.google.com/uc?id=${fileId}" -O "${fileName}"
|
<reponame>yicone/steedos-platform
var designer = {
    // Returns the value of query-string parameter `name`, or null if absent.
    urlQuery: function (name) {
        var reg = new RegExp("(^|&)" + name + "=([^&]*)(&|$)");
        var r = window.location.search.substr(1).match(reg);
        // NOTE(review): unescape() is deprecated; decodeURIComponent is the
        // modern equivalent but handles multi-byte escapes differently --
        // verify before changing.
        if (r != null) return unescape(r[2]);
        return null;
    },
    // Loads the target URL (from ?url=...) into the designer iframe and,
    // when embedded under Steedos, disables the node context menu.
    run: function () {
        var url = this.urlQuery("url");
        url = decodeURIComponent(url);
        if (url) {
            $("#ifrDesigner").attr("src", url);
        }
        var Steedos = window.parent.Steedos || null;
        if (Steedos) {
            Steedos.forbidNodeContextmenu(window);
        }
    }
};
// Kick off once the DOM is ready.
$(function () {
    designer.run();
});
#!/bin/bash
# Prints usage to stderr; the heredoc-style string documents the expected
# mounted volumes and environment variables.
function printHelp() {
  >&2 echo "USAGE: $0 [-r]
Untars the influxdb source tarball mounted at /influxdb-src.tar.gz,
then emits a tarball of influxdb binaries to /out,
which must be a mounted volume if you want to access the file.
Relies upon environment variables GOOS and GOARCH to determine what to build.
Respects CGO_ENABLED.
To build with race detection enabled, pass the -r flag.
"
}
RACE_FLAG=""

while getopts hr arg; do
  case "$arg" in
    h) printHelp; exit 1;;
    r) RACE_FLAG="-race";;
  esac
done

if [ -z "$GOOS" ] || [ -z "$GOARCH" ]; then
  >&2 echo 'The environment variables $GOOS and $GOARCH must both be set.'
  exit 1
fi

# Extract tarball into GOPATH.
tar xz -C "$GOPATH" -f /influxdb-src.tar.gz

# Source SHA recorded by the packaging step; used in the tarball name.
SHA=$(jq -r .sha < "$GOPATH/src/github.com/influxdata/influxdb/.metadata.json")

SUFFIX=
if [ "$CGO_ENABLED" == "0" ]; then
  # Only add the static suffix to the filename when explicitly requested.
  SUFFIX=_static
elif [ -n "$RACE_FLAG" ]; then
  # -race depends on cgo, so this option is exclusive from CGO_ENABLED.
  SUFFIX=_race
fi

TARBALL_NAME="influxdb_bin_${GOOS}_${GOARCH}${SUFFIX}-${SHA}.tar.gz"

# note: according to https://github.com/golang/go/wiki/GoArm
# we want to support armel using GOARM=5
# and we want to support armhf using GOARM=6
# no GOARM setting is necessary for arm64
# Fixes: quote $GOARCH so the test cannot break on an empty value, and
# export GOARCH/GOARM -- plain shell assignments are not inherited by child
# processes, so without export the `go build` invocations below never saw
# the adjusted GOARM value.
if [ "$GOARCH" == "armel" ]; then
  export GOARCH=arm
  export GOARM=5
fi
if [ "$GOARCH" == "armhf" ]; then
  export GOARCH=arm
  export GOARM=6
fi

OUTDIR=$(mktemp -d)
for cmd in \
  influxdb/cmd/influxd \
  influxdb/cmd/influx_stress \
  influxdb/cmd/influx \
  influxdb/cmd/influx_inspect \
  influxdb/cmd/influx_tsm \
  ; do
  # Build all the binaries into $OUTDIR.
  # Windows binaries will get the .exe suffix as expected.
  (cd "$OUTDIR" && go build $RACE_FLAG -i "github.com/influxdata/$cmd")
done

(cd "$OUTDIR" && tar czf "/out/$TARBALL_NAME" ./*)

# Emit checksums alongside the tarball for release verification.
(cd /out && md5sum "$TARBALL_NAME" > "$TARBALL_NAME.md5")
(cd /out && sha256sum "$TARBALL_NAME" > "$TARBALL_NAME.sha256")
|
<filename>crownforms-components/src/lib/designer/models/firmstep/FFFormConverter.ts
import {
ExpressionFieldValidationRuleModel,
FFConditionParser,
FieldValidationRuleModel,
IFieldModelOptions,
IRepeatableSubFormFieldOptions,
ISectionModelOptions,
ITextFieldOptions,
ITypedValueConverter,
IUnsupportedFieldModelOptions,
RegexFieldValidationRuleModel,
StringValue,
SubFormValue,
SubFormValueArrayValue,
TypedValue,
VoidValue,
DateValue,
IDateFieldOptions,
IFormModelOptions,
IHtmlFieldModelOptions,
INumberFieldOptions,
ISelectOneOptions,
NumberValue,
StringArrayValue,
ITwoChoiceFieldOptions,
ILookupModelOptions,
IFieldValidationRuleModelOptions,
IRegexFieldValidationRuleModelOptions,
IExpressionFieldValidationRuleModelOptions,
ISectionValidationRuleModelOptions,
IExpressionSectionValidationRuleModelOptions,
ILookupButtonOptions,
ISelectSeveralFieldModelOptions,
} from "../../../renderer";
import {
IFFAutoLookupFieldModel,
IFFDateFieldModel,
IFFFieldModel,
IFFFieldModelProps,
IFFFormModel,
IFFHtmlFieldModel,
IFFNumberFieldModel,
IFFSectionModel,
IFFSelectFieldModel,
IFFSubFormFieldModel,
IFFTextAreaFieldModel,
IFFTextFieldModel,
} from ".";
import {
IFFLookupModel,
IFFLookupModel_Output_template,
} from "./IFFLookupModel";
import sanitizeHtml from "sanitize-html";
// Splits a "/pattern/flags" regex string: $1 = pattern body, $2 = flags.
const REGEX_SPLIT = /^\/(.*)\/([^/]*)$/;

/**
 * Converts Firmstep (FF) form definitions into the renderer's model options.
 * All methods are static, pure converters from IFF* input shapes to
 * I*ModelOptions output shapes.
 */
export class FFFormConverter {
  /** Converts a whole FF form: its sections plus any integration lookups. */
  static convertFormOptions(source: IFFFormModel): IFormModelOptions {
    return {
      type: "form",
      name: source.props.id,
      title: source.formName,
      sections: source.sections.map((s) => FFFormConverter.section(s, source)),
      lookups: source.props.integrationDefinition
        ? Object.values(source.props.integrationDefinition).map((id) =>
            FFFormConverter.lookup(id, source)
          )
        : [],
    };
  }

  /**
   * Converts an FF integration definition into an HTTP lookup model.
   * @throws Error("Unsupported") when the method is not GET/POST or the
   *         response type is not XML/JSON.
   */
  static lookup(
    source: IFFLookupModel,
    sourceForm: IFFFormModel
  ): ILookupModelOptions {
    // Output_template is stored as a JSON string inside the lookup model.
    let sourceTemplate: IFFLookupModel_Output_template = JSON.parse(
      source.Output_template
    );
    if (sourceTemplate.method !== "GET" && sourceTemplate.method !== "POST") {
      throw new Error("Unsupported");
    }
    if (
      sourceTemplate.responseType !== "XML" &&
      sourceTemplate.responseType !== "JSON"
    ) {
      throw new Error("Unsupported");
    }
    // The fields list is itself double-encoded JSON.
    let sourceTemplateFields = JSON.parse(sourceTemplate.fields);
    return {
      httpMethod: sourceTemplate.method,
      lookupType: "http",
      type: "lookup",
      name: source.ID,
      url: sourceTemplate.url,
      responseType: sourceTemplate.responseType,
      resultPath: sourceTemplate.path_to_values,
      resultFields: sourceTemplateFields,
    };
  }

  /** Converts a single-line text field. */
  private static textField(source: IFFTextFieldModel): ITextFieldOptions {
    return {
      ...FFFormConverter.field("text", StringValue, "textfield", source),
      width: source.props.width,
    };
  }

  /**
   * Shared conversion applied to every field type: name/label/visibility
   * flags, default-value expressions, and validation rules (regex mask plus
   * optional conditional expression).
   */
  private static field<
    TField extends string,
    T extends TypedValue,
    TControl extends string
  >(
    type: TField,
    valueType: ITypedValueConverter<T>,
    controlType: TControl,
    source: IFFFieldModel
  ): IFieldModelOptions & { type: TField; controlType: TControl } {
    const validationRules: IFieldValidationRuleModelOptions[] = [];
    if (source.props.validationMask) {
      // "_custom_regex_" is a sentinel meaning "use the custom regex prop".
      const regexAndFlags =
        source.props.validationMask === "_custom_regex_"
          ? source.props._custom_regex_
          : source.props.validationMask;
      const message =
        source.props.validationMaskMessage &&
        source.props.validationMaskMessageValue !== "Custom Regex"
          ? source.props.validationMaskMessage.replaceAll(
              "_validation_mask_",
              source.props.validationMaskMessageValue ??
                `${source.props.label ?? "The value"} is invalid`
            )
          : `${source.props.label ?? "The value"} is invalid`;
      if (regexAndFlags) {
        // Strip the surrounding slashes; any trailing flags are discarded.
        const regex = regexAndFlags.replace(REGEX_SPLIT, "$1");
        validationRules.push({
          type: "regex",
          errorMessage: message,
          regex: regex,
        } as IRegexFieldValidationRuleModelOptions);
      }
    }
    if (source.props.validationCondition) {
      validationRules.push({
        type: "expression",
        expression: source.props.validationCondition,
        errorMessage: source.props.validationConditionMessage,
      } as IExpressionFieldValidationRuleModelOptions);
    }
    return {
      type,
      controlType,
      valueType: valueType.key,
      name: source.props.dataName ?? "fieldnamemissing",
      displayName: source.props.label,
      hideLabel: source.props.labelPosition === "hideLabel",
      hide: source.props.hidden ?? false,
      alwaysRequired: !!source.props.mandatory,
      // The stock FF message is suppressed so the renderer default applies.
      requiredErrorMessage:
        source.props.mandatoryMessage &&
        source.props.mandatoryMessage !== "This field is required"
          ? source.props.mandatoryMessage
          : null,
      // "specific" defaults (or values that don't parse as expressions) are
      // wrapped as string literals; otherwise the raw expression is kept.
      defaultValueExpression:
        source.props.defaultValue &&
        (source.props.defaultType === "specific" ||
          !FFConditionParser.isValidStringExpression(source.props.defaultValue))
          ? FFConditionParser.getStringLiteral(source.props.defaultValue)
          : source.props.defaultValue ?? null,
      defaultValueFromTextExpression:
        source.props.defaultValueText &&
        (source.props.defaultType === "specific" ||
          !FFConditionParser.isValidStringExpression(
            source.props.defaultValueText
          ))
          ? FFConditionParser.getStringLiteral(source.props.defaultValueText)
          : source.props.defaultValueText ?? null,
      hintText: source.props.helpText ?? null,
      displayCondition: source.props.displayCondition ?? null,
      requiredCondition: source.props.mandatoryCondition ?? null,
      metadata: {
        firmstep: source,
      },
      validationRules,
      readOnly: source.props.readOnly ?? null,
      readOnlyCondition: source.props.readonlyCondition ?? null
    };
  }

  /** Wraps fields that cannot be converted, preserving the raw source. */
  private static unsupported(
    source: IFFFieldModel,
    details: string
  ): IUnsupportedFieldModelOptions {
    return {
      ...FFFormConverter.field("unsupported", VoidValue, "unsupported", source),
      source,
      details,
    };
  }

  /** Converts a date field. */
  static date(source: IFFDateFieldModel): IDateFieldOptions {
    return {
      ...FFFormConverter.field("date", DateValue, "datefield", source),
    };
  }

  /**
   * Converts a section: section-level validation plus a field-type dispatch
   * mapping each FF field onto its renderer equivalent. Unknown types become
   * "unsupported" placeholder fields; "line" separators are dropped.
   */
  private static section(
    source: IFFSectionModel,
    sourceForm: IFFFormModel
  ): ISectionModelOptions {
    let validationRules : ISectionValidationRuleModelOptions[] = [];
    if (source.props.validation) {
      validationRules.push({
        type: "expression",
        expression: source.props.validation,
        errorMessage: source.props.validationMessage
      } as IExpressionSectionValidationRuleModelOptions)
    }
    return {
      type: "section",
      name: source.id,
      displayName: source.name,
      hide: false,
      displayCondition: source.props.displayCondition ?? null,
      validationRules,
      fields: source.fields
        .map((field) => {
          if (field.type === "button") {
            return FFFormConverter.lookupButton(
              field as IFFFieldModel,
              sourceForm
            );
          }
          if (field.type === "autoLookup") {
            return FFFormConverter.autoLookup(
              field as IFFAutoLookupFieldModel,
              sourceForm
            );
          }
          if (field.type === "text") {
            return FFFormConverter.textField(field as IFFTextFieldModel);
          }
          if (field.type === "textarea") {
            return FFFormConverter.textArea(field as IFFTextAreaFieldModel);
          }
          if (field.type === "radio") {
            return FFFormConverter.selectone(
              field as IFFSelectFieldModel,
              "radiofield"
            );
          }
          if (field.type === "checkbox") {
            // One option -> boolean-like two-choice field;
            // several options -> multi-select checkbox group.
            if ((<IFFSelectFieldModel>field).props.listOfValues?.length === 1) {
              return FFFormConverter.singleCheckbox(
                field as IFFSelectFieldModel
              );
            }
            return FFFormConverter.selectseveral(field as IFFSelectFieldModel);
          }
          if (field.type === "subform") {
            const subFormField = field as IFFSubFormFieldModel;
            if (subFormField.props.subformDefinition.sections.length !== 1) {
              return FFFormConverter.unsupported(
                field,
                "Unsupported subform field - does not contain exactly 1 section"
              );
            }
            if (subFormField.props.repeatable) {
              return FFFormConverter.repeatableSubform(
                field as IFFSubFormFieldModel
              );
            }
            return FFFormConverter.subform(field as IFFSubFormFieldModel);
          }
          if (field.type === "select") {
            return FFFormConverter.selectone(
              field as IFFSelectFieldModel,
              "selectfield"
            );
          }
          if (field.type === "date") {
            return FFFormConverter.date(field as IFFDateFieldModel);
          }
          if (field.type === "number") {
            return FFFormConverter.number(field as IFFNumberFieldModel);
          }
          if (field.type === "html" || field.type === "staticText") {
            return FFFormConverter.html(field as IFFHtmlFieldModel);
          }
          if (field.type === "line") {
            // Purely visual separator; nothing to carry into the model.
            return undefined;
          }
          return FFFormConverter.unsupported(
            field,
            `Unsupported field type '${field.type}'`
          );
        })
        .filter((f) => f !== undefined) as IFieldModelOptions[],
    };
  }

  /**
   * Converts a lookup button. The associated lookup is resolved by finding
   * the select field whose `lookupButton` prop names this button's dataName.
   */
  static lookupButton(
    source: IFFFieldModel,
    sourceForm: IFFFormModel
  ): ILookupButtonOptions {
    return {
      ...FFFormConverter.field(
        "lookupButton",
        VoidValue,
        "lookupButton",
        source
      ),
      lookup: sourceForm.sections
        .flatMap((s) =>
          s.fields.filter((f): f is IFFSelectFieldModel => f.type === "select")
        )
        .find((f) => f.props.lookupButton === source.props.dataName)?.props
        .lookup,
    };
  }

  /** Converts an auto-lookup field, which fires its lookup directly. */
  static autoLookup(
    source: IFFAutoLookupFieldModel,
    sourceForm: IFFFormModel
  ): ILookupButtonOptions {
    return {
      ...FFFormConverter.field("autoLookup", VoidValue, "autoLookup", source),
      lookup: source.props.lookup,
    };
  }

  /** Converts static HTML content; the markup is sanitized first. */
  static html(source: IFFHtmlFieldModel): IHtmlFieldModelOptions {
    let content = sanitizeHtml(source.props.content);
    return {
      ...FFFormConverter.field("html", StringValue, "renderhtml", source),
      defaultValueExpression: FFConditionParser.getStringLiteral(content),
    };
  }

  /** Converts a multi-option checkbox group. */
  static selectseveral(
    source: IFFSelectFieldModel
  ): ISelectSeveralFieldModelOptions {
    return {
      ...FFFormConverter.field(
        "selectseveral",
        StringArrayValue,
        "multicheckboxfield",
        source
      ),
      ...FFFormConverter.selectcommon(source, StringValue),
    };
  }

  /**
   * Shared choice-source conversion: a lookup reference when configured,
   * otherwise the static list of values.
   * NOTE(review): the TValue type parameter and the `choiceType` argument
   * are never used in the body -- confirm whether they are vestigial.
   */
  private static selectcommon<
    TValue extends TypedValue,
    TChoices extends TypedValue
  >(source: IFFSelectFieldModel, choiceType: ITypedValueConverter<TChoices>) {
    if (source.props.lookup) {
      return {
        choices: { lookup: source.props.lookup },
      };
    }
    return {
      choices:
        source.props.listOfValues?.map((i) => {
          return {
            value: i.value,
            label: i.label,
          };
        }) ?? [],
    };
  }

  /** Converts a single-choice field rendered as a dropdown or radio group. */
  static selectone(
    source: IFFSelectFieldModel,
    controlType: "selectfield" | "radiofield"
  ): ISelectOneOptions {
    return {
      ...FFFormConverter.field("selectone", StringValue, controlType, source),
      nullText: source.props.selectLabel ?? null,
      ...FFFormConverter.selectcommon(source, StringValue),
    };
  }

  /** Converts a repeatable subform (0..n instances of the nested form). */
  private static repeatableSubform(
    source: IFFSubFormFieldModel
  ): IRepeatableSubFormFieldOptions {
    return {
      ...FFFormConverter.field(
        "repeatablesubform",
        SubFormValueArrayValue,
        "inline",
        source
      ),
      form: FFFormConverter.convertFormOptions(source.props.subformDefinition),
    };
  }

  /** Converts a numeric field, keeping prefix/suffix/precision/width. */
  static number(source: IFFNumberFieldModel): INumberFieldOptions {
    return {
      ...FFFormConverter.field("number", NumberValue, "numberfield", source),
      prefix: source.props.prefix,
      suffix: source.props.suffix,
      decimalPlaces: source.props.decimalPlaces,
      width: source.props.width,
    };
  }

  /** Converts a single (non-repeatable) inline subform. */
  private static subform(source: IFFSubFormFieldModel) {
    return {
      ...FFFormConverter.field("subform", SubFormValue, "inline", source),
      form: FFFormConverter.convertFormOptions(source.props.subformDefinition),
    };
  }

  /** Converts a text area (same model as text, different control type). */
  private static textArea(source: IFFTextAreaFieldModel): ITextFieldOptions {
    return {
      ...FFFormConverter.field("text", StringValue, "textareafield", source),
    };
  }

  /**
   * Converts a checkbox with exactly one option into a two-choice field:
   * checked maps to the option's value/label, unchecked to empty strings.
   */
  private static singleCheckbox(
    source: IFFSelectFieldModel
  ): ITwoChoiceFieldOptions {
    return {
      ...FFFormConverter.field(
        "twochoice",
        StringValue,
        "checkboxfield",
        source
      ),
      trueLabel: source.props.listOfValues?.[0].label ?? "",
      trueValue: source.props.listOfValues?.[0].value ?? "",
      falseValue: "",
      falseLabel: "",
    };
  }
}
|
#!/usr/bin/env bats
# Integration tests for the dockerfile-lint pre-commit hook.

# A clean Dockerfile should pass the hook (exit status 0).
@test "Invoke dockerfile-lint with a working file" {
  run pre-commit try-repo . dockerfile-lint --files test/fixtures/Dockerfile-fine
  [ "$status" -eq 0 ]
}

# A broken Dockerfile should fail (exit status 1) and report rule DL3006.
@test "Invoke dockerfile-lint with a broken file" {
  run pre-commit try-repo . dockerfile-lint --files test/fixtures/Dockerfile-broken
  [ "$status" -eq 1 ]
  [[ "${output}" == *"DL3006 Always tag the version of an image explicitly"* ]]
}
|
#!/usr/bin/env bash
# Usage: find . -iname "*.h" -or -iname "*.cc" -exec ./format-sources.sh {} \;
# Fix: iterate over "$@" (quoted) and quote "$f" so file names containing
# spaces or glob characters are not split or expanded.
for f in "$@"
do
  # apply clang-format file .clang-format in top dir
  clang-format -i -style=file -fallback-style=none "$f"
  # divider lines with 120 chars
  perl -pi -e 's#^//-+$#//----------------------------------------------------------------------------------------------------------------------#g' "$f"
done
<reponame>seidu626/vumi<filename>vumi/transports/smpp/smpp_transport.py
# -*- test-case-name: vumi.transports.smpp.tests.test_smpp_transport -*-
import json
import warnings
from uuid import uuid4
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue, succeed
from smpp.pdu import decode_pdu
from smpp.pdu_builder import PDU
from vumi.message import TransportUserMessage
from vumi.persist.txredis_manager import TxRedisManager
from vumi.transports.base import Transport
from vumi.transports.smpp.config import SmppTransportConfig
from vumi.transports.smpp.deprecated.transport import (
SmppTransportConfig as OldSmppTransportConfig)
from vumi.transports.smpp.deprecated.utils import convert_to_new_config
from vumi.transports.smpp.smpp_service import SmppService
from vumi.transports.failures import FailureMessage
def sequence_number_key(seq_no):
    """Redis key mapping an SMPP sequence number to a vumi message id."""
    return 'sequence_number:{0}'.format(seq_no)
def multipart_info_key(seq_no):
    """Redis key for the multipart bookkeeping hash of a message."""
    return 'multipart_info:{0}'.format(seq_no)
def message_key(message_id):
    """Redis key for a cached outbound vumi message."""
    return 'message:{0}'.format(message_id)
def pdu_key(seq_no):
    """Redis key for a cached PDU, indexed by its sequence number."""
    return 'pdu:{0}'.format(seq_no)
def remote_message_key(message_id):
    """Redis key mapping a remote (SMSC) message id to a vumi message id."""
    return 'remote_message:{0}'.format(message_id)
class CachedPDU(object):
    """
    A cached PDU with its associated vumi message_id.

    The sequence number is pulled out of the PDU header at construction
    time so callers can key storage by it.
    """

    def __init__(self, vumi_message_id, pdu):
        self.vumi_message_id = vumi_message_id
        self.pdu = pdu
        self.seq_no = pdu.obj['header']['sequence_number']

    @classmethod
    def from_json(cls, pdu_json):
        """Rebuild a CachedPDU from its JSON form; None maps to None."""
        if pdu_json is None:
            return None
        data = json.loads(pdu_json)
        wrapper = PDU(None, None, None)
        wrapper.obj = decode_pdu(data['pdu'])
        return cls(data['vumi_message_id'], wrapper)

    def to_json(self):
        """Serialize for Redis; the PDU is stored in wire (hex) format to
        avoid json encoding troubles."""
        return json.dumps({
            'vumi_message_id': self.vumi_message_id,
            'pdu': self.pdu.get_hex(),
        })
class SmppMessageDataStash(object):
    """
    Stash message data in Redis.

    Holds short-lived correlation state for the SMPP transport: sequence
    number -> vumi message id mappings, cached outbound messages and PDUs,
    multipart-message bookkeeping, and remote (SMSC) message ids used to
    match delivery reports. Entries carry TTLs taken from the config.
    All methods return Twisted Deferreds from the txredis manager.
    """

    def __init__(self, redis, config):
        self.redis = redis
        self.config = config

    def init_multipart_info(self, message_id, part_count):
        # Record how many parts a multipart message was split into.
        key = multipart_info_key(message_id)
        expiry = self.config.submit_sm_expiry
        d = self.redis.hmset(key, {
            'parts': part_count,
        })
        d.addCallback(lambda _: self.redis.expire(key, expiry))
        return d

    def get_multipart_info(self, message_id):
        key = multipart_info_key(message_id)
        return self.redis.hgetall(key)

    def _update_multipart_info_success_cb(self, mp_info, key, remote_id):
        if not mp_info:
            # No multipart data, so do nothing.
            return
        part_key = 'part:%s' % (remote_id,)
        mp_info[part_key] = 'ack'
        d = self.redis.hset(key, part_key, 'ack')
        d.addCallback(lambda _: mp_info)
        return d

    def update_multipart_info_success(self, message_id, remote_id):
        # Mark one part of a multipart message as acknowledged.
        key = multipart_info_key(message_id)
        d = self.get_multipart_info(message_id)
        d.addCallback(self._update_multipart_info_success_cb, key, remote_id)
        return d

    def _update_multipart_info_failure_cb(self, mp_info, key, remote_id):
        if not mp_info:
            # No multipart data, so do nothing.
            return
        part_key = 'part:%s' % (remote_id,)
        mp_info[part_key] = 'fail'
        d = self.redis.hset(key, part_key, 'fail')
        # A single failed part fails the whole message.
        d.addCallback(lambda _: self.redis.hset(key, 'event_result', 'fail'))
        d.addCallback(lambda _: mp_info)
        return d

    def update_multipart_info_failure(self, message_id, remote_id):
        # Mark one part of a multipart message as failed.
        key = multipart_info_key(message_id)
        d = self.get_multipart_info(message_id)
        d.addCallback(self._update_multipart_info_failure_cb, key, remote_id)
        return d

    def _determine_multipart_event_cb(self, mp_info, message_id, event_type,
                                      remote_id):
        if not mp_info:
            # We don't seem to have a multipart message, so just return the
            # single-message data.
            return (True, event_type, remote_id)
        # Strip the 'part:' prefix to get remote-id -> ack/fail status.
        part_status_dict = dict(
            (k[5:], v) for k, v in mp_info.items() if k.startswith('part:'))
        remote_id = ','.join(sorted(part_status_dict.keys()))
        event_result = mp_info.get('event_result', None)
        if event_result is not None:
            # We already have a result, even if we don't have all the parts.
            event_type = event_result
        elif len(part_status_dict) >= int(mp_info['parts']):
            # We have all the parts, so we can determine the event type.
            if all(pv == 'ack' for pv in part_status_dict.values()):
                # All parts happy.
                event_type = 'ack'
            else:
                # At least one part failed.
                event_type = 'fail'
        else:
            # We don't have all the parts yet.
            return (False, None, None)
        # There's a race condition when we process multiple submit_sm_resps for
        # parts of the same messages concurrently. We only want to send one
        # event, so we do an atomic increment and ignore the event if we're
        # not the first to succeed.
        d = self.redis.hincrby(
            multipart_info_key(message_id), 'event_counter', 1)

        def confirm_multipart_event_cb(counter_value):
            if int(counter_value) == 1:
                return (True, event_type, remote_id)
            else:
                return (False, None, None)
        d.addCallback(confirm_multipart_event_cb)
        return d

    def get_multipart_event_info(self, message_id, event_type, remote_id):
        # Resolves to (should_send_event, event_type, remote_id).
        d = self.get_multipart_info(message_id)
        d.addCallback(
            self._determine_multipart_event_cb, message_id, event_type,
            remote_id)
        return d

    def expire_multipart_info(self, message_id):
        """
        Set the TTL on multipart info hash to something small. We don't delete
        this in case there's still an in-flight operation that will recreate it
        without a TTL.
        """
        expiry = self.config.completed_multipart_info_expiry
        return self.redis.expire(multipart_info_key(message_id), expiry)

    def set_sequence_number_message_id(self, sequence_number, message_id):
        # Correlate an outbound PDU's sequence number with a vumi message id.
        key = sequence_number_key(sequence_number)
        expiry = self.config.submit_sm_expiry
        return self.redis.setex(key, expiry, message_id)

    def get_sequence_number_message_id(self, sequence_number):
        return self.redis.get(sequence_number_key(sequence_number))

    def delete_sequence_number_message_id(self, sequence_number):
        return self.redis.delete(sequence_number_key(sequence_number))

    def cache_message(self, message):
        # Cache the serialized outbound message until its submit_sm resolves.
        key = message_key(message['message_id'])
        expiry = self.config.submit_sm_expiry
        return self.redis.setex(key, expiry, message.to_json())

    def get_cached_message(self, message_id):
        d = self.redis.get(message_key(message_id))
        d.addCallback(lambda json_data: (
            TransportUserMessage.from_json(json_data)
            if json_data else None))
        return d

    def delete_cached_message(self, message_id):
        return self.redis.delete(message_key(message_id))

    def cache_pdu(self, vumi_message_id, pdu):
        # Cache the PDU (keyed by sequence number) for retry/correlation.
        cached_pdu = CachedPDU(vumi_message_id, pdu)
        key = pdu_key(cached_pdu.seq_no)
        expiry = self.config.submit_sm_expiry
        return self.redis.setex(key, expiry, cached_pdu.to_json())

    def get_cached_pdu(self, seq_no):
        d = self.redis.get(pdu_key(seq_no))
        return d.addCallback(CachedPDU.from_json)

    def delete_cached_pdu(self, seq_no):
        return self.redis.delete(pdu_key(seq_no))

    def set_remote_message_id(self, message_id, smpp_message_id):
        if message_id is None:
            # If we store None, we end up with the string "None" in Redis. This
            # confuses later lookups (which treat any non-None value as a valid
            # identifier) and results in broken delivery reports.
            return succeed(None)
        key = remote_message_key(smpp_message_id)
        expire = self.config.third_party_id_expiry
        d = self.redis.setex(key, expire, message_id)
        d.addCallback(lambda _: message_id)
        return d

    def get_internal_message_id(self, smpp_message_id):
        return self.redis.get(remote_message_key(smpp_message_id))

    def delete_remote_message_id(self, smpp_message_id):
        key = remote_message_key(smpp_message_id)
        return self.redis.delete(key)

    def expire_remote_message_id(self, smpp_message_id):
        # Shorten (not delete) the mapping once a final DR has been seen.
        key = remote_message_key(smpp_message_id)
        expire = self.config.final_dr_third_party_id_expiry
        return self.redis.expire(key, expire)
class SmppTransceiverTransport(Transport):
    """SMPP transport using a transceiver (TRX) bind: sends and receives."""

    CONFIG_CLASS = SmppTransportConfig
    bind_type = 'TRX'
    clock = reactor
    # Message consumption is started explicitly rather than on transport
    # startup.
    start_message_consumer = False
    service = None
    redis = None

    @property
    def throttled(self):
        # Throttling state is owned by the underlying SmppService.
        return self.service.throttled

    @inlineCallbacks
    def setup_transport(self):
        # Publish 'starting' first so monitoring sees the transport come up
        # even if a later setup step fails.
        yield self.publish_status_starting()
        config = self.get_static_config()
        self.log.msg(
            'Starting SMPP Transport for: %s' % (config.twisted_endpoint,))
        default_prefix = '%s@%s' % (config.system_id,
                                    config.transport_name)
        # Split binds share Redis state under an explicit prefix; otherwise
        # the sub-manager is keyed by system_id@transport_name.
        redis_prefix = config.split_bind_prefix or default_prefix
        self.redis = (yield TxRedisManager.from_config(
            config.redis_manager)).sub_manager(redis_prefix)
        # Pluggable processors for delivery reports, inbound and outbound
        # short messages.
        self.dr_processor = config.delivery_report_processor(
            self, config.delivery_report_processor_config)
        self.deliver_sm_processor = config.deliver_short_message_processor(
            self, config.deliver_short_message_processor_config)
        self.submit_sm_processor = config.submit_short_message_processor(
            self, config.submit_short_message_processor_config)
        self.disable_ack = config.disable_ack
        self.disable_delivery_report = config.disable_delivery_report
        self.message_stash = SmppMessageDataStash(self.redis, config)
        self.service = self.start_service()

    def start_service(self):
        # Construct and start the SMPP client service for this bind type.
        config = self.get_static_config()
        service = SmppService(config.twisted_endpoint, self.bind_type, self)
        service.clock = self.clock
        service.startService()
        return service

    @inlineCallbacks
    def teardown_transport(self):
        # Both may be None if setup never completed.
        if self.service:
            yield self.service.stopService()
        if self.redis:
            yield self.redis._close()
def _check_address_valid(self, message, field):
try:
message[field].encode('ascii')
except UnicodeError:
return False
return True
def _reject_for_invalid_address(self, message, field):
return self.publish_nack(
message['message_id'], u'Invalid %s: %s' % (field, message[field]))
@inlineCallbacks
def on_smpp_binding(self):
yield self.publish_status_binding()
@inlineCallbacks
def on_smpp_unbinding(self):
yield self.publish_status_unbinding()
@inlineCallbacks
def on_smpp_bind(self):
yield self.publish_status_bound()
if self.throttled:
yield self.publish_throttled()
@inlineCallbacks
def on_throttled(self):
yield self.publish_throttled()
@inlineCallbacks
def on_throttled_resume(self):
yield self.publish_throttled()
@inlineCallbacks
def on_throttled_end(self):
yield self.publish_throttled_end()
@inlineCallbacks
def on_smpp_bind_timeout(self):
yield self.publish_status_bind_timeout()
@inlineCallbacks
def on_connection_lost(self, reason):
yield self.publish_status_connection_lost(reason)
def publish_status_starting(self):
return self.publish_status(
status='down',
component='smpp',
type='starting',
message='Starting')
def publish_status_binding(self):
return self.publish_status(
status='down',
component='smpp',
type='binding',
message='Binding')
def publish_status_unbinding(self):
return self.publish_status(
status='down',
component='smpp',
type='unbinding',
message='Unbinding')
def publish_status_bound(self):
return self.publish_status(
status='ok',
component='smpp',
type='bound',
message='Bound')
def publish_throttled(self):
return self.publish_status(
status='degraded',
component='smpp',
type='throttled',
message='Throttled')
def publish_throttled_end(self):
return self.publish_status(
status='ok',
component='smpp',
type='throttled_end',
message='No longer throttled')
def publish_status_bind_timeout(self):
return self.publish_status(
status='down',
component='smpp',
type='bind_timeout',
message='Timed out awaiting bind')
def publish_status_connection_lost(self, reason):
return self.publish_status(
status='down',
component='smpp',
type='connection_lost',
message=str(reason.value))
@inlineCallbacks
def handle_outbound_message(self, message):
if not self._check_address_valid(message, 'to_addr'):
yield self._reject_for_invalid_address(message, 'to_addr')
return
if not self._check_address_valid(message, 'from_addr'):
yield self._reject_for_invalid_address(message, 'from_addr')
return
yield self.message_stash.cache_message(message)
yield self.submit_sm_processor.handle_outbound_message(
message, self.service)
@inlineCallbacks
def process_submit_sm_event(self, message_id, event_type, remote_id,
command_status):
if event_type == 'ack':
yield self.message_stash.delete_cached_message(message_id)
yield self.message_stash.expire_multipart_info(message_id)
if not self.disable_ack:
yield self.publish_ack(message_id, remote_id)
else:
if event_type != 'fail':
self.log.warning(
"Unexpected multipart event type %r, assuming 'fail'" % (
event_type,))
err_msg = yield self.message_stash.get_cached_message(message_id)
command_status = command_status or 'Unspecified'
if err_msg is None:
self.log.warning(
"Could not retrieve failed message: %s" % (message_id,))
else:
yield self.message_stash.delete_cached_message(message_id)
yield self.message_stash.expire_multipart_info(message_id)
yield self.publish_nack(message_id, command_status)
yield self.failure_publisher.publish_message(
FailureMessage(message=err_msg.payload,
failure_code=None,
reason=command_status))
@inlineCallbacks
def handle_submit_sm_success(self, message_id, smpp_message_id,
command_status):
yield self.message_stash.update_multipart_info_success(
message_id, smpp_message_id)
event_info = yield self.message_stash.get_multipart_event_info(
message_id, 'ack', smpp_message_id)
event_required, event_type, remote_id = event_info
if event_required:
yield self.process_submit_sm_event(
message_id, event_type, remote_id, command_status)
@inlineCallbacks
def handle_submit_sm_failure(self, message_id, smpp_message_id,
command_status):
yield self.message_stash.update_multipart_info_failure(
message_id, smpp_message_id)
event_info = yield self.message_stash.get_multipart_event_info(
message_id, 'fail', smpp_message_id)
event_required, event_type, remote_id = event_info
if event_required:
yield self.process_submit_sm_event(
message_id, event_type, remote_id, command_status)
def handle_raw_inbound_message(self, **kwargs):
# TODO: drop the kwargs, list the allowed key word arguments
# explicitly with sensible defaults.
message_type = kwargs.get('message_type', 'sms')
message = {
'message_id': uuid4().hex,
'to_addr': kwargs['destination_addr'],
'from_addr': kwargs['source_addr'],
'content': kwargs['short_message'],
'transport_type': message_type,
'transport_metadata': {},
}
if message_type == 'ussd':
session_event = {
'new': TransportUserMessage.SESSION_NEW,
'continue': TransportUserMessage.SESSION_RESUME,
'close': TransportUserMessage.SESSION_CLOSE,
}[kwargs['session_event']]
message['session_event'] = session_event
session_info = kwargs.get('session_info')
message['transport_metadata']['session_info'] = session_info
# TODO: This logs messages that fail to serialize to JSON
# Usually this happens when an SMPP message has content
# we can't decode (e.g. data_coding == 4). We should
# remove the try-except once we handle such messages
# better.
return self.publish_message(**message).addErrback(self.log.err)
@inlineCallbacks
def handle_delivery_report(
self, receipted_message_id, delivery_status,
smpp_delivery_status):
message_id = yield self.message_stash.get_internal_message_id(
receipted_message_id)
if message_id is None:
self.log.info(
"Failed to retrieve message id for delivery report."
" Delivery report from %s discarded."
% self.transport_name)
return
if self.disable_delivery_report:
dr = None
else:
dr = yield self.publish_delivery_report(
user_message_id=message_id,
delivery_status=delivery_status,
transport_metadata={
'smpp_delivery_status': smpp_delivery_status,
})
if delivery_status in ('delivered', 'failed'):
yield self.message_stash.expire_remote_message_id(
receipted_message_id)
returnValue(dr)
class SmppReceiverTransport(SmppTransceiverTransport):
    """Receive-only variant: binds to the SMSC as a receiver (RX)."""
    bind_type = 'RX'
class SmppTransmitterTransport(SmppTransceiverTransport):
    """Transmit-only variant: binds to the SMSC as a transmitter (TX)."""
    bind_type = 'TX'
class SmppTransceiverTransportWithOldConfig(SmppTransceiverTransport):
    """Deprecated compatibility shim: accepts the old-style SMPP config
    and converts it on the fly to the processor-aware
    ``SmppTransportConfig``. Emits a PendingDeprecationWarning at
    construction time."""

    CONFIG_CLASS = OldSmppTransportConfig
    NEW_CONFIG_CLASS = SmppTransportConfig

    def __init__(self, *args, **kwargs):
        super(SmppTransceiverTransportWithOldConfig, self).__init__(*args,
                                                                    **kwargs)
        warnings.warn(
            'This is a transport using a deprecated config file. '
            'Please use the new SmppTransceiverTransport, '
            'SmppTransmitterTransport or SmppReceiverTransport '
            'with the new processor aware SmppTransportConfig.',
            category=PendingDeprecationWarning)

    def get_static_config(self):
        """Return the converted new-style config, converting (and caching
        the result) on first call."""
        # Return the cached conversion if we already did the work.
        if hasattr(self, '_converted_static_config'):
            return self._converted_static_config
        cfg = super(
            SmppTransceiverTransportWithOldConfig, self).get_static_config()
        # Copy so the conversion cannot mutate the original config data.
        original = cfg._config_data.copy()
        config = convert_to_new_config(
            original,
            'vumi.transports.smpp.processors.DeliveryReportProcessor',
            'vumi.transports.smpp.processors.SubmitShortMessageProcessor',
            'vumi.transports.smpp.processors.DeliverShortMessageProcessor'
        )
        self._converted_static_config = self.NEW_CONFIG_CLASS(
            config, static=True)
        return self._converted_static_config
|
<html>
<head>
<title>Realtime Stock Price Visualization</title>
<!-- D3 v5 loaded from the official CDN. -->
<script src="https://d3js.org/d3.v5.min.js"></script>
</head>
<body>
<!-- Chart container; the SVG below is sized to match. -->
<div id="chart" style="width: 700px; height: 500px;"></div>
<script>
// Create the root SVG that the visualization will draw into.
d3.select("#chart").append("svg")
    .attr("width", "700")
    .attr("height", "500");

// Stub: fetch price-change data from the API.
// NOTE(review): not implemented yet -- createGraph is never invoked with
// real data; confirm the intended API endpoint and response shape.
function getDataFromApi() {
    //fetch data for price changes from API
}

// Create D3 Visualization (currently only logs the received data).
function createGraph(data) {
    console.log(data);
}

// Call API
getDataFromApi();
</script>
</body>
</html>
# Example configuration file
# Rename this file to .env before use
# Generate token at https://github.com/settings/tokens
GITHUB_TOKEN=1234567890123456789012345678901234567890
|
// Source file: src/AplPingPong.java
import utilidades.Semaforo;
/**
 * Thread that repeatedly prints "Ping ". Each round it blocks on its own
 * semaphore, prints, then releases the partner semaphore so the Pong
 * thread can take its turn.
 */
class OPing extends Thread {
    Semaforo SPing, SPong;

    public OPing(Semaforo SPing, Semaforo SPong) {
        this.SPing = SPing;
        this.SPong = SPong;
    }

    public void run() {
        int round = 0;
        while (round < 50) {
            SPing.Espera();               // wait for our turn
            System.out.println("Ping ");
            SPong.Libera();               // hand the turn to Pong
            round++;
        }
    }
}
/**
 * Thread that repeatedly prints "Pong ". Mirror image of {@code OPing}:
 * waits on its own semaphore, prints, then releases Ping's semaphore.
 */
class OPong extends Thread {
    Semaforo SPing, SPong;

    public OPong(Semaforo SPing, Semaforo SPong) {
        this.SPing = SPing;
        this.SPong = SPong;
    }

    public void run() {
        int round = 0;
        while (round < 50) {
            SPong.Espera();               // wait for our turn
            System.out.println("Pong ");
            SPing.Libera();               // hand the turn back to Ping
            round++;
        }
    }
}
/**
 * Launches the Ping and Pong threads; strict alternation is enforced by
 * two binary semaphores, each thread waiting on its own and releasing the
 * other's.
 *
 * Bug fix: the original initialised SPing=0 and SPong=1, which let the
 * Pong thread run first and produced "Pong Ping Pong ..." instead of the
 * Ping-led alternation the class name implies. Opening Ping's semaphore
 * (1) and closing Pong's (0) makes the sequence start with "Ping ".
 */
public class AplPingPong {
    public static void main(String[] a) {
        Semaforo SPing = new Semaforo(1);  // Ping may run immediately
        Semaforo SPong = new Semaforo(0);  // Pong waits until Ping releases it
        OPing ObjPing = new OPing(SPing, SPong);
        OPong ObjPong = new OPong(SPing, SPong);
        ObjPing.start();
        ObjPong.start();
    }
}
|
#!/bin/bash
# Provisioning script: installs networking, container, VM and quota
# tooling. Each package is installed with its own apt-get invocation so a
# single unavailable package does not abort the rest (same behaviour as
# the original one-line-per-package script).

install_one() {
    apt-get install -y "$1"
}

# Base remote-access and networking tooling.
for pkg in openssh-server openssh-client sshpass net-tools bridge-utils \
           arptables iproute iproute2 curl nfs-common; do
    install_one "$pkg"
done

# procmail prompts during installation; keep it non-interactive.
DEBIAN_FRONTEND=noninteractive apt-get install -y procmail

for pkg in socat websockify; do
    install_one "$pkg"
done

## Container
install_one lxc

## VM
for pkg in expect qemu-kvm genisoimage cloud-utils openvpn; do
    install_one "$pkg"
done

## Quota
install_one quota
#!/bin/bash
# Replace this shell with sshd running in the foreground (-D), logging to
# stderr (-e), using the given config file, wrapped by the container
# entrypoint /entry.sh. All output (stdout and stderr) is captured to
# /tmp/output/sshd_log.
exec /entry.sh /usr/sbin/sshd -D -e -f/etc/ssh/sshd_config >/tmp/output/sshd_log 2>&1
import subprocess
import signal
import os
def runCmdTimeout(cmd, sol, size):
    """Run *cmd* and return its decoded stdout, or a timeout marker.

    :param cmd: argv list for the subprocess.
    :param sol: solution identifier; currently unused, kept so existing
        callers keep working. NOTE(review): confirm intended use.
    :param size: timeout in seconds.
    :returns: the process's stdout decoded as UTF-8, or the string
        "Command timed out" if the timeout elapsed.
    """
    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    try:
        output, _ = process.communicate(timeout=size)
        return output.decode('utf-8')
    except subprocess.TimeoutExpired:
        # Bug fix: communicate() does not kill the child on timeout, so the
        # original leaked a still-running process. Kill it and reap it, as
        # recommended by the subprocess documentation.
        process.kill()
        process.communicate()
        return "Command timed out"
def runCNF(prog, bin_dir, data_dir):
    """Run the CNF solver *prog* from *bin_dir* and return its output.

    :param prog: executable name of the solver binary.
    :param bin_dir: directory containing the solver binary.
    :param data_dir: data directory; CNF inputs live under "<data_dir>/cnf".
        NOTE(review): my_data is computed but not yet passed to the
        command -- confirm whether the solver should receive it.
    :returns: whatever runCmdTimeout returns (solver stdout or the
        "Command timed out" marker).
    """
    my_data = os.path.join(data_dir, "cnf")
    sol = "sat-" + prog
    # Bug fix: the original discarded the command output; propagate it so
    # callers can inspect the solver result. (10-second example timeout.)
    return runCmdTimeout([os.path.join(bin_dir, prog)], sol, 10)
/**
 * Flatten a user record: lifts `name`/`age` and the nested `address`
 * fields up into a single flat object.
 * Note: despite the name, this does not parse JSON text -- it reshapes an
 * already-parsed object.
 *
 * @param {object} obj - record with `name`, `age` and (optionally) a
 *   nested `address` object.
 * @returns {{name, age, street, city, state}} flattened record; address
 *   fields are `undefined` when `obj.address` is missing.
 */
function parseJSON(obj) {
  const { name, age } = obj;
  // Bug fix: the original destructured obj.address directly and threw a
  // TypeError when the record had no address. Default to {} instead.
  const { street, city, state } = obj.address ?? {};
  return {
    name,
    age,
    street,
    city,
    state
  };
}
// Example usage: flatten a sample record. (The quoted keys are plain
// object-literal syntax here, not JSON text.)
const parsedObj = parseJSON({
  "name": "John Smith",
  "age": 33,
  "address": {
    "street": "123 Main Street",
    "city": "New York City",
    "state": "New York"
  }
});
// Repository metadata (extraction artifact): gh_stars = 0
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.invariants.formula;
import com.google.common.base.Preconditions;
/**
 * Instances of this class represent logical negations of invariants
 * formulae. {@link #toString()} pretty-prints negations using equivalent
 * positive forms ({@code !=}, {@code >=}, De Morgan) where possible.
 */
public class LogicalNot<ConstantType> implements BooleanFormula<ConstantType> {

  /** The formula logically negated by this formula. */
  private final BooleanFormula<ConstantType> negatedFormula;

  /**
   * Creates a new logical negation of the given formula.
   *
   * @param pToNegate the formula to logically negate. Must not be null.
   */
  private LogicalNot(BooleanFormula<ConstantType> pToNegate) {
    Preconditions.checkNotNull(pToNegate);
    this.negatedFormula = pToNegate;
  }

  /**
   * The formula logically negated by this formula.
   *
   * @return the formula logically negated by this formula.
   */
  public BooleanFormula<ConstantType> getNegated() {
    return this.negatedFormula;
  }

  @Override
  public String toString() {
    BooleanFormula<ConstantType> negated = getNegated();
    // !!f -> f : double negations collapse.
    if (negated instanceof LogicalNot) {
      return ((LogicalNot<?>) negated).getNegated().toString();
    }
    // !(a == b) -> (a != b)
    if (negated instanceof Equal<?>) {
      Equal<?> equation = (Equal<?>) negated;
      return String.format("(%s != %s)",
          equation.getOperand1(), equation.getOperand2());
    }
    // !(a < b) -> (a >= b)
    if (negated instanceof LessThan<?>) {
      LessThan<?> lessThan = (LessThan<?>) negated;
      return String.format("(%s >= %s)",
          lessThan.getOperand1(), lessThan.getOperand2());
    }
    // !(a && b) -> (!a || !b), De Morgan; inner negations collapse.
    if (negated instanceof LogicalAnd<?>) {
      LogicalAnd<?> and = (LogicalAnd<?>) negated;
      final String left;
      if (and.getOperand1() instanceof LogicalNot) {
        left = ((LogicalNot<?>) and.getOperand1()).getNegated().toString();
      } else {
        left = String.format("(!%s)", and.getOperand1());
      }
      final String right;
      if (and.getOperand2() instanceof LogicalNot) {
        right = ((LogicalNot<?>) and.getOperand2()).getNegated().toString();
      } else {
        right = String.format("(!%s)", and.getOperand2());
      }
      return String.format("(%s || %s)", left, right);
    }
    // Fallback: plain negation.
    return String.format("(!%s)", negated);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    // Two negations are equal iff their negated operands are equal.
    if (o instanceof LogicalNot) {
      return getNegated().equals(((LogicalNot<?>) o).getNegated());
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Negate the operand's hash so a formula and its negation usually
    // hash differently while staying consistent with equals().
    return -getNegated().hashCode();
  }

  @Override
  public <ReturnType> ReturnType accept(BooleanFormulaVisitor<ConstantType, ReturnType> pVisitor) {
    return pVisitor.visit(this);
  }

  @Override
  public <ReturnType, ParamType> ReturnType accept(
      ParameterizedBooleanFormulaVisitor<ConstantType, ParamType, ReturnType> pVisitor, ParamType pParameter) {
    return pVisitor.visit(this, pParameter);
  }

  /**
   * Gets an invariants formula representing the logical negation of the given
   * operand.
   *
   * @param pToNegate the invariants formula to negate.
   *
   * @return an invariants formula representing the logical negation of the given
   * operand.
   */
  static <ConstantType> LogicalNot<ConstantType> of(BooleanFormula<ConstantType> pToNegate) {
    return new LogicalNot<>(pToNegate);
  }
}
|
// Source: huangbin082/Bin -- Algorithm/src/test/java/com/leetcode/Solution_7Test.java
package com.leetcode;
import org.testng.annotations.Test;
/**
 * TestNG test for LeetCode problem 7 (Reverse Integer) via Solution_7.
 */
public class Solution_7Test {
    @Test
    public void testReverse() {
        Solution_7 solution_7 = new Solution_7();
        // Integer.MIN_VALUE edge case: its reversal overflows int.
        // NOTE(review): the result is only printed, not asserted -- add an
        // assertion on the expected value once the contract is confirmed.
        System.out.println(solution_7.reverse(-2147483648));
    }
}
/*
* Copyright (c) 2019-2020 GeyserMC. http://geysermc.org
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @author GeyserMC
* @link https://github.com/GeyserMC/Geyser
*/
package org.geysermc.connector.network.translators.inventory;
import com.github.steveice10.mc.protocol.data.game.window.WindowType;
import com.nukkitx.protocol.bedrock.data.inventory.ContainerType;
import com.nukkitx.protocol.bedrock.data.inventory.InventoryActionData;
import lombok.AllArgsConstructor;
import org.geysermc.connector.inventory.Inventory;
import org.geysermc.connector.network.session.GeyserSession;
import org.geysermc.connector.network.translators.inventory.updater.ContainerInventoryUpdater;
import org.geysermc.connector.network.translators.inventory.updater.InventoryUpdater;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Base class for translating a Java Edition window inventory to and from
 * its Bedrock Edition container equivalent. A static registry maps each
 * Java {@link WindowType} to the translator instance that handles it
 * (furnace variants share one translator; some types are still disabled,
 * see inline comments).
 */
@AllArgsConstructor
public abstract class InventoryTranslator {

    // NOTE(review): double-brace initialisation creates an anonymous
    // HashMap subclass (with an implicit outer-class reference); a static
    // initialiser block would avoid that. Left as-is here.
    public static final Map<WindowType, InventoryTranslator> INVENTORY_TRANSLATORS = new HashMap<WindowType, InventoryTranslator>() {
        {
            put(null, new PlayerInventoryTranslator()); //player inventory
            put(WindowType.GENERIC_9X1, new SingleChestInventoryTranslator(9));
            put(WindowType.GENERIC_9X2, new SingleChestInventoryTranslator(18));
            put(WindowType.GENERIC_9X3, new SingleChestInventoryTranslator(27));
            put(WindowType.GENERIC_9X4, new DoubleChestInventoryTranslator(36));
            put(WindowType.GENERIC_9X5, new DoubleChestInventoryTranslator(45));
            put(WindowType.GENERIC_9X6, new DoubleChestInventoryTranslator(54));
            put(WindowType.BREWING_STAND, new BrewingInventoryTranslator());
            put(WindowType.ANVIL, new AnvilInventoryTranslator());
            put(WindowType.CRAFTING, new CraftingInventoryTranslator());
            //put(WindowType.GRINDSTONE, new GrindstoneInventoryTranslator()); //FIXME
            put(WindowType.MERCHANT, new MerchantInventoryTranslator());
            //put(WindowType.SMITHING, new SmithingInventoryTranslator()); //TODO for server authoritative inventories

            // The three furnace variants share one translator instance.
            InventoryTranslator furnace = new FurnaceInventoryTranslator();
            put(WindowType.FURNACE, furnace);
            put(WindowType.BLAST_FURNACE, furnace);
            put(WindowType.SMOKER, furnace);

            InventoryUpdater containerUpdater = new ContainerInventoryUpdater();
            put(WindowType.ENCHANTMENT, new EnchantmentInventoryTranslator(containerUpdater)); //TODO
            put(WindowType.GENERIC_3X3, new BlockInventoryTranslator(9, "minecraft:dispenser[facing=north,triggered=false]", ContainerType.DISPENSER, containerUpdater));
            put(WindowType.HOPPER, new BlockInventoryTranslator(5, "minecraft:hopper[enabled=false,facing=down]", ContainerType.HOPPER, containerUpdater));
            put(WindowType.SHULKER_BOX, new BlockInventoryTranslator(27, "minecraft:shulker_box[facing=north]", ContainerType.CONTAINER, containerUpdater));
            //put(WindowType.BEACON, new BlockInventoryTranslator(1, "minecraft:beacon", ContainerType.BEACON)); //TODO
        }
    };

    // Slot count of the inventory type this translator handles
    // (set via the Lombok-generated all-args constructor).
    public final int size;

    // Lifecycle hooks: called by the session when the inventory is
    // prepared/opened/closed or its contents change.
    public abstract void prepareInventory(GeyserSession session, Inventory inventory);
    public abstract void openInventory(GeyserSession session, Inventory inventory);
    public abstract void closeInventory(GeyserSession session, Inventory inventory);
    public abstract void updateProperty(GeyserSession session, Inventory inventory, int key, int value);
    public abstract void updateInventory(GeyserSession session, Inventory inventory);
    public abstract void updateSlot(GeyserSession session, Inventory inventory, int slot);

    // Slot-index mapping between the Bedrock and Java layouts.
    public abstract int bedrockSlotToJava(InventoryActionData action);
    public abstract int javaSlotToBedrock(int slot);
    public abstract SlotType getSlotType(int javaSlot);

    // Applies a batch of Bedrock inventory actions to the Java inventory.
    public abstract void translateActions(GeyserSession session, Inventory inventory, List<InventoryActionData> actions);
}
|
from typing import List
class UserManagementSystem:
    """In-memory registry of users and their associated context strings."""

    def __init__(self):
        # user_id -> user name (ids assigned sequentially from 1)
        self.users = {}
        # user_id -> list of context strings
        self.contexts = {}

    def add_user(self, name: str) -> int:
        """Register *name* and return the newly assigned user id.

        Fix: the original discarded the id, so callers had no way to know
        which id to pass to add_context/get_contexts. Returning it is
        backward compatible (the original returned None).
        """
        user_id = len(self.users) + 1
        self.users[user_id] = name
        return user_id

    def add_context(self, user_id: int, context: str) -> None:
        """Append *context* to the user's context list, creating the list
        on first use (idiomatic setdefault instead of an if/else)."""
        self.contexts.setdefault(user_id, []).append(context)

    def get_users(self) -> List[str]:
        """Return all registered user names, in id (insertion) order."""
        return list(self.users.values())

    def get_contexts(self, user_id: int) -> List[str]:
        """Return the contexts recorded for *user_id*, or [] if none."""
        return self.contexts.get(user_id, [])
import React from 'react';
import compose from 'recompose/compose';
import pure from 'recompose/pure';
import withHandlers from 'recompose/withHandlers';
import Panel from 'mtk-ui/lib/Panel';
import PanelHeader from 'mtk-ui/lib/PanelHeader';
import PanelIcon from 'mtk-ui/lib/PanelIcon';
import InputGroup from 'mtk-ui/lib/InputGroup';
import InputText from 'mtk-ui/lib/InputText';
import Button from 'mtk-ui/lib/Button';
import IconDevice from 'mcs-lite-icon/lib/IconDevice';
import IconSearch from 'mcs-lite-icon/lib/IconSearch';
import { withGetMessages } from 'react-intl-inject-hoc';
import messages from '../messages';
import styles from './styles.css';
// Presentational header for the test-device list panel: a device icon,
// the localized title, and a search input with a submit button.
// NOTE(review): only `onInputTextChange` is provided by the withHandlers
// HOC below; `filterKey`, `onSearch` and `setFilterKey` are presumably
// supplied by the parent component -- confirm at the call site.
const PanelHeaderLayout = ({
  filterKey,
  onInputTextChange,
  onSearch,
  getMessages: t,  // i18n lookup injected by withGetMessages
}) => (
  <div className={styles.base}>
    <Panel>
      <PanelHeader>
        <PanelIcon icon={<IconDevice size={24} />} />
        <div className={styles.content}>
          {t('testDeviceList')}
          {/* Controlled search input; value is lifted via props. */}
          <InputGroup className={styles.searchGroup}>
            <InputText
              value={filterKey}
              onChange={onInputTextChange}
              placeholder={t('search')}
            />
            <Button
              className={styles.searchButton}
              onClick={onSearch}
            >
              <IconSearch size={18} />
            </Button>
          </InputGroup>
        </div>
      </PanelHeader>
    </Panel>
  </div>
);
// HOC composition: render optimization (pure), i18n messages injected
// under the "Devices" namespace, and a change handler that lifts the
// input value to the parent via props.setFilterKey.
export default compose(
  pure,
  withGetMessages(messages, 'Devices'),
  withHandlers({
    onInputTextChange: props => e => props.setFilterKey(e.target.value),
  }),
)(PanelHeaderLayout);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.