text stringlengths 1 1.05M |
|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.metadata;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
/**
* Args for data Lineage.
*/
/**
 * Command-line argument definitions for data lineage recording.
 * Each constant wraps an option name and its human-readable description,
 * and can produce a commons-cli {@link Option} or read its value from a
 * parsed {@link CommandLine}.
 */
public enum LineageArgs {
    // process instance
    NOMINAL_TIME("nominalTime", "instance time"),
    ENTITY_TYPE("entityType", "type of the entity"),
    ENTITY_NAME("entityName", "name of the entity"),
    TIMESTAMP("timeStamp", "current timestamp"),
    // where
    CLUSTER("cluster", "name of the current cluster"),
    OPERATION("operation", "operation like generate, delete, replicate"),
    // who
    WORKFLOW_USER("workflowUser", "user who owns the feed instance (partition)"),
    // workflow details
    WORKFLOW_ID("workflowId", "current workflow-id of the instance"),
    RUN_ID("runId", "current run-id of the instance"),
    STATUS("status", "status of the user workflow instance"), // fixed typo: "isnstance"
    WF_ENGINE_URL("workflowEngineUrl", "url of workflow engine server, ex:oozie"),
    USER_SUBFLOW_ID("subflowId", "external id of user workflow"),
    USER_WORKFLOW_ENGINE("userWorkflowEngine", "user workflow engine type"),
    USER_WORKFLOW_NAME("userWorkflowName", "user workflow name"),
    USER_WORKFLOW_VERSION("userWorkflowVersion", "user workflow version"),
    // what inputs
    INPUT_FEED_NAMES("falconInputFeeds", "name of the feeds which are used as inputs"),
    INPUT_FEED_PATHS("falconInputPaths", "comma separated input feed instance paths"),
    // what outputs
    FEED_NAMES("feedNames", "name of the feeds which are generated/replicated/deleted"),
    FEED_INSTANCE_PATHS("feedInstancePaths", "comma separated feed instance paths"),
    // lineage data recorded
    LOG_DIR("logDir", "log dir where lineage can be recorded");

    /** Option name as it appears on the command line. */
    private final String name;
    /** Human-readable description shown in usage/help output. */
    private final String description;

    LineageArgs(String name, String description) {
        this.name = name;
        this.description = description;
    }

    /** Builds a commons-cli Option for this argument (always takes a value). */
    public Option getOption() {
        return new Option(this.name, true, this.description);
    }

    /** Returns the raw option name. */
    public String getOptionName() {
        return this.name;
    }

    /** Reads this option's value from an already-parsed command line. */
    public String getOptionValue(CommandLine cmd) {
        return cmd.getOptionValue(this.name);
    }
}
|
<filename>app/models/user.rb
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0), not null
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# provider :string(255)
# uid :string(255)
# role :integer
# avatar :text(65535)
#
# A registered account. Authentication is handled by Devise (database +
# Facebook OmniAuth); `role` distinguishes regular users from admins and
# defaults to regular on first initialization.
class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  has_many :donations
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :trackable, :validatable,
         :omniauthable, :omniauth_providers => [:facebook]
  enum role: {reg_user: 0, admin: 1}

  after_initialize :init

  # Default newly-built records to the regular-user role.
  def init
    self.role ||= 0 # will set the default value only if it's nil
  end

  # Finds or creates the account matching an OmniAuth payload.
  # New accounts get a random Devise-generated password so they can only
  # sign in via the provider until they reset it.
  def self.from_omniauth(auth)
    where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
      user.provider = auth.provider
      user.uid = auth.uid
      user.email = auth.info.email
      user.avatar = auth.info.image
      # Was corrupted in source ("<PASSWORD>ise..."); Devise.friendly_token
      # is the conventional way to assign an unguessable placeholder password.
      user.password = Devise.friendly_token[0, 20]
    end
  end

  # Short human-readable representation used in admin/debug views.
  def repr
    return self.email
  end
end
|
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
# Note: this was copied from hack/lib/version.sh and then adapted
# Changes from original:
# variables renamed:
# KUBE_* -> CATALOG_*
# kube -> catalog
# KUBE_ROOT -> ROOT
# KUBE_GO_PACKAGE -> SC_GO_PACKAGE
# added get_ldflags for use in Makefile
# -----------------------------------------------------------------------------
# Version management helpers. These functions help to set, save and load the
# following variables:
#
# CATALOG_GIT_COMMIT - The git commit id corresponding to this
# source code.
# CATALOG_GIT_TREE_STATE - "clean" indicates no changes since the git commit id
# "dirty" indicates source code changes after the git commit id
# CATALOG_GIT_VERSION - "vX.Y" used to indicate the last release version.
# CATALOG_GIT_MAJOR - The major part of the version
# CATALOG_GIT_MINOR - The minor component of the version
# Grovels through git to set a set of env variables.
#
# If CATALOG_GIT_VERSION_FILE, this function will load from that file instead of
# querying git.
# Grovels through git to set the CATALOG_GIT_* version variables
# (COMMIT, TREE_STATE, VERSION, MAJOR, MINOR).
# If CATALOG_GIT_VERSION_FILE is set, loads them from that file instead of
# querying git. Variables already set in the environment are respected.
catalog::version::get_version_vars() {
  if [[ -n ${CATALOG_GIT_VERSION_FILE-} ]]; then
    catalog::version::load_version_vars "${CATALOG_GIT_VERSION_FILE}"
    return
  fi
  # Run git against ROOT without requiring the CWD to be inside the repo.
  local git=(git --work-tree "${ROOT}")
  if [[ -n ${CATALOG_GIT_COMMIT-} ]] || CATALOG_GIT_COMMIT=$("${git[@]}" rev-parse "HEAD^{commit}" 2>/dev/null); then
    if [[ -z ${CATALOG_GIT_TREE_STATE-} ]]; then
      # Check if the tree is dirty. default to dirty
      if git_status=$("${git[@]}" status --porcelain 2>/dev/null) && [[ -z ${git_status} ]]; then
        CATALOG_GIT_TREE_STATE="clean"
      else
        CATALOG_GIT_TREE_STATE="dirty"
      fi
    fi
    # Use git describe to find the version based on annotated tags.
    if [[ -n ${CATALOG_GIT_VERSION-} ]] || CATALOG_GIT_VERSION=$("${git[@]}" describe --tags --abbrev=14 "${CATALOG_GIT_COMMIT}^{commit}" 2>/dev/null); then
      # This translates the "git describe" to an actual semver.org
      # compatible semantic version that looks something like this:
      #   v1.1.0-alpha.0.6+84c76d1142ea4d
      #
      # TODO: We continue calling this "git version" because so many
      # downstream consumers are expecting it there.
      #
      # Count the dashes to tell a pre-release tag (3 dashes) apart from a
      # plain release tag (2 dashes) in the describe output.
      DASHES_IN_VERSION=$(echo "${CATALOG_GIT_VERSION}" | sed "s/[^-]//g")
      if [[ "${DASHES_IN_VERSION}" == "---" ]] ; then
        # We have distance to subversion (v1.1.0-subversion-1-gCommitHash)
        CATALOG_GIT_VERSION=$(echo "${CATALOG_GIT_VERSION}" | sed "s/-\([0-9]\{1,\}\)-g\([0-9a-f]\{14\}\)$/.\1\+\2/")
      elif [[ "${DASHES_IN_VERSION}" == "--" ]] ; then
        # We have distance to base tag (v1.1.0-1-gCommitHash)
        CATALOG_GIT_VERSION=$(echo "${CATALOG_GIT_VERSION}" | sed "s/-g\([0-9a-f]\{14\}\)$/+\1/")
      fi
      if [[ "${CATALOG_GIT_TREE_STATE}" == "dirty" ]]; then
        # git describe --dirty only considers changes to existing files, but
        # that is problematic since new untracked .go files affect the build,
        # so use our idea of "dirty" from git status instead.
        CATALOG_GIT_VERSION+="-dirty"
      fi
      # Try to match the "git describe" output to a regex to try to extract
      # the "major" and "minor" versions and whether this is the exact tagged
      # version or whether the tree is between two tagged versions.
      if [[ "${CATALOG_GIT_VERSION}" =~ ^v([0-9]+)\.([0-9]+)(\.[0-9]+)?([-].*)?$ ]]; then
        CATALOG_GIT_MAJOR=${BASH_REMATCH[1]}
        CATALOG_GIT_MINOR=${BASH_REMATCH[2]}
        # A "+" suffix on MINOR marks a build between tagged versions.
        if [[ -n "${BASH_REMATCH[4]}" ]]; then
          CATALOG_GIT_MINOR+="+"
        fi
      fi
    fi
  fi
}
# Saves the environment flags to $1
# Saves the CATALOG_GIT_* environment variables to the file named by $1,
# in a shell-sourceable format consumed by load_version_vars.
catalog::version::save_version_vars() {
  local version_file=${1-}
  [[ -n ${version_file} ]] || {
    echo "!!! Internal error. No file specified in catalog::version::save_version_vars"
    return 1
  }
  # Heredoc content is written verbatim; unset variables become empty strings.
  cat <<EOF >"${version_file}"
CATALOG_GIT_COMMIT='${CATALOG_GIT_COMMIT-}'
CATALOG_GIT_TREE_STATE='${CATALOG_GIT_TREE_STATE-}'
CATALOG_GIT_VERSION='${CATALOG_GIT_VERSION-}'
CATALOG_GIT_MAJOR='${CATALOG_GIT_MAJOR-}'
CATALOG_GIT_MINOR='${CATALOG_GIT_MINOR-}'
EOF
}
# Loads up the version variables from file $1
# Loads the CATALOG_GIT_* variables from the sourceable file named by $1
# (the format written by catalog::version::save_version_vars).
catalog::version::load_version_vars() {
  local file="${1-}"
  if [[ -z "${file}" ]]; then
    echo "!!! Internal error. No file specified in catalog::version::load_version_vars"
    return 1
  fi
  source "${file}"
}
# Prints one Go linker -X flag assigning value $2 to pkg/version symbol $1.
catalog::version::ldflag() {
  local symbol="${1}"
  local value="${2}"
  # If you update these, also update the list pkg/version/def.bzl.
  echo "-X ${SC_GO_PACKAGE}/pkg/version.${symbol}=${value}"
}
# Prints the value that needs to be passed to the -ldflags parameter of go build
# in order to set the Kubernetes based on the git tree status.
# IMPORTANT: if you update any of these, also update the lists in
# pkg/version/def.bzl and hack/print-workspace-status.sh.
# Prints the value that needs to be passed to the -ldflags parameter of
# go build in order to stamp version info, based on the git tree status.
# Honors SOURCE_DATE_EPOCH for reproducible builds.
catalog::version::ldflags() {
  catalog::version::get_version_vars
  local buildDate=
  # Use SOURCE_DATE_EPOCH as the build timestamp when set (reproducible builds).
  [[ -z ${SOURCE_DATE_EPOCH-} ]] || buildDate="--date=@${SOURCE_DATE_EPOCH}"
  local -a ldflags=($(catalog::version::ldflag "buildDate" "$(date ${buildDate} -u +'%Y-%m-%dT%H:%M:%SZ')"))
  # Only emit flags for variables that were actually resolved.
  if [[ -n ${CATALOG_GIT_COMMIT-} ]]; then
    ldflags+=($(catalog::version::ldflag "gitCommit" "${CATALOG_GIT_COMMIT}"))
    ldflags+=($(catalog::version::ldflag "gitTreeState" "${CATALOG_GIT_TREE_STATE}"))
  fi
  if [[ -n ${CATALOG_GIT_VERSION-} ]]; then
    ldflags+=($(catalog::version::ldflag "gitVersion" "${CATALOG_GIT_VERSION}"))
  fi
  if [[ -n ${CATALOG_GIT_MAJOR-} && -n ${CATALOG_GIT_MINOR-} ]]; then
    ldflags+=(
      $(catalog::version::ldflag "gitMajor" "${CATALOG_GIT_MAJOR}")
      $(catalog::version::ldflag "gitMinor" "${CATALOG_GIT_MINOR}")
    )
  fi
  # The -ldflags parameter takes a single string, so join the output.
  echo "${ldflags[*]-}"
}
# called from Makefile
# Entry point called from the Makefile: $1 = repo root, $2 = Go package path.
catalog::version::get_ldflags() {
  # Quote assignments so paths/packages containing spaces survive intact.
  export ROOT="$1"
  export SC_GO_PACKAGE="$2"
  catalog::version::ldflags
}
# Quote the positional parameters: unquoted $1/$2 would word-split.
catalog::version::get_ldflags "$1" "$2"
|
<filename>u-boot/common/cmd_sunxi_bmp.c
/*
* (C) Copyright 2002
* <NAME>, DENX Software Engineering, <EMAIL>.
*
* See file CREDITS for list of people who contributed to this
* project.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*/
/*
* BMP handling routines
*/
#include <common.h>
#include <bmp_layout.h>
#include <command.h>
#include <malloc.h>
#include <sunxi_bmp.h>
#include <sunxi_board.h>
#include <sunxi_advert.h>
extern int sunxi_partition_get_partno_byname(const char *part_name);
static int sunxi_bmp_probe_info (uint addr);
static int sunxi_bmp_show(sunxi_bmp_store_t bmp_info);
int sunxi_advert_display(char *fatname, char *filename);
DECLARE_GLOBAL_DATA_PTR;
/*
 * Command handler for "sunxi_bmp_info".
 *
 * Prints basic information (dimensions, bit depth) about a BMP image
 * already in memory, optionally loading the file from flash first when
 * a filename argument is given.
 */
/* "sunxi_bmp_info <addr> [file]": load (optionally) and describe a BMP. */
static int do_sunxi_bmp_info(cmd_tbl_t * cmdtp, int flag, int argc, char * const argv[])
{
	uint addr;

	if(argc == 2)
	{
		/* use argument only */
		addr = simple_strtoul(argv[1], NULL, 16);
		debug("bmp addr=%x\n", addr);
	}
	else if(argc == 3)
	{
		/* 16 bytes: the former 8-byte buffer could not hold an
		 * 8-hex-digit address plus its terminating NUL */
		char load_addr[16];
		char filename[32];
		char *const bmp_argv[6] = { "fatload", "sunxi_flash", "0:0", load_addr, filename, NULL };

		addr = simple_strtoul(argv[1], NULL, 16);
		/* Bounded, NUL-terminated copies. The old memcpy(..., 8) could
		 * read past a short argv[1] and leave load_addr unterminated;
		 * the filename copy was unbounded for long names. */
		strncpy(load_addr, argv[1], sizeof(load_addr) - 1);
		load_addr[sizeof(load_addr) - 1] = '\0';
		memset(filename, 0, sizeof(filename));
		strncpy(filename, argv[2], sizeof(filename) - 1);
#ifdef DEBUG
		int i;
		for(i = 0; i < 6; i++)
		{
			printf("argv[%d] = %s\n", i, argv[i]);
		}
#endif
		if(do_fat_fsload(0, 0, 5, bmp_argv))
		{
			printf("sunxi bmp info error : unable to open bmp file %s\n", argv[2]);
			return cmd_usage(cmdtp);
		}
	}
	else
	{
		return cmd_usage(cmdtp);
	}

	return (sunxi_bmp_probe_info(addr));
}
U_BOOT_CMD(
	sunxi_bmp_info, 3, 1, do_sunxi_bmp_info,
	"manipulate BMP image data",
	"only one para : the address where the bmp stored\n"
);
/*
 * "sunxi_bmp_show <addr> [de_addr] [file]": decode a BMP (optionally loaded
 * from flash) into the display buffer and show it.
 */
static int do_sunxi_bmp_display(cmd_tbl_t * cmdtp, int flag, int argc, char * const argv[])
{
	uint addr;
	uint de_addr;
	sunxi_bmp_store_t bmp_info;

	if(argc == 2)
	{
		/* use argument only */
		addr = simple_strtoul(argv[1], NULL, 16);
#if defined(CONFIG_SUNXI_LOGBUFFER)
		de_addr = CONFIG_SYS_SDRAM_BASE + gd->ram_size - SUNXI_DISPLAY_FRAME_BUFFER_SIZE;
#else
		de_addr = SUNXI_DISPLAY_FRAME_BUFFER_ADDR;
#endif
	}
	else if(argc == 3)
	{
		addr = simple_strtoul(argv[1], NULL, 16);
		de_addr = simple_strtoul(argv[2], NULL, 16);
	}
	else if(argc == 4)
	{
		/* 16 bytes: the former 8-byte buffer could not hold an
		 * 8-hex-digit address plus its terminating NUL */
		char load_addr[16];
		char filename[32];
		char *const bmp_argv[6] = { "fatload", "sunxi_flash", "0:0", load_addr, filename, NULL };

		addr = simple_strtoul(argv[1], NULL, 16);
		/* Bounded, NUL-terminated copies (old code used raw memcpy). */
		strncpy(load_addr, argv[1], sizeof(load_addr) - 1);
		load_addr[sizeof(load_addr) - 1] = '\0';
		memset(filename, 0, sizeof(filename));
		strncpy(filename, argv[3], sizeof(filename) - 1);
		de_addr = simple_strtoul(argv[2], NULL, 16);
#ifdef DEBUG
		int i;
		for(i = 0; i < 6; i++)
		{
			printf("argv[%d] = %s\n", i, argv[i]);
		}
#endif
		if(do_fat_fsload(0, 0, 5, bmp_argv))
		{
			/* report the filename (argv[3]); argv[2] is the display address */
			printf("sunxi bmp info error : unable to open bmp file %s\n", argv[3]);
			return cmd_usage(cmdtp);
		}
	}
	else
	{
		return cmd_usage(cmdtp);
	}
	/* A display address below SDRAM cannot be valid; fall back to default. */
	if(de_addr < CONFIG_SYS_SDRAM_BASE)
	{
#if defined(CONFIG_SUNXI_LOGBUFFER)
		de_addr = CONFIG_SYS_SDRAM_BASE + gd->ram_size - SUNXI_DISPLAY_FRAME_BUFFER_SIZE;
#else
		de_addr = SUNXI_DISPLAY_FRAME_BUFFER_ADDR;
#endif
	}
	debug("bmp addr %x, display addr %x\n", addr, de_addr);
	bmp_info.buffer = (void *)de_addr;
	if(!sunxi_bmp_decode(addr, &bmp_info))
	{
		debug("decode bmp ok\n");
		return sunxi_bmp_show(bmp_info);
	}
	debug("decode bmp error\n");

	return -1;
}
U_BOOT_CMD(
	sunxi_bmp_show, 4, 1, do_sunxi_bmp_display,
	"manipulate BMP image data",
	"sunxi_bmp_display addr [de addr]\n"
	"parameters 1 : the address where the bmp stored\n"
	"parameters 2 : option para, the address where the bmp display\n"
);
#ifdef CONFIG_BOOT_GUI
/*
 * Blit a BMP image already in memory onto frame buffer fb_id, centered.
 * Supports 24/32-bit BMPs whose depth matches the frame buffer, both
 * top-down (negative height) and bottom-up row order.
 * Returns 0 on success, -1 on any validation failure.
 */
int show_bmp_on_fb(char *bmp_head_addr, unsigned int fb_id)
{
	bmp_image_t *bmp = (bmp_image_t *)bmp_head_addr;
	struct canvas *cv = NULL;
	char *src_addr;
	int src_width, src_height, src_stride, src_cp_bytes;
	char *dst_addr_b, *dst_addr_e;
	rect_t dst_crop;
	int need_set_bg = 0;

	cv = fb_lock(fb_id);
	if ((NULL == cv) || (NULL == cv->base)) {
		printf("cv=%p, base= %p\n", cv,
		       (cv != NULL) ? cv->base : 0x0);
		goto err_out;
	}
	/* "BM" magic check */
	if ((bmp->header.signature[0] != 'B')
	    || (bmp->header.signature[1] != 'M')) {
		printf("this is not a bmp picture\n");
		goto err_out;
	}
	/* only 24/32-bit BMPs whose depth equals the fb depth are supported */
	if (((24 != bmp->header.bit_count)
	     && (32 != bmp->header.bit_count))
	    || (cv->bpp != bmp->header.bit_count)) {
		printf("no support %dbit bmp picture on %dbit fb\n",
		       bmp->header.bit_count, cv->bpp);
		goto err_out;
	}
	/* no scaling: the image must fit inside the frame buffer */
	if ((bmp->header.width > cv->width)
	    || (bmp->header.height > cv->height)) {
		printf("no support big size bmp[%dx%d] on fb[%dx%d]\n",
		       bmp->header.width, bmp->header.height,
		       cv->width, cv->height);
		goto err_out;
	}
	src_width = bmp->header.width;
	src_cp_bytes = src_width * bmp->header.bit_count >> 3;
	/* BMP rows are padded to 4-byte boundaries */
	src_stride = ((src_width * bmp->header.bit_count + 31) >> 5) << 2;
	src_addr = (char *)(bmp_head_addr + bmp->header.data_offset);
	if (bmp->header.height & 0x80000000) {
		/* negative height: pixel data is already top-down */
		src_height = -bmp->header.height;
	} else {
		/* bottom-up: start at the last row and walk backwards */
		src_height = bmp->header.height;
		src_addr += (src_stride * (src_height - 1));
		src_stride = -src_stride;
	}
	/* center the image in the frame buffer */
	dst_crop.left = (cv->width - src_width) >> 1;
	dst_crop.right = dst_crop.left + src_width;
	dst_crop.top = (cv->height - src_height) >> 1;
	dst_crop.bottom = dst_crop.top + src_height;
	dst_addr_b = (char *)cv->base + cv->stride * dst_crop.top
	    + (dst_crop.left * cv->bpp >> 3);
	dst_addr_e = dst_addr_b + cv->stride * src_height;
	/* clear uncovered fb regions only when the canvas asks for it */
	need_set_bg = cv->set_interest_region(cv, &dst_crop, 1, NULL);
	if (0 != need_set_bg) {
		if (src_width != cv->width) {
			printf("memset full fb\n");
			memset((void *)(cv->base), 0, cv->stride * cv->height);
		} else if (0 != dst_crop.top) {
			printf("memset top fb\n");
			memset((void *)(cv->base), 0, cv->stride * dst_crop.top);
		}
	}
	/* row-by-row copy; src_stride may be negative for bottom-up BMPs */
	for (; dst_addr_b != dst_addr_e; dst_addr_b += cv->stride) {
		memcpy((void *)dst_addr_b, (void *)src_addr, src_cp_bytes);
		src_addr += src_stride;
	}
	if (0 != need_set_bg) {
		if ((cv->height != dst_crop.bottom)
		    && (src_width == cv->width)) {
			printf("memset bottom fb\n");
			memset((void *)(cv->base + cv->stride * dst_crop.bottom),
			       0, cv->stride * (cv->height - dst_crop.bottom));
		}
	}
	fb_unlock(fb_id, NULL, 1);
	save_disp_cmd();

	return 0;

err_out:
	if (NULL != cv)
		fb_unlock(fb_id, NULL, 0);
	return -1;
}
/*
 * Load the named BMP from the boot partition and show it on fb 0.
 * Returns 0 on success, negative on failure.
 */
int sunxi_bmp_display(char *name)
{
	int ret = 0;
	char *argv[6];
	char bmp_head[32];
	char bmp_name[32];
	/* load straight into SDRAM base: malloc/free is slow on this platform */
	char *bmp_head_addr = (char *)CONFIG_SYS_SDRAM_BASE;

#ifdef ENABLE_ADVERT_PICTURE
	if (!strcmp(name, "bootlogo.bmp"))
		if (!sunxi_advert_display("Reserve0", "advert.bmp"))
			return 0;
#endif
	if (NULL != bmp_head_addr) {
		sprintf(bmp_head, "%lx", (ulong)bmp_head_addr);
	} else {
		printf("sunxi bmp: alloc buffer for %s fail\n", name);
		return -1;
	}
	/* Guarantee NUL termination: strncpy alone leaves the buffer
	 * unterminated when name is >= sizeof(bmp_name) characters, making
	 * the later printf/fatload read past the array. */
	strncpy(bmp_name, name, sizeof(bmp_name) - 1);
	bmp_name[sizeof(bmp_name) - 1] = '\0';
	printf("bmp_name=%s\n", bmp_name);

	argv[0] = "fatload";
	argv[1] = "sunxi_flash";
	argv[2] = "0:0";
	argv[3] = bmp_head;
	argv[4] = bmp_name;
	argv[5] = NULL;
	if (do_fat_fsload(0, 0, 5, argv)) {
		printf("sunxi bmp info error : unable to open logo file %s\n", argv[4]);
		return -1;
	}
	ret = show_bmp_on_fb(bmp_head_addr, FB_ID_0);
	if (0 != ret)
		printf("show bmp on fb failed !\n");

	return ret;
}
#else
/*
 * Non-GUI variant: load the named BMP from the boot partition, decode it
 * into the display frame buffer and show it. Returns 0 on success.
 */
int sunxi_bmp_display(char *name)
{
	sunxi_bmp_store_t bmp_info;
	char bmp_name[32];
	char bmp_addr[32] = {0};
	char* bmp_buff = NULL;
	int ret = -1;
	char * bmp_argv[6] = { "fatload", "sunxi_flash", "0:0", "00000000", bmp_name, NULL };

#ifdef ENABLE_ADVERT_PICTURE
	if (!strcmp(name, "bootlogo.bmp"))
		if (!sunxi_advert_display("Reserve0", "advert.bmp"))
			return 0;
#endif
	/* free() takes a long time on this platform, so load straight into
	 * SDRAM instead of a malloc'ed buffer */
	bmp_buff = (char*)CONFIG_SYS_SDRAM_BASE;
	if(bmp_buff == NULL)
	{
		printf("sunxi bmp: alloc buffer for %s fail\n", name);
		return -1;
	}
	/* bmp decode address is CONFIG_SYS_SDRAM_BASE */
	sprintf(bmp_addr,"%lx", (ulong)bmp_buff);
	bmp_argv[3] = bmp_addr;
	memset(bmp_name, 0, sizeof(bmp_name));
	/* bounded copy: the old strcpy overflowed bmp_name for names >= 32 chars */
	strncpy(bmp_name, name, sizeof(bmp_name) - 1);
	if(do_fat_fsload(0, 0, 5, bmp_argv))
	{
		printf("sunxi bmp info error : unable to open logo file %s\n", bmp_argv[4]);
		return -1;
	}
#if defined(CONFIG_SUNXI_LOGBUFFER)
	bmp_info.buffer = (void *)(CONFIG_SYS_SDRAM_BASE + gd->ram_size - SUNXI_DISPLAY_FRAME_BUFFER_SIZE);
#else
	bmp_info.buffer = (void *)(SUNXI_DISPLAY_FRAME_BUFFER_ADDR);
#endif
	printf("bmp file buffer: 0x%lx, bmp_info.buffer: %lx\n",(ulong)bmp_buff,(ulong)bmp_info.buffer);
	if(!sunxi_bmp_decode((ulong)bmp_buff, &bmp_info))
	{
		debug("decode bmp ok\n");
		ret = sunxi_bmp_show(bmp_info);
	}

	return ret;
}
#endif
/*
 * Subroutine: sunxi_bmp_probe_info
 *
 * Description: Show information about a bmp file in memory
 *
 * Inputs: addr    address of the bmp file
 *
 * Return: 0 on success, -1 if the buffer does not hold a valid BMP
 *
 */
/* Validate the BMP signature at addr and print its dimensions/bit depth. */
static int sunxi_bmp_probe_info(uint addr)
{
	bmp_image_t *bmp = (bmp_image_t *)addr;

	if((bmp->header.signature[0] != 'B') || (bmp->header.signature[1] != 'M'))
	{
		printf("this is not a bmp picture\n");
		return -1;
	}
	debug("bmp picture detected\n"); /* was garbled: "dectede" */
	/* a negative (top-down) height is reported as its absolute value */
	printf("Image size : %d x %d\n", bmp->header.width, (bmp->header.height & 0x80000000) ? (-bmp->header.height) : (bmp->header.height));
	printf("Bits per pixel: %d\n", bmp->header.bit_count);

	return (0);
}
/*
 * Subroutine: sunxi_bmp_decode
 *
 * Description: Decode (unpad and vertically normalize) a bmp file in
 *              memory into bmp_info->buffer, ready for display
 *
 * Inputs: addr       address of the bmp file
 *         bmp_info   destination buffer and output geometry
 *
 * Return: 0 on success, -1 on invalid/unsupported BMP
 *
 */
/*
 * Decode a 24/32-bit BMP at addr into bmp_info->buffer: copies the header,
 * strips per-row padding, and flips bottom-up images so the output is
 * always top-down. Fills bmp_info x/y/bit. Returns 0 on success.
 */
int sunxi_bmp_decode(unsigned long addr, sunxi_bmp_store_t *bmp_info)
{
	char *tmp_buffer;
	char *bmp_data;
	int zero_num = 0;
	bmp_image_t *bmp = (bmp_image_t *)addr;
	int x, y, bmp_bpix;
	int tmp;

	/* "BM" magic check */
	if((bmp->header.signature[0]!='B') || (bmp->header.signature[1] !='M'))
	{
		printf("this is not a bmp picture\n");
		return -1;
	}
	debug("bmp dectece\n");

	bmp_bpix = bmp->header.bit_count/8;
	/* only 24-bit (3 bytes/px) and 32-bit (4 bytes/px) are supported */
	if((bmp_bpix != 3) && (bmp_bpix != 4))
	{
		printf("no support bmp picture without bpix 24 or 32\n");
		return -1;
	}
	if(bmp_bpix ==3)
	{
		/* 24-bit rows are padded to 4-byte boundaries; bytes to skip per row */
		zero_num = (4 - ((3*bmp->header.width) % 4))&3;
	}
	debug("bmp bitcount %d\n", bmp->header.bit_count);
	x = bmp->header.width;
	/* height sign bit set means the image is stored top-down */
	y = (bmp->header.height & 0x80000000) ? (-bmp->header.height):(bmp->header.height);
	debug("bmp x = %x, bmp y = %x\n", x, y);
	/* Temporarily force a negative (top-down) height into the copied
	 * header so consumers of the output buffer see top-down data, then
	 * restore the in-memory source header. Order matters here. */
	tmp = bmp->header.height;
	if (0 == (bmp->header.height & 0x80000000))
		bmp->header.height = (-bmp->header.height);
	memcpy(bmp_info->buffer, bmp, sizeof(bmp_header_t));
	/* advance past the header; pixel data is written after it */
	bmp_info->buffer += sizeof(bmp_header_t);
	bmp->header.height = tmp;

	tmp_buffer = (char *)bmp_info->buffer;
	bmp_data = (char *)(addr + bmp->header.data_offset);
	if(bmp->header.height & 0x80000000)
	{
		/* source already top-down */
		if(zero_num == 0)
		{
			/* no row padding: one straight copy */
			memcpy(tmp_buffer,bmp_data,x*y*bmp_bpix);
		}
		else
		{
			/* copy row by row, dropping the padding bytes */
			int i, line_bytes, real_line_byte;
			char *src;

			line_bytes = (x * bmp_bpix) + zero_num;
			real_line_byte = x * bmp_bpix;
			for(i=0; i<y; i++)
			{
				src = bmp_data + i*line_bytes;
				memcpy(tmp_buffer, src, real_line_byte);
				tmp_buffer += real_line_byte;
			}
		}
	}
	else
	{
		/* bottom-up source: copy rows in reverse to flip vertically */
		uint i, line_bytes, real_line_byte;
		char *src;

		line_bytes = (x * bmp_bpix) + zero_num;
		real_line_byte = x * bmp_bpix;
		for(i=0; i<y; i++)
		{
			src = bmp_data + (y - i - 1) * line_bytes;
			memcpy(tmp_buffer, src, real_line_byte);
			tmp_buffer += real_line_byte;
		}
	}
	bmp_info->x = x;
	bmp_info->y = y;
	bmp_info->bit = bmp->header.bit_count;
	/* flush header + pixel data so the display engine sees it in DRAM */
	flush_cache((uint)bmp_info->buffer-sizeof(bmp_header_t) , x * y * bmp_bpix+sizeof(bmp_header_t));

	return 0;
}
/*
 * Push a decoded BMP to the display: bind its buffer/geometry to the
 * frame buffer, then enable the layer. Returns 0 on success, -2 if the
 * frame buffer could not be configured.
 */
static int sunxi_bmp_show(sunxi_bmp_store_t bmp_info)
{
	int rc;

	debug("begin to set framebuffer\n");
	rc = board_display_framebuffer_set(bmp_info.x, bmp_info.y, bmp_info.bit,
					   (void *)bmp_info.buffer);
	if (rc) {
		printf("sunxi bmp display error : set frame buffer error\n");
		return -2;
	}
	debug("begin to show layer\n");
	board_display_show(0);
	debug("bmp display finish\n");

	return 0;
}
/*
 * Load file `filename` from the FAT partition named `fatname` into `addr`.
 * Returns 0 on success, -1 if the partition or file cannot be read.
 */
static int fat_read_file_ex(char *fatname, char *filename, char *addr)
{
	char file_name[32];
	char fat_name[32];
	char partition[32];
	int partition_num = -1;
	char *bmp_buff = NULL;
	char bmp_addr[32] = {0};

	/* bounded, NUL-terminated copies (old code used unbounded strcpy) */
	memset(file_name, 0, sizeof(file_name));
	strncpy(file_name, filename, sizeof(file_name) - 1);
	memset(fat_name, 0, sizeof(fat_name));
	strncpy(fat_name, fatname, sizeof(fat_name) - 1);

	partition_num = sunxi_partition_get_partno_byname(fat_name);
	if(partition_num < 0)
	{
		printf("[boot disp] can not find the partition %s\n", fat_name);
		return -1;
	}
	sprintf(partition, "%x:0", partition_num);

	bmp_buff = addr;
	if(bmp_buff == NULL)
	{
		printf("sunxi bmp: alloc buffer fail\n");
		return -1;
	}
	char * bmp_argv[6] = {"fatload", "sunxi_flash", "0:0", "00000000", file_name, NULL };
	bmp_argv[2] = partition;
	sprintf(bmp_addr, "%lx", (ulong)bmp_buff);
	bmp_argv[3] = bmp_addr;
	if(do_fat_fsload(0, 0, 5, bmp_argv))
	{
		/* report the file that failed (bmp_argv[4]); the old message
		 * printed bmp_argv[1], which is always "sunxi_flash" */
		printf("sunxi bmp info error : unable to open logo file %s\n", bmp_argv[4]);
		return -1;
	}

	return 0;
}
/*
 * Sum the buffer as 32-bit words (any 1-3 trailing bytes are zero-padded
 * into one final word) and compare against src_sum.
 * Returns 0 on match, -1 on mismatch.
 */
static __s32 check_sum(void *mem_base, __u32 size, __u32 src_sum)
{
	__u32 *buf = (__u32 *)mem_base;
	__u32 count = size >> 2;
	__u32 sum = 0;
	__u32 last = 0;
	__u32 curlen = size & 0x03;

	/* The original 4x-unrolled do-while executed at least once and
	 * underflowed the unsigned `count` whenever size < 16, reading far
	 * out of bounds. A plain word loop sums exactly the same words for
	 * every size. */
	while (count--) {
		sum += *buf++;
	}

	/* fold any trailing 1-3 bytes, zero-extended, into the sum */
	if (curlen != 0) {
		memcpy(&last, (char *)mem_base + size - curlen, curlen);
		sum += last;
	}

	if (sum == src_sum) {
		return 0;
	}
	printf("err: sum=%x; src_sum=%x\n", sum, src_sum);
	return -1;
}
/*
 * Read and validate the advert header file ("advert.crc" on Reserve0):
 * checks the magic string and that the recorded payload length fits the
 * display frame buffer. Returns 0 if valid, -1 otherwise.
 */
static int sunxi_advert_verify_head(struct __advert_head *adv_head)
{
	/* header file is loaded to the start of SDRAM, then copied out */
	char *addr = (char *)CONFIG_SYS_SDRAM_BASE;

	if ((0 > fat_read_file_ex("Reserve0", "advert.crc", addr))) {
		return -1;
	};
	memcpy((u32 *)adv_head, (u32 *)addr, sizeof(*adv_head));
	/* magic must match ADVERT_MAGIC exactly */
	if (memcmp((char *)(adv_head->magic), ADVERT_MAGIC, strlen(ADVERT_MAGIC))) {
		printf("advert magic not equal,%s\n", (char *)(adv_head->magic));
		return -1;
	}
	/* reject zero-length or larger-than-framebuffer payloads */
	if ((SUNXI_DISPLAY_FRAME_BUFFER_SIZE < adv_head->length)
	    || (0 == adv_head->length)) {
		printf("advert length=%d to big or to short\n",
		       adv_head->length);
		return -1;
	}

	return 0;
}
/*
 * Load and display the advertisement BMP: verify its header/CRC, read the
 * image into SDRAM, then show it via the GUI path or the classic decode
 * path depending on CONFIG_BOOT_GUI. Returns 0 on success, -1 on failure.
 *
 * NOTE(review): the fatname/filename parameters are currently ignored —
 * "Reserve0"/"advert.bmp" are hard-coded below; confirm against callers.
 */
int sunxi_advert_display(char *fatname, char *filename)
{
	struct __advert_head advert_head;

	if (0 > sunxi_advert_verify_head(&advert_head)) {
		return -1;
	}
	/* load the bmp and verify its checksum against the header record */
	if ((0 > fat_read_file_ex("Reserve0", "advert.bmp",
				  (char *)CONFIG_SYS_SDRAM_BASE))
	    || (0 > check_sum((u32 *)CONFIG_SYS_SDRAM_BASE,
			      advert_head.length, advert_head.check_sum))) {
		return -1;
	}
#ifdef CONFIG_BOOT_GUI
	return show_bmp_on_fb((char *)CONFIG_SYS_SDRAM_BASE, FB_ID_0);
#else
	sunxi_bmp_store_t bmp_info;

#if defined(CONFIG_SUNXI_LOGBUFFER)
	bmp_info.buffer = (void *)(CONFIG_SYS_SDRAM_BASE + gd->ram_size - SUNXI_DISPLAY_FRAME_BUFFER_SIZE);
#else
	bmp_info.buffer = (void *)(SUNXI_DISPLAY_FRAME_BUFFER_ADDR);
#endif
	debug("check_sum advert bmp ok\n");
	if (!sunxi_bmp_decode(CONFIG_SYS_SDRAM_BASE, &bmp_info)) {
		debug("decode bmp ok\n");
		return sunxi_bmp_show(bmp_info);
	}
	return -1;
#endif /*#ifdef CONFIG_BOOT_GUI*/
}
/* "logo" command: display the default boot logo. All arguments are ignored. */
int do_sunxi_logo(cmd_tbl_t * cmdtp, int flag, int argc, char * const argv[])
{
	(void)cmdtp;
	(void)flag;
	(void)argc;
	(void)argv;

	return sunxi_bmp_display("bootlogo.bmp");
}
U_BOOT_CMD(
	logo, 1, 0, do_sunxi_logo,
	"show default logo",
	"no args\n"
);
|
#!/bin/bash
# Integration test driver: start the server, run the client against it,
# then emit the client's stdout (the observable test result) on stdout and
# all other logs on stderr for diagnostics.

# Remove stale client output so a failed run cannot pass on old data.
rm cliStdo.log &> /dev/null
# Server gets 6s total; the client starts 3s in and gets 4s, so the client
# always finishes before the server is interrupted.
timeout -s SIGINT 6 mvn -q -f ServerCharCfg/pom.xml exec:java > srvStdo.log 2> srvStdr.log&
sleep 3
timeout -s SIGINT 4 mvn -q -f ClientCharSPCfg/pom.xml exec:java > cliStdo.log 2> cliStdr.log&
sleep 5
# Client stdout is the test's result stream.
cat cliStdo.log
# Everything else goes to stderr.
>&2 cat cliStdr.log
>&2 cat srvStdo.log
>&2 cat srvStdr.log
/**
 * Parse the query string of a URL into a key/value object.
 *
 * Fixes over the naive split('='): values containing '=' are preserved
 * intact, keys without a value map to '', empty fragments ('&&') are
 * skipped, and percent-encoded keys/values are decoded.
 *
 * @param {string} url - URL (or any string) possibly containing '?k=v&...'.
 * @returns {Object<string, string>} map of parameter names to values.
 */
function getQueryParams(url) {
  const params = {};
  // Everything after the first '?' is the query string.
  const splitUrl = url.split('?');
  if (splitUrl.length > 1) {
    const queryParams = splitUrl[1].split('&');
    queryParams.forEach((param) => {
      if (param === '') return; // tolerate '&&' or trailing '&'
      // Split on the FIRST '=' only, so values may themselves contain '='.
      const eq = param.indexOf('=');
      const key = eq === -1 ? param : param.slice(0, eq);
      const value = eq === -1 ? '' : param.slice(eq + 1);
      params[decodeURIComponent(key)] = decodeURIComponent(value);
    });
  }
  return params;
}
// Example usage: parses the query string into a plain object.
const params = getQueryParams('www.example.com?a=1&b=2');
console.log(params); // Outputs { a: '1', b: '2' }
<reponame>gkapkowski/web3.py
from web3.utils.string import force_bytes
def test_contract_deployment_no_constructor(web3_tester, MathContract,
                                            MATH_RUNTIME):
    """Deploying a contract with no constructor publishes the expected runtime code."""
    txn_hash = MathContract.deploy()
    receipt = web3_tester.eth.getTransactionReceipt(txn_hash)
    assert receipt is not None

    address = receipt['contractAddress']
    assert address

    deployed_code = web3_tester.eth.getCode(address)
    assert force_bytes(deployed_code) == force_bytes(MATH_RUNTIME)
def test_contract_deployment_with_constructor_without_args(web3_tester,
                                                           SimpleConstructorContract,
                                                           SIMPLE_CONSTRUCTOR_RUNTIME):
    """A no-argument constructor deploy still lands the expected runtime code."""
    txn_hash = SimpleConstructorContract.deploy()
    receipt = web3_tester.eth.getTransactionReceipt(txn_hash)
    assert receipt is not None

    address = receipt['contractAddress']
    assert address

    deployed_code = web3_tester.eth.getCode(address)
    assert force_bytes(deployed_code) == force_bytes(SIMPLE_CONSTRUCTOR_RUNTIME)
def test_contract_deployment_with_constructor_with_arguments(web3_tester,
                                                             WithConstructorArgumentsContract,
                                                             WITH_CONSTRUCTOR_ARGUMENTS_RUNTIME):
    """Constructor arguments are encoded and the runtime code matches."""
    txn_hash = WithConstructorArgumentsContract.deploy(args=[1234, 'abcd'])
    receipt = web3_tester.eth.getTransactionReceipt(txn_hash)
    assert receipt is not None

    address = receipt['contractAddress']
    assert address

    deployed_code = web3_tester.eth.getCode(address)
    assert force_bytes(deployed_code) == force_bytes(WITH_CONSTRUCTOR_ARGUMENTS_RUNTIME)
def test_contract_deployment_with_constructor_with_address_argument(web3_tester,
                                                                    WithConstructorAddressArgumentsContract,
                                                                    WITH_CONSTRUCTOR_ADDRESS_RUNTIME):
    """An address-typed constructor argument deploys the expected runtime code."""
    txn_hash = WithConstructorAddressArgumentsContract.deploy(
        args=["0x16d9983245de15e7a9a73bc586e01ff6e08de737"])
    receipt = web3_tester.eth.getTransactionReceipt(txn_hash)
    assert receipt is not None

    address = receipt['contractAddress']
    assert address

    deployed_code = web3_tester.eth.getCode(address)
    assert force_bytes(deployed_code) == force_bytes(WITH_CONSTRUCTOR_ADDRESS_RUNTIME)
|
<gh_stars>0
package restauthserver;
import io.swagger.jaxrs.config.BeanConfig;
import io.swagger.jaxrs.listing.ApiListingResource;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.resource.Resource;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URISyntaxException;
/**
 * Embedded Jetty server (port 8096) exposing the auth REST API via Jersey,
 * with Swagger API docs served under /docs/.
 */
public class RestServer {
    private static final Logger LOG = LoggerFactory.getLogger( RestServer.class );

    /**
     * Boots Jetty with two handlers: the Swagger UI static resources and the
     * Jersey servlet context. Blocks until the server exits.
     */
    public static void main(String[] args) throws Exception {
        try {
            // Workaround for resources from JAR files
            Resource.setDefaultUseCaches( false );
            buildSwagger();
            final HandlerList handlers = new HandlerList();
            // Order matters: Swagger UI first so /docs/ is not swallowed by /*.
            handlers.addHandler( buildSwaggerUI() );
            handlers.addHandler( buildContext() );
            Server jettyServer = new Server(8096);
            jettyServer.setHandler(handlers);
            jettyServer.start();
            jettyServer.join();
        } catch (Exception e) {
            LOG.error( "There was an error starting up the Entity Browser", e );
        }
    }

    /** Builds the Jersey servlet context serving the REST resources at /. */
    private static ContextHandler buildContext() {
        ResourceConfig resourceConfig = new ResourceConfig();
        // io.swagger.jaxrs.listing loads up Swagger resources
        resourceConfig.packages( AuthRestService.class.getPackage().getName(), ApiListingResource.class.getPackage().getName() );
        ServletContainer servletContainer = new ServletContainer( resourceConfig );
        ServletHolder authRestService = new ServletHolder( servletContainer );
        ServletContextHandler context = new ServletContextHandler( ServletContextHandler.SESSIONS );
        context.setContextPath( "/" );
        context.addServlet( authRestService, "/*" );
        return context;
    }

    /** Configures the Swagger bean scanning the auth service package. */
    private static void buildSwagger()
    {
        // This configures Swagger
        BeanConfig beanConfig = new BeanConfig();
        beanConfig.setVersion( "1.0.0" );
        beanConfig.setResourcePackage( AuthRestService.class.getPackage().getName() );
        beanConfig.setBasePath( "/" );
        beanConfig.setDescription( "Auth Rest API" );
        beanConfig.setTitle( "Auth Rest Service" );
    }

    /** Serves the bundled "swaggerui" classpath resources under /docs/. */
    private static ContextHandler buildSwaggerUI() throws URISyntaxException {
        final ResourceHandler swaggerUIResourceHandler = new ResourceHandler();
        swaggerUIResourceHandler.setResourceBase( RestServer.class.getClassLoader().getResource( "swaggerui" ).toURI().toString() );
        final ContextHandler swaggerUIContext = new ContextHandler();
        swaggerUIContext.setContextPath( "/docs/" );
        swaggerUIContext.setHandler( swaggerUIResourceHandler );
        return swaggerUIContext;
    }
}
|
<reponame>socketry/async-awa
#!/usr/bin/env python
import os, resource
import asyncio
class PortScanner:
    """Concurrently probe TCP ports on a host, printing open/timed-out ports."""

    def __init__(self, host="0.0.0.0", ports=range(1, 1024+1), batch_size=1024):
        self.host = host
        self.ports = ports
        # Bounds the number of in-flight connection attempts (each consumes
        # a file descriptor).
        self.semaphore = asyncio.Semaphore(value=batch_size)
        # asyncio.get_event_loop() raises RuntimeError on Python 3.12+ when
        # no loop exists; create one explicitly in that case.
        try:
            self.loop = asyncio.get_event_loop()
        except RuntimeError:
            self.loop = asyncio.new_event_loop()
            asyncio.set_event_loop(self.loop)

    async def scan_port(self, port, timeout):
        """Attempt one TCP connect; print the port if it accepts within timeout."""
        async with self.semaphore:
            try:
                # The `loop` keyword argument was removed from
                # asyncio.open_connection in Python 3.10; the coroutine runs
                # on the current loop regardless.
                future = asyncio.open_connection(self.host, port)
                reader, writer = await asyncio.wait_for(future, timeout=timeout)
                print("{} open".format(port))
                writer.close()
            except ConnectionRefusedError:
                pass
                # print("{} closed".format(port))
            except asyncio.TimeoutError:
                print("{} timeout".format(port))

    def start(self, timeout=1.0):
        """Scan every configured port, blocking until all probes complete."""
        self.loop.run_until_complete(asyncio.gather(
            *[self.scan_port(port, timeout) for port in self.ports]
        ))
# Cap concurrency by the soft file-descriptor limit so the scan cannot run
# out of sockets (limits[0] is the soft limit, limits[1] the hard limit).
limits = resource.getrlimit(resource.RLIMIT_NOFILE)
batch_size = min(512, limits[0])
# Scan every TCP port on localhost. NOTE(review): this runs at import time
# (no `if __name__ == "__main__":` guard) — confirm that is intentional.
scanner = PortScanner(host="127.0.0.1", ports=range(1, 65535+1), batch_size=batch_size)
scanner.start()
|
<reponame>YourBetterAssistant/yourbetterassistant
"use strict";
import { Client, Message, MessageEmbed } from "discord.js";
import funcs from "../../handlers/functions";
/**
 * "serverinfo" command: replies with an embed summarizing the guild
 * (member counts, owner, roles, emojis, features, ...).
 */
module.exports = {
    name: "serverinfo",
    description: "shows an in-depth result of the server",
    category: "Information",
    memberpermissions: "VIEW_CHANNEL",
    cooldown: 5,
    usage: "serverinfo",
    run: async (client: Client, message: Message, args: string[]) => {
        const { delay } = funcs;
        args; // referenced only to satisfy the unused-parameter lint rule
        //https://discord.com/developers/docs/resources/guild#guild-object-guild-features
        let msg = await message.channel.send("Getting Info...");
        // Await the helper so the pause actually happens before editing.
        // NOTE(review): assumes funcs.delay returns a Promise — confirm.
        await delay(1000);
        // Await edits so failures reject here instead of floating unhandled.
        await msg.edit("Results Found! Placing information...");
        let embed = new MessageEmbed()
            .setTitle(`Server Info of ${message.guild?.name}`)
            .setColor("DARK_BLUE")
            .setThumbnail(
                `https://cdn.discordapp.com/icons/${message.guild?.id}/${message.guild?.icon}`
            )
            .addFields(
                {
                    name: "Members",
                    value: `${message.guild?.memberCount}`,
                    inline: true,
                },
                { name: "Owner", value: `<@!${message.guild?.ownerId}>`, inline: true },
                {
                    // Counts only cached members — not necessarily the whole guild.
                    name: "Humans",
                    value: message.guild?.members.cache
                        .filter((m) => !m.user.bot)
                        .size.toString()!,
                    inline: true,
                },
                {
                    name: "Bots",
                    value: message.guild?.members.cache
                        .filter((m) => m.user.bot)
                        .size.toString()!,
                    inline: true,
                },
                {
                    name: "Banned Members",
                    value: message.guild?.bans.cache.size.toString()!,
                    inline: true,
                },
                {
                    name: "Roles",
                    value: message.guild?.roles.cache.size.toString()!,
                    inline: true,
                },
                {
                    name: "Rules Channel",
                    value: `<#${message.guild?.rulesChannelId}>`,
                    inline: true,
                },
                {
                    name: "Emojis",
                    value: message.guild?.emojis.cache.size.toString()!,
                    inline: true,
                },
                {
                    name: "Stickers",
                    value: message.guild?.stickers.cache.size.toString()!,
                    inline: true,
                },
                {
                    name: "Verification Level",
                    value: message.guild?.verificationLevel!,
                    inline: true,
                },
                {
                    name: "Created At",
                    value: `<t:${Math.floor(message.guild?.createdTimestamp! / 1000)}>`,
                    inline: true,
                },
                { name: "Features:", value: "**\n**", inline: false }
            );
        let e = 1;
        message.guild?.features.forEach((i) =>
            embed.addField(
                ` Feature ${e++}`,
                ` <a:greentick:881473012162002984> ${i}`,
                false
            )
        );
        await msg.edit({ embeds: [embed] });
    },
};
//
|
import selectionTemplate from '../selection';
import {D3Selection, D3BindSelection} from "../selection";
// Derives a D3BindSelection from a plain d3 selection: the original selection
// becomes the prototype (so all its methods stay reachable) and every entry of
// the selection template is copied onto the new object.
export function addBindingFunctionsToSelection(d3selection: D3Selection): D3BindSelection {
    const bound: D3BindSelection = Object.create(d3selection);
    for (const name in selectionTemplate) {
        bound[name] = selectionTemplate[name];
    }
    return bound;
}
// The prototype of a bind selection is the original (unbound) d3 selection;
// let the caller derive a new plain selection from it, then re-attach the
// binding functions to the result.
export function override(selection: D3BindSelection, func: (_super: D3Selection) => D3Selection) {
    const parent: D3Selection = Object.getPrototypeOf(selection);
    return addBindingFunctionsToSelection(func(parent));
}
|
#include "stdafx.h"
extern HWND g_hMainWnd;
extern HWND g_hCanvasWnd;
extern HWND g_hRebar;
extern HWND g_hToolbars[DX_APP_NUM_TOOLBARS];
extern HWND g_hStatusBar;
static HFONT s_hCanvasFont = NULL; // fixed-pitch canvas font; created in createControls, freed in destroyControls
// indexes into s_himls / s_sizImageSize
#define IHIML_SMALL 0
#define IHIML_LARGE 1
// [small/large][toolbar index] -> image list; created by doCreateImageLists
static HIMAGELIST s_himls[2][DX_APP_NUM_TOOLBARS] = { NULL };
static SIZE s_sizImageSize[2] = {
{ 16, 16 }, // IHIML_SMALL
{ 24, 24 }, // IHIML_LARGE
};
// magenta: transparency mask color of the toolbar bitmap resources
#define rgbMaskColor RGB(255, 0, 255)
#ifdef DX_APP_USE_TEST_CTRLS
HWND g_hwndTestCtrls[DX_APP_USE_TEST_CTRLS]; // child controls hosted on the test toolbar
#endif
///////////////////////////////////////////////////////////////////////////////
// COMMAND UI
// image list index
#define IML_STD 0
#define IML_HIST 1
// a pair of command id and string id (plus optional icon bookkeeping)
typedef struct CommandUI
{
INT id, ids; // command ID and its resource-string ID
#ifndef DX_APP_NEED_DIET
INT iImageList; // IML_*
INT iIcon; // icon index within that image list
HBITMAP hbmIcon; // menu bitmap, lazily created by loadMenuBitmaps
#endif
} CommandUI;
// TODO: Add more entries...
// NOTE: The resource string IDS_TOOL_... must be in form of "(command name)|(command description)".
// X-macro table: CommandUI.dat supplies DEFINE_COMMAND_UI(...) rows; the macro
// is defined per build flavor so diet builds drop the icon columns.
static CommandUI s_CommandUI[] =
{
#ifdef DX_APP_NEED_DIET
#define DEFINE_COMMAND_UI(id, ids, iImageList, iIcon) { id, ids },
#else
#define DEFINE_COMMAND_UI(id, ids, iImageList, iIcon) { id, ids, iImageList, iIcon },
#endif
#include "CommandUI.dat"
#undef DEFINE_COMMAND_UI
};
// Dumps the command table as TSV to the trace output. Re-includes
// CommandUI.dat with a printing DEFINE_COMMAND_UI so the data stays defined
// in exactly one place.
void dumpCommandUI(void)
{
TRACEA("---[CommandUI.tsv]---(FROM HERE)---\n");
#ifdef DX_APP_NEED_DIET
TRACEA("%s\t%s\t%s\t%s\t%s\t%s\n", "(id)", "(id-dec)", "(id-hex)", "(ids)", "(ids-dec)", "(ids-hex)");
#define DEFINE_COMMAND_UI(id, ids, iImageList, iIcon) TRACEA("%s\t%d\t0x%X\t%s\t%d\t0x%X\n", #id, id, id, #ids, ids, ids);
#else
TRACEA("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", "(id)", "(id-dec)", "(id-hex)", "(ids)", "(ids-dec)", "(ids-hex)", "(image-list-index)", "(icon-index-in-image-list)");
#define DEFINE_COMMAND_UI(id, ids, iImageList, iIcon) TRACEA("%s\t%d\t0x%X\t%s\t%d\t0x%X\t%d\t%d\n", #id, id, id, #ids, ids, ids, iImageList, iIcon);
#endif
#include "CommandUI.dat"
#undef DEFINE_COMMAND_UI
TRACEA("---[CommandUI.tsv]---(DOWN TO HERE)---\n");
}
// Linear lookup of the CommandUI entry for a command ID; NULL if unknown.
static CommandUI *findCommand(INT id)
{
    size_t i;
    for (i = 0; i < _countof(s_CommandUI); ++i)
    {
        CommandUI *entry = &s_CommandUI[i];
        if (entry->id == id)
            return entry;
    }
    return NULL;
}
// Returns the display text for a command: the "(name)" part before '|' by
// default, or the "(description)" part after '|' when bDetail is TRUE.
// Returns NULL for unknown commands. The resource string is split in place.
LPTSTR getCommandText(INT id, BOOL bDetail)
{
    CommandUI *info = findCommand(id);
    if (!info)
        return NULL;

    LPTSTR text = LoadStringDx(info->ids);
    TCHAR *sep = _tcschr(text, TEXT('|'));
    if (sep)
    {
        *sep = 0; // terminate the name part
        if (bDetail)
            return sep + 1; // description follows the separator
    }
    return text;
}
// Enables or grays a command on every toolbar and on the menu.
static void enableCommand(INT id, BOOL bEnabled, HMENU hMenu)
{
    ARRAY_FOREACH(HWND hwndTB, g_hToolbars, {
        SendMessage(hwndTB, TB_ENABLEBUTTON, id, bEnabled);
    });
    EnableMenuItem(hMenu, id, bEnabled ? MF_ENABLED : MF_GRAYED);
}
// Checks or unchecks a command on every toolbar and on the menu.
static void checkCommand(INT id, BOOL bChecked, HMENU hMenu)
{
    ARRAY_FOREACH(HWND hwndTB, g_hToolbars, {
        SendMessage(hwndTB, TB_CHECKBUTTON, id, bChecked);
    });
    CheckMenuItem(hMenu, id, bChecked ? MF_CHECKED : MF_UNCHECKED);
}
// Hides the command's toolbar buttons and removes its menu item entirely
// (DeleteMenu, so the item does not come back until the menu is rebuilt).
static void hideCommand(INT id, HMENU hMenu)
{
ARRAY_FOREACH(HWND hwndTB, g_hToolbars, {
SendMessage(hwndTB, TB_HIDEBUTTON, id, TRUE);
});
DeleteMenu(hMenu, id, MF_BYCOMMAND);
}
// Lazily creates a small menu bitmap for every command that has an icon and
// attaches it to the menu item via MIIM_BITMAP. Bitmaps are cached on the
// CommandUI entry and released in destroyControls. No-op in diet builds.
void loadMenuBitmaps(HMENU hMenu)
{
#ifndef DX_APP_NEED_DIET
INT i, id, nCount = GetMenuItemCount(hMenu);
MENUITEMINFO mii = {
sizeof(mii),
MIIM_FTYPE | MIIM_BITMAP | MIIM_CHECKMARKS
};
CommandUI *info;
INT cx, cy, cxyPadding = 2;
HBITMAP hbm;
HDC hdc;
HGDIOBJ hbmOld;
UINT uStyle;
hdc = CreateCompatibleDC(NULL);
for (i = 0; i < nCount; ++i)
{
if (!GetMenuItemInfo(hMenu, i, TRUE, &mii))
continue;
// skip separators, commands without a table entry, commands that already
// have a bitmap, and commands with no icon assigned
if ((mii.fType & MFT_SEPARATOR))
continue;
id = GetMenuItemID(hMenu, i);
info = findCommand(id);
if (!info || info->hbmIcon)
continue;
if (info->iImageList == -1 || info->iIcon == -1)
continue;
//cx = GetSystemMetrics(SM_CXMENUCHECK);
//cy = GetSystemMetrics(SM_CYMENUCHECK);
cx = s_sizImageSize[IHIML_SMALL].cx;
cy = s_sizImageSize[IHIML_SMALL].cy;
//TRACEA("%d, %d\n", cx, cy);
hbm = Create24BppBitmapDx(cx + cxyPadding * 2, cy + cxyPadding * 2);
if (!hbm)
continue;
uStyle = ILD_NORMAL;
hbmOld = SelectObject(hdc, hbm);
{
// paint a white, black-framed box and blit the icon into it with padding
SelectObject(hdc, GetStockBrush(WHITE_BRUSH));
SelectObject(hdc, GetStockPen(BLACK_PEN));
Rectangle(hdc, 0, 0, cx + cxyPadding * 2, cy + cxyPadding * 2);
ImageList_DrawEx(s_himls[IHIML_SMALL][info->iImageList], info->iIcon,
hdc, cxyPadding, cxyPadding, cx, cy,
CLR_NONE, GetSysColor(COLOR_HIGHLIGHT), uStyle);
}
SelectObject(hdc, hbmOld);
// cache the bitmap so we never rebuild it; destroyControls deletes it
info->hbmIcon = mii.hbmpItem = hbm;
SetMenuItemInfo(hMenu, i, TRUE, &mii);
}
DeleteDC(hdc);
#endif
}
// Refreshes the enabled/checked state of every command on the toolbars and
// the menu. Called with hMenu == NULL to use the main window's menu bar.
void updateCommandUI(HWND hwnd, HMENU hMenu)
{
if (!hMenu)
hMenu = GetMenu(g_hMainWnd);
// TODO: Update UI status
// Undo/redo availability is queried from the richedit canvas directly.
enableCommand(ID_UNDO, Edit_CanUndo(g_hCanvasWnd), hMenu);
enableCommand(ID_REDO, (BOOL)SendMessage(g_hCanvasWnd, EM_CANREDO, 0, 0), hMenu);
// The following features are not implemented yet, so keep them grayed.
enableCommand(ID_PRINTPREVIEW, FALSE, hMenu);
enableCommand(ID_PRINT, FALSE, hMenu);
enableCommand(ID_PROPERTIES, FALSE, hMenu);
enableCommand(ID_FIND, FALSE, hMenu);
enableCommand(ID_REPLACE, FALSE, hMenu);
enableCommand(ID_HELP, FALSE, hMenu);
enableCommand(ID_PAGESETUP, FALSE, hMenu);
// TODO: Add toolbar commands
checkCommand(ID_TOOLBAR1, IsWindowVisible(g_hToolbars[0]), hMenu);
checkCommand(ID_TOOLBAR2, IsWindowVisible(g_hToolbars[1]), hMenu);
#ifdef DX_APP_USE_TEST_CTRLS
static_assert(DX_APP_NUM_TOOLBARS == 3, "TODO:");
checkCommand(ID_TOOLBAR3, IsWindowVisible(g_hToolbars[2]), hMenu);
#else
static_assert(DX_APP_NUM_TOOLBARS == 2, "TODO:");
#endif
checkCommand(ID_STATUSBAR, IsWindowVisible(g_hStatusBar), hMenu);
// There is no fourth toolbar in this build; remove the command entirely.
hideCommand(ID_TOOLBAR4, hMenu);
loadMenuBitmaps(hMenu);
}
// Shows or hides toolbar #index: both the toolbar window itself and the
// rebar band that hosts it must be toggled together.
void showToolbar(INT index, BOOL bShow)
{
    HWND hwndTB;
    ASSERT(index < DX_APP_NUM_TOOLBARS);
    hwndTB = g_hToolbars[index];
    ShowWindow(hwndTB, bShow ? SW_SHOWNOACTIVATE : SW_HIDE);
    SendMessage(g_hRebar, RB_SHOWBAND, index, bShow ? TRUE : FALSE);
}
///////////////////////////////////////////////////////////////////////////////
// CONTROLS
BOOL doCreateImageLists(void)
{
INT idBitmap;
HIMAGELIST himl;
BOOL bLarge;
bLarge = FALSE;
idBitmap = IDB_SMALLTOOLBAR1;
himl = ImageList_LoadImage(g_hInstance, MAKEINTRESOURCE(idBitmap),
s_sizImageSize[bLarge].cx, 0, rgbMaskColor, IMAGE_BITMAP,
LR_CREATEDIBSECTION);
if (!himl)
return FALSE;
s_himls[bLarge][0] = himl;
bLarge = FALSE;
idBitmap = IDB_SMALLTOOLBAR2;
himl = ImageList_LoadImage(g_hInstance, MAKEINTRESOURCE(idBitmap),
s_sizImageSize[bLarge].cx, 0, rgbMaskColor, IMAGE_BITMAP,
LR_CREATEDIBSECTION);
if (!himl)
return FALSE;
s_himls[bLarge][1] = himl;
bLarge = TRUE;
idBitmap = IDB_LARGETOOLBAR1;
himl = ImageList_LoadImage(g_hInstance, MAKEINTRESOURCE(idBitmap),
s_sizImageSize[bLarge].cx, 0, rgbMaskColor, IMAGE_BITMAP,
LR_CREATEDIBSECTION);
if (!himl)
return FALSE;
s_himls[bLarge][0] = himl;
bLarge = TRUE;
idBitmap = IDB_LARGETOOLBAR2;
himl = ImageList_LoadImage(g_hInstance, MAKEINTRESOURCE(idBitmap),
s_sizImageSize[bLarge].cx, 0, rgbMaskColor, IMAGE_BITMAP,
LR_CREATEDIBSECTION);
if (!himl)
return FALSE;
s_himls[bLarge][1] = himl;
return TRUE;
}
// Creates toolbar #1 (standard file/edit commands) as a child of hwnd.
// index selects which image list column to use; bHasRebar adjusts styles for
// rebar hosting. Returns the toolbar window, or NULL on failure.
HWND doCreateToolbar1(HWND hwnd, INT index, BOOL bHasRebar)
{
    HWND hwndToolbar = NULL;
    DWORD style, exstyle;
    INT id;
    BOOL bStandardButtons = FALSE; // TODO: Modify
    BOOL bUseLargeButtons = TRUE; // TODO: Modify
    BOOL bAddString = FALSE; // TODO: Modify
    BOOL bList = FALSE; // TODO: Modify
    // TODO: Modify toolbar buttons
    static TBBUTTON buttons[] =
    {
        // { image index, command id, button state, BTNS_, ... }
        { STD_FILENEW, ID_NEW, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_FILEOPEN, ID_OPEN, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_FILESAVE, ID_SAVE, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_PRINTPRE, ID_PRINTPREVIEW, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_PRINT, ID_PRINT, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_UNDO, ID_UNDO, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_REDOW, ID_REDO, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_CUT, ID_CUT, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_COPY, ID_COPY, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_PASTE, ID_PASTE, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_DELETE, ID_DELETE, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_PROPERTIES, ID_PROPERTIES, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_FIND, ID_FIND, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { STD_REPLACE, ID_REPLACE, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
        { -1, -1, TBSTATE_ENABLED, BTNS_SEP },
        { STD_HELP, ID_HELP, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
    };
    size_t i, k;
    LPTSTR text;
    // TODO: Invalidate iString's
    for (i = 0; i < _countof(buttons); ++i)
    {
        buttons[i].iString = -1;
    }
    // TODO: Create hwndToolbar
    style = WS_CHILD | CCS_TOP | TBS_HORZ | TBS_TOOLTIPS | CCS_NORESIZE;
    if (bHasRebar)
        style |= CCS_NORESIZE | CCS_NODIVIDER;
    if (bList && bAddString)
        style |= TBSTYLE_LIST;
    exstyle = 0;
    id = IDW_TOOLBAR1;
    hwndToolbar = CreateWindowEx(exstyle, TOOLBARCLASSNAME, NULL,
        style, 0, 0, 0, 0, hwnd, (HMENU)(INT_PTR)id, g_hInstance, NULL);
    if (!hwndToolbar)
        return NULL;
    // TODO: Initialize toolbar
    SendMessage(hwndToolbar, TB_BUTTONSTRUCTSIZE, sizeof(TBBUTTON), 0);
    SetWindowLongPtr(hwndToolbar, GWL_STYLE, GetWindowStyle(hwndToolbar) | TBSTYLE_FLAT);
    if (bAddString)
    {
        for (k = 0; k < _countof(buttons); ++k)
        {
            if (buttons[k].fsStyle & BTNS_SEP)
                continue;
            text = getCommandText(buttons[k].idCommand, FALSE);
            buttons[k].iString = (INT)SendMessage(hwndToolbar, TB_ADDSTRING, 0, (LPARAM)text);
        }
    }
    // Enable multiple image lists
    SendMessage(hwndToolbar, CCM_SETVERSION, 5, 0);
    if (bStandardButtons) // standard buttons
    {
        if (bUseLargeButtons)
        {
            TBADDBITMAP AddBitmap = { HINST_COMMCTRL, IDB_STD_LARGE_COLOR };
            //TBADDBITMAP AddBitmap = { HINST_COMMCTRL, IDB_VIEW_LARGE_COLOR };
            //TBADDBITMAP AddBitmap = { HINST_COMMCTRL, IDB_HIST_LARGE_COLOR };
            SendMessage(hwndToolbar, TB_ADDBITMAP, 0, (LPARAM)&AddBitmap);
        }
        else
        {
            TBADDBITMAP AddBitmap = { HINST_COMMCTRL, IDB_STD_SMALL_COLOR };
            //TBADDBITMAP AddBitmap = { HINST_COMMCTRL, IDB_VIEW_SMALL_COLOR };
            //TBADDBITMAP AddBitmap = { HINST_COMMCTRL, IDB_HIST_SMALL_COLOR };
            SendMessage(hwndToolbar, TB_ADDBITMAP, 0, (LPARAM)&AddBitmap);
        }
        // NOTE: the buttons themselves are added once, below, for both
        // branches. The previous code also sent TB_ADDBUTTONS here, which
        // duplicated every button whenever bStandardButtons was TRUE.
    }
    else // non-standard
    {
        SIZE siz = s_sizImageSize[bUseLargeButtons];
        SendMessage(hwndToolbar, TB_SETBITMAPSIZE, 0, MAKELPARAM(siz.cx, siz.cy));
        SendMessage(hwndToolbar, TB_SETIMAGELIST, 0, (LPARAM)s_himls[bUseLargeButtons][index]);
    }
    SendMessage(hwndToolbar, TB_ADDBUTTONS, _countof(buttons), (LPARAM)&buttons);
    // TODO: Set extended style
    {
        DWORD extended = 0;
        if (bList)
            extended |= TBSTYLE_EX_MIXEDBUTTONS; // BTNS_SHOWTEXT works
        extended |= TBSTYLE_EX_DRAWDDARROWS; // BTNS_DROPDOWN and BTNS_WHOLEDROPDOWN will work
        //extended |= TBSTYLE_EX_HIDECLIPPEDBUTTONS;
        SendMessage(hwndToolbar, TB_SETEXTENDEDSTYLE, 0, extended);
    }
    return hwndToolbar;
}
// Creates toolbar #2 (history commands: back/forward/favorites) as a child of
// hwnd. index selects which image list column to use; bHasRebar adjusts styles
// for rebar hosting. Returns the toolbar window, or NULL on failure.
HWND doCreateToolbar2(HWND hwnd, INT index, BOOL bHasRebar)
{
HWND hwndToolbar = NULL;
DWORD style, exstyle;
INT id;
BOOL bUseLargeButtons = TRUE; // TODO: Modify
BOOL bAddString = FALSE; // TODO: Modify
BOOL bList = FALSE; // TODO: Modify
// TODO: Modify toolbar buttons
static TBBUTTON buttons[] =
{
// { image index, command id, button state, BTNS_, ... }
{ HIST_BACK, ID_BACK, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
{ HIST_FORWARD, ID_FORWARD, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
{ HIST_FAVORITES, ID_FAVORITE, TBSTATE_ENABLED, BTNS_BUTTON | BTNS_AUTOSIZE },
};
size_t i, k;
LPTSTR text;
// TODO: Invalidate iString's
for (i = 0; i < _countof(buttons); ++i)
{
buttons[i].iString = -1;
}
// TODO: Create hwndToolbar
style = WS_CHILD | CCS_TOP | TBS_HORZ | TBS_TOOLTIPS | CCS_NORESIZE;
if (bHasRebar)
style |= CCS_NORESIZE | CCS_NODIVIDER;
if (bList && bAddString)
style |= TBSTYLE_LIST;
exstyle = 0;
id = IDW_TOOLBAR2;
hwndToolbar = CreateWindowEx(exstyle, TOOLBARCLASSNAME, NULL,
style, 0, 0, 0, 0, hwnd, (HMENU)(INT_PTR)id, g_hInstance, NULL);
if (!hwndToolbar)
return NULL;
// TODO: Initialize toolbar
SendMessage(hwndToolbar, TB_BUTTONSTRUCTSIZE, sizeof(TBBUTTON), 0);
SetWindowLongPtr(hwndToolbar, GWL_STYLE, GetWindowStyle(hwndToolbar) | TBSTYLE_FLAT);
if (bAddString)
{
// register each button's label with the toolbar's internal string pool
for (k = 0; k < _countof(buttons); ++k)
{
if (buttons[k].fsStyle & BTNS_SEP)
continue;
text = getCommandText(buttons[k].idCommand, FALSE);
buttons[k].iString = (INT)SendMessage(hwndToolbar, TB_ADDSTRING, 0, (LPARAM)text);
}
}
// Enable multiple image lists
SendMessage(hwndToolbar, CCM_SETVERSION, 5, 0);
{
SIZE siz = s_sizImageSize[bUseLargeButtons];
SendMessage(hwndToolbar, TB_SETBITMAPSIZE, 0, MAKELPARAM(siz.cx, siz.cy));
SendMessage(hwndToolbar, TB_SETIMAGELIST, 0, (LPARAM)s_himls[bUseLargeButtons][index]);
}
SendMessage(hwndToolbar, TB_ADDBUTTONS, _countof(buttons), (LPARAM)&buttons);
// TODO: Set extended style
{
DWORD extended = 0;
if (bList)
extended |= TBSTYLE_EX_MIXEDBUTTONS; // BTNS_SHOWTEXT works
extended |= TBSTYLE_EX_DRAWDDARROWS; // BTNS_DROPDOWN and BTNS_WHOLEDROPDOWN will work
//extended |= TBSTYLE_EX_HIDECLIPPEDBUTTONS;
SendMessage(hwndToolbar, TB_SETEXTENDEDSTYLE, 0, extended);
}
return hwndToolbar;
}
#ifdef DX_APP_USE_TEST_CTRLS
// Creates the test-controls toolbar: each "button" is a separator sized to
// host a real child control (an edit box plus push buttons). Returns the
// toolbar window, or NULL on failure.
HWND doCreateToolbar3(HWND hwnd, INT index, BOOL bHasRebar)
{
    HWND hwndToolbar = NULL;
    DWORD style, exstyle;
    INT id;
    BOOL bUseLargeButtons = TRUE; // TODO: Modify
    BOOL bAddString = FALSE; // TODO: Modify
    BOOL bList = FALSE; // TODO: Modify
    // TODO: Modify toolbar buttons
    static TBBUTTON buttons[DX_APP_USE_TEST_CTRLS + 1] =
    {
        // { image index, command id, button state, BTNS_, ... }
        { -1, ID_TEST_9, 0, BTNS_SEP },
        { -1, ID_TEST_1, 0, BTNS_SEP },
        { -1, ID_TEST_2, 0, BTNS_SEP },
        { -1, ID_TEST_3, 0, BTNS_SEP },
    };
    size_t i, k;
    LPTSTR text;
    SIZE siz;
    INT cxPadding = 4;
    // TODO: Invalidate iString's
    for (i = 0; i < _countof(buttons); ++i)
    {
        buttons[i].iString = -1;
    }
    // TODO: Create hwndToolbar
    style = WS_CHILD | CCS_TOP | TBS_HORZ | TBS_TOOLTIPS | CCS_NORESIZE;
    if (bHasRebar)
        style |= CCS_NORESIZE | CCS_NODIVIDER;
    if (bList && bAddString)
        style |= TBSTYLE_LIST;
    exstyle = 0;
    // NOTE(review): this reuses IDW_TOOLBAR2 as the control ID even though
    // this is toolbar #3 — looks like a copy-paste from doCreateToolbar2;
    // confirm whether an IDW_TOOLBAR3 constant should exist and be used here.
    id = IDW_TOOLBAR2;
    hwndToolbar = CreateWindowEx(exstyle, TOOLBARCLASSNAME, NULL,
        style, 0, 0, 0, 0, hwnd, (HMENU)(INT_PTR)id, g_hInstance, NULL);
    if (!hwndToolbar)
        return NULL;
    // TODO: Initialize toolbar
    SendMessage(hwndToolbar, TB_BUTTONSTRUCTSIZE, sizeof(TBBUTTON), 0);
    SetWindowLongPtr(hwndToolbar, GWL_STYLE, GetWindowStyle(hwndToolbar) | TBSTYLE_FLAT);
    for (i = 0; i < DX_APP_USE_TEST_CTRLS; ++i)
    {
        TCHAR szText[MAX_PATH];
        HFONT hFont = GetStockFont(DEFAULT_GUI_FONT);
        if (i == 0)
        {
            // slot 0 hosts an edit control of fixed width
            style = WS_CHILD | WS_VISIBLE | ES_LEFT | ES_AUTOHSCROLL;
            exstyle = WS_EX_CLIENTEDGE;
            g_hwndTestCtrls[i] = CreateWindowEx(exstyle, TEXT("EDIT"), NULL, style,
                0, 0, 0, 0, hwndToolbar, (HMENU)(INT_PTR)(IDW_TEST_CTRL_1 + i), g_hInstance, NULL);
            SetWindowFont(g_hwndTestCtrls[i], hFont, TRUE);
            buttons[i].iBitmap = 80 + cxPadding; // control width
            continue;
        }
        // the remaining slots host push buttons sized to their label
        StringCchPrintf(szText, _countof(szText), TEXT("Test %d"), (INT)i);
        siz = GetTextExtentDx(szText, hFont);
        siz.cx += 8;
        siz.cy += 8;
        buttons[i].iBitmap = siz.cx + cxPadding; // control width
        style = WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON;
        exstyle = 0;
        g_hwndTestCtrls[i] = CreateWindowEx(exstyle, TEXT("BUTTON"), szText, style,
            0, 0, 0, 0, hwndToolbar, (HMENU)(INT_PTR)(buttons[i].idCommand), g_hInstance, NULL);
        if (g_hwndTestCtrls[i] == NULL)
            return NULL; // was "return FALSE" — this function returns an HWND
        SetWindowFont(g_hwndTestCtrls[i], hFont, TRUE);
    }
    if (bAddString)
    {
        for (k = 0; k < _countof(buttons); ++k)
        {
            if (buttons[k].fsStyle & BTNS_SEP)
                continue;
            text = getCommandText(buttons[k].idCommand, FALSE);
            buttons[k].iString = (INT)SendMessage(hwndToolbar, TB_ADDSTRING, 0, (LPARAM)text);
        }
    }
    // Enable multiple image lists
    SendMessage(hwndToolbar, CCM_SETVERSION, 5, 0);
    SendMessage(hwndToolbar, TB_ADDBUTTONS, _countof(buttons), (LPARAM)&buttons);
    // position each child control over its separator slot
    for (i = 0; i < DX_APP_USE_TEST_CTRLS; ++i)
    {
        RECT rc;
        SendMessage(hwndToolbar, TB_GETITEMRECT, (INT)i, (LPARAM)&rc);
        rc.right -= cxPadding;
        MoveWindow(g_hwndTestCtrls[i], rc.left, rc.top, rc.right - rc.left, rc.bottom - rc.top, TRUE);
    }
    // TODO: Set extended style
    {
        DWORD extended = 0;
        if (bList)
            extended |= TBSTYLE_EX_MIXEDBUTTONS; // BTNS_SHOWTEXT works
        extended |= TBSTYLE_EX_DRAWDDARROWS; // BTNS_DROPDOWN and BTNS_WHOLEDROPDOWN will work
        //extended |= TBSTYLE_EX_HIDECLIPPEDBUTTONS;
        SendMessage(hwndToolbar, TB_SETEXTENDEDSTYLE, 0, extended);
    }
    return hwndToolbar;
}
#endif // def DX_APP_USE_TEST_CTRLS
// Creates the rebar, its image lists, all toolbars, and inserts one band per
// toolbar. Returns FALSE on any failure.
BOOL doCreateRebar(HWND hwnd)
{
    DWORD style, exstyle;
    if (!doCreateImageLists())
        return FALSE;
    // TODO: Create a Rebar control
    style = WS_CHILD | WS_VISIBLE | RBS_BANDBORDERS | CCS_NODIVIDER | RBS_AUTOSIZE;
    exstyle = WS_EX_TOOLWINDOW;
    g_hRebar = CreateWindowEx(exstyle, REBARCLASSNAME, NULL, style,
        0, 0, 0, 0, hwnd, (HMENU)(INT_PTR)IDW_REBAR, g_hInstance, NULL);
    if (!g_hRebar)
        return FALSE;
    {
        REBARINFO info = { sizeof(info) };
        SendMessage(g_hRebar, RB_SETBARINFO, 0, (LPARAM)&info);
    }
    // TODO: Create toolbars
    g_hToolbars[0] = doCreateToolbar1(g_hRebar, 0, g_hRebar != NULL);
    if (!g_hToolbars[0])
        return FALSE;
    g_hToolbars[1] = doCreateToolbar2(g_hRebar, 1, g_hRebar != NULL);
    if (!g_hToolbars[1])
        return FALSE;
#ifdef DX_APP_USE_TEST_CTRLS
    static_assert(DX_APP_NUM_TOOLBARS == 3, "TODO:");
    g_hToolbars[2] = doCreateToolbar3(g_hRebar, 2, g_hRebar != NULL);
    // Was "if (!g_hToolbars[1])" — a copy-paste slip that re-checked toolbar
    // #2 and let a failed toolbar #3 slip through as success.
    if (!g_hToolbars[2])
        return FALSE;
#else
    static_assert(DX_APP_NUM_TOOLBARS == 2, "TODO:");
#endif
    {
        // Insert one band per toolbar, sized to the toolbar's natural size.
        SIZE siz;
        ARRAY_FOREACH(HWND hwndTB, g_hToolbars, {
            REBARBANDINFO band = { sizeof(band) };
            SendMessage(hwndTB, TB_GETMAXSIZE, 0, (LPARAM)&siz);
            band.fMask = RBBIM_STYLE | RBBIM_CHILD | RBBIM_CHILDSIZE | RBBIM_SIZE;
            band.hwndChild = hwndTB;
            band.fStyle = RBBS_CHILDEDGE | RBBS_GRIPPERALWAYS;
            band.cxMinChild = siz.cx;
            band.cyMinChild = siz.cy;
            band.cx = siz.cx;
            SendMessage(g_hRebar, RB_INSERTBAND, (WPARAM)-1, (LPARAM)&band);
        });
    }
    return TRUE;
}
// Loads the DLLs that register the window classes we use. RICHED32.DLL
// registers the "RichEdit" (v1.0) class used by the canvas. hInst is
// currently unused. Always returns TRUE.
BOOL registerControls(HINSTANCE hInst)
{
// TODO:
LoadLibraryA("RICHED32");
return TRUE;
}
// Creates the main window's children: the richedit canvas, the rebar with
// its toolbars, and the status bar. Returns FALSE on any failure.
BOOL createControls(HWND hwnd)
{
DWORD style, exstyle;
INT id;
// TODO: Create canvas font
{
// { 24 } initializes only the first member (lfHeight = 24); everything
// else is zeroed, then charset/pitch are filled in below.
LOGFONT lf = { 24 };
lf.lfCharSet = DEFAULT_CHARSET;
lf.lfPitchAndFamily = FIXED_PITCH | FF_MODERN;
s_hCanvasFont = CreateFontIndirect(&lf);
if (!s_hCanvasFont)
return FALSE;
}
// TODO: Create canvas window
// "RichEdit" is the v1.0 class registered by RICHED32.DLL (registerControls).
style = WS_CHILD | WS_VISIBLE | ES_LEFT | ES_MULTILINE | ES_WANTRETURN | WS_HSCROLL | WS_VSCROLL;
exstyle = WS_EX_CLIENTEDGE;
id = IDW_CANVAS;
g_hCanvasWnd = CreateWindowEx(exstyle, TEXT("RichEdit"), NULL, style, 0, 0, 0, 0,
hwnd, (HMENU)(INT_PTR)id, g_hInstance, NULL);
if (!g_hCanvasWnd)
return FALSE;
// We have to receive EN_CHANGE from richedit
{
DWORD dwMask = (DWORD)SendMessage(g_hCanvasWnd, EM_GETEVENTMASK, 0, 0);
SendMessage(g_hCanvasWnd, EM_SETEVENTMASK, 0,
dwMask | ENM_CHANGE | ENM_DROPFILES | ENM_MOUSEEVENTS);
}
// TODO: Set canvas font
//SetWindowFont(g_hCanvasWnd, GetStockFont(DEFAULT_GUI_FONT), TRUE);
SetWindowFont(g_hCanvasWnd, s_hCanvasFont, TRUE);
if (!doCreateRebar(hwnd))
return FALSE;
// TODO: Create the status bar
style = WS_CHILD | SBS_SIZEGRIP;
exstyle = 0;
id = IDW_STATUSBAR;
g_hStatusBar = CreateStatusWindow(style, NULL, hwnd, id);
if (!g_hStatusBar)
return FALSE;
return TRUE;
}
// Tears down every control and GDI object created by createControls /
// doCreateRebar. hwnd is currently unused.
void destroyControls(HWND hwnd)
{
    DestroyWindow(g_hCanvasWnd);
    ARRAY_FOREACH(HWND hwndTB, g_hToolbars, {
        DestroyWindow(hwndTB);
    });
#ifdef DX_APP_USE_TEST_CTRLS
    ARRAY_FOREACH(HWND hwndTest, g_hwndTestCtrls, {
        DestroyWindow(hwndTest);
    });
#endif
#ifndef DX_APP_NEED_DIET
    {
        // Index directly into s_CommandUI. NOTE(review): the previous
        // ARRAY_FOREACH declared a by-value "CommandUI ui", in which case
        // "ui.hbmIcon = NULL" only cleared the copy and the table kept
        // dangling HBITMAPs after DeleteObject; a direct loop is unambiguous
        // either way — confirm against the ARRAY_FOREACH definition.
        size_t i;
        for (i = 0; i < _countof(s_CommandUI); ++i)
        {
            if (s_CommandUI[i].hbmIcon)
            {
                DeleteObject(s_CommandUI[i].hbmIcon);
                s_CommandUI[i].hbmIcon = NULL;
            }
        }
    }
#endif
    DestroyWindow(g_hRebar);
    DestroyWindow(g_hStatusBar);
    {
        size_t i;
        for (i = 0; i < _countof(s_himls); ++i)
        {
            ARRAY_FOREACH(HIMAGELIST himl, s_himls[i], {
                ImageList_Destroy(himl);
            });
        }
    }
    if (s_hCanvasFont)
    {
        DeleteObject(s_hCanvasFont);
        s_hCanvasFont = NULL;
    }
}
// WM_MENUSELECT
// WM_MENUSELECT
// Shows the highlighted menu item's description in the status bar while the
// menu is open, and restores the status bar when the menu closes.
void OnMenuSelect(HWND hwnd, WPARAM wParam, LPARAM lParam)
{
UINT uItem = LOWORD(wParam), fuFlags = HIWORD(wParam);
HMENU hmenu = (HMENU)lParam;
LPTSTR text;
UINT dummy[2] = { 0 };
// for popup items, uItem is a position index; map it to the command ID
if (fuFlags & MF_POPUP)
uItem = GetMenuItemID(hmenu, uItem);
if (fuFlags & MF_SYSMENU)
{
// system menu: let MenuHelp supply the standard help text
SendMessage(g_hStatusBar, SB_SETTEXT, 255 | SBT_NOBORDERS, (LPARAM)TEXT(""));
SendMessage(g_hStatusBar, SB_SIMPLE, TRUE, 0);
MenuHelp(WM_MENUSELECT, wParam, lParam, NULL, g_hInstance, g_hStatusBar, dummy);
return;
}
// fuFlags == 0xFFFF with a NULL menu means the menu was dismissed
if (fuFlags == 0xFFFF && !hmenu)
{
SendMessage(g_hStatusBar, SB_SIMPLE, FALSE, 0);
PostMessage(hwnd, WM_COMMAND, 0, 0);
PostMessage(hwnd, WM_SIZE, 0, 0);
return;
}
// show the command description (the part after '|') in simple mode
text = getCommandText(uItem, TRUE);
if (text)
{
SendMessage(g_hStatusBar, SB_SETTEXT, 255 | SBT_NOBORDERS, (LPARAM)text);
SendMessage(g_hStatusBar, SB_SIMPLE, TRUE, 0);
return;
}
SendMessage(g_hStatusBar, SB_SETTEXT, 255 | SBT_NOBORDERS, (LPARAM)TEXT(""));
SendMessage(g_hStatusBar, SB_SIMPLE, TRUE, 0);
}
|
package com.acgist.snail.config;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.acgist.snail.context.NodeContext;
import com.acgist.snail.net.torrent.dht.request.AnnouncePeerRequest;
import com.acgist.snail.net.torrent.dht.request.FindNodeRequest;
import com.acgist.snail.net.torrent.dht.request.GetPeersRequest;
import com.acgist.snail.net.torrent.dht.request.PingRequest;
import com.acgist.snail.pojo.session.NodeSession;
import com.acgist.snail.utils.FileUtils;
import com.acgist.snail.utils.NumberUtils;
import com.acgist.snail.utils.StringUtils;
/**
* <p>DHT节点配置</p>
*
* @author acgist
*/
public final class DhtConfig extends PropertiesConfig {

	private static final Logger LOGGER = LoggerFactory.getLogger(DhtConfig.class);

	/** Singleton instance. */
	private static final DhtConfig INSTANCE = new DhtConfig();

	/**
	 * @return the singleton instance
	 */
	public static final DhtConfig getInstance() {
		return INSTANCE;
	}

	/** Configuration file: {@value} */
	private static final String DHT_CONFIG = "/config/bt.dht.properties";
	/** Message ID (request/response ID, two bytes by default): {@value} */
	public static final String KEY_T = "t";
	/** Message type ({@link #KEY_Q} for requests, {@link #KEY_R} for responses): {@value} */
	public static final String KEY_Y = "y";
	/**
	 * Request message type / request type ({@link QType}): {@value}
	 *
	 * @see QType
	 */
	public static final String KEY_Q = "q";
	/** Response message type / response parameters ({@link Map}): {@value} */
	public static final String KEY_R = "r";
	/** Request parameters ({@link Map}): {@value} */
	public static final String KEY_A = "a";
	/**
	 * Response error ({@link Map}): {@value}
	 *
	 * @see ErrorCode
	 */
	public static final String KEY_E = "e";
	/** Client version: {@value} */
	public static final String KEY_V = "v";
	/**
	 * NodeId: {@value}
	 *
	 * @see NodeContext#nodeId()
	 */
	public static final String KEY_ID = "id";
	/**
	 * Download port: {@value}
	 *
	 * @see QType#ANNOUNCE_PEER
	 * @see SystemConfig#getTorrentPortExt()
	 */
	public static final String KEY_PORT = "port";
	/**
	 * Token: {@value}
	 *
	 * @see QType#ANNOUNCE_PEER
	 */
	public static final String KEY_TOKEN = "token";
	/**
	 * Node list: {@value}
	 *
	 * @see QType#FIND_NODE
	 * @see QType#GET_PEERS
	 */
	public static final String KEY_NODES = "nodes";
	/**
	 * Peer list: {@value}
	 *
	 * @see QType#GET_PEERS
	 */
	public static final String KEY_VALUES = "values";
	/**
	 * Target (NodeId or InfoHash): {@value}
	 *
	 * @see QType#FIND_NODE
	 */
	public static final String KEY_TARGET = "target";
	/**
	 * InfoHash: {@value}
	 *
	 * @see QType#GET_PEERS
	 * @see QType#ANNOUNCE_PEER
	 */
	public static final String KEY_INFO_HASH = "info_hash";
	/**
	 * Whether the port should be detected automatically: {@value}
	 *
	 * @see #IMPLIED_PORT_AUTO
	 * @see #IMPLIED_PORT_CONFIG
	 * @see QType#ANNOUNCE_PEER
	 */
	public static final String KEY_IMPLIED_PORT = "implied_port";
	/**
	 * Automatic: ignore the configured port and use the UDP source port
	 * as the peer port (uTP supported).
	 */
	public static final Integer IMPLIED_PORT_AUTO = 1;
	/**
	 * Configured: use the port carried in the message.
	 *
	 * @see #KEY_PORT
	 */
	public static final Integer IMPLIED_PORT_CONFIG = 0;
	/**
	 * Peer list length: {@value}
	 *
	 * @see QType#GET_PEERS
	 */
	public static final int GET_PEER_SIZE = 32;
	/** NodeId length: {@value} */
	public static final int NODE_ID_LENGTH = 20;
	/**
	 * Maximum number of persisted nodes: {@value}
	 * Above this limit nodes are evicted by uniform random sampling.
	 */
	public static final int MAX_NODE_SIZE = 1024;
	/** DHT request cleanup interval in minutes: {@value} */
	public static final int DHT_REQUEST_CLEAN_INTERVAL = 10;
	/** DHT response timeout. */
	public static final int DHT_TIMEOUT = SystemConfig.RECEIVE_TIMEOUT_MILLIS;

	static {
		LOGGER.debug("初始化DHT节点配置:{}", DHT_CONFIG);
		INSTANCE.init();
		INSTANCE.release();
	}

	/**
	 * DHT request type.
	 *
	 * @author acgist
	 */
	public enum QType {

		/**
		 * ping
		 *
		 * @see PingRequest
		 */
		PING("ping"),
		/**
		 * find node
		 *
		 * @see FindNodeRequest
		 */
		FIND_NODE("find_node"),
		/**
		 * get peers
		 *
		 * @see GetPeersRequest
		 */
		GET_PEERS("get_peers"),
		/**
		 * announce peer
		 *
		 * @see AnnouncePeerRequest
		 */
		ANNOUNCE_PEER("announce_peer");

		/** Wire value of the request type. */
		private final String value;

		/**
		 * @param value wire value
		 */
		private QType(String value) {
			this.value = value;
		}

		/**
		 * @return the wire value
		 */
		public String value() {
			return this.value;
		}

		/**
		 * Resolves a request type from its wire value.
		 *
		 * @param value wire value
		 *
		 * @return the matching type, or {@code null} if unknown
		 */
		public static final QType of(String value) {
			for (QType type : QType.values()) {
				if(type.value.equals(value)) {
					return type;
				}
			}
			return null;
		}

	}

	/**
	 * DHT response error.
	 * Wire format is a {@link List}: [0]=error code; [1]=error message.
	 *
	 * @author acgist
	 */
	public enum ErrorCode {

		/** Generic error */
		CODE_201(201),
		/** Server error */
		CODE_202(202),
		/** Protocol error: malformed packet, invalid arguments, bad token */
		CODE_203(203),
		/** Method unknown */
		CODE_204(204);

		/** Numeric error code. */
		private final int code;

		/**
		 * @param code numeric error code
		 */
		private ErrorCode(int code) {
			this.code = code;
		}

		/**
		 * @return the numeric error code
		 */
		public int code() {
			return this.code;
		}

	}

	/**
	 * Default DHT bootstrap nodes: NodeID -> host:port
	 */
	private final Map<String, String> nodes = new LinkedHashMap<>();

	/**
	 * Private: singleton, use {@link #getInstance()}.
	 */
	private DhtConfig() {
		super(DHT_CONFIG);
	}

	/**
	 * Loads the default bootstrap nodes from the properties file.
	 */
	private void init() {
		this.properties.entrySet().forEach(entry -> {
			final String nodeId = (String) entry.getKey();
			final String address = (String) entry.getValue();
			if(StringUtils.isNotEmpty(nodeId) && StringUtils.isNotEmpty(address)) {
				this.nodes.put(nodeId, address);
			} else {
				LOGGER.warn("默认DHT节点注册失败:{}-{}", nodeId, address);
			}
		});
	}

	/**
	 * @return all configured DHT nodes (NodeID -> host:port)
	 */
	public Map<String, String> nodes() {
		return this.nodes;
	}

	/**
	 * Persists the current DHT nodes back to the configuration file.
	 * Note: there is no need to persist when no BT task was started.
	 */
	public void persistent() {
		LOGGER.debug("保存DHT节点配置");
		final var persistentNodes = NodeContext.getInstance().nodes();
		final int size = persistentNodes.size();
		final Random random = NumberUtils.random();
		final var data = persistentNodes.stream()
			.filter(NodeSession::persistentable)
			// keep everything while under the limit; otherwise keep each node
			// with probability MAX_NODE_SIZE/size (uniform random sampling)
			.filter(node -> size < MAX_NODE_SIZE || random.nextInt(size) < MAX_NODE_SIZE)
			.collect(Collectors.toMap(
				node -> StringUtils.hex(node.getId()),
				node -> node.getHost() + ":" + node.getPort(),
				// duplicate node IDs are possible; keep the latest entry
				// instead of letting Collectors.toMap throw IllegalStateException
				(oldValue, newValue) -> newValue
			));
		this.persistent(data, FileUtils.userDirFile(DHT_CONFIG));
	}

}
|
module PoolParty
module Resources
=begin rdoc
== Symlink

The symlink resource creates a symlink.

== Usage

has_symlink(:name => '...') do
# More options.
# This block is optional
end

== Options
* <tt>name</tt> The location of the symlink (the link that gets created)
* <tt>source</tt> The source of the symlink, the existing file or directory
(NOTE(review): this section previously said <tt>target</tt> while the example
below uses <tt>:source</tt> — confirm the canonical option name)

== Examples

has_symlink(:name => "/var/www/poolpartyrb.com/public", :source => "/var/www/poolpartyrb.com/poolparty-website/site")
=end
class Symlink < Resource
def initialize *args, &block
super
end
# State used when the symlink should exist.
def present
:create
end
# State used when the symlink should be removed.
def absent
:delete!
end
end
end
end
#!/bin/bash
#
# Copyright (c) 2010, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Creates a 'tag' based on a fvp model (model).
#
# The script takes a single argument: the model tarball's filename (first argument)
#
# Print usage to stderr and abort the script with a failure status.
function usage() {
echo "Usage: $0 model-tarball" 1>&2
exit 1
}
# Create a tag based on a fvp model tarball filename: strip the directory,
# drop the last extension (tgz expected), and lowercase the result.
function create-tag() {
	local model=$1
	local tag

	# get model basename; quoted so paths containing spaces survive intact
	tag=$(basename "$model")

	# remove any extension (tgz expected); only the LAST suffix is stripped,
	# so "foo.tar.gz" keeps ".tar"
	tag=${tag%.*}

	# finally lowercase (bash 4+ parameter expansion)
	tag=${tag,,}

	echo "$tag"
}
# Require exactly one positional argument (the model tarball), then print its tag.
[ $# -ne 1 ] && usage
# NOTE(review): ${tarball} is unquoted here; a path containing spaces will
# word-split before reaching create-tag — confirm callers never pass one.
tarball=$1; create-tag ${tarball}
|
<filename>packages/server/src/api/controllers/positions.ts
import { Request, Router } from 'express';
import { celebrate, Joi, Segments } from 'celebrate';
import { EthNetwork } from '@sommelier/shared-types';
import { HTTPError } from 'api/util/errors';
import { memoConfig, UniswapV3Fetchers } from 'services/uniswap-v3';
import {
GetPositionsResult,
GetPositionSnapshotsResult,
V3PositionData,
} from '@sommelier/shared-types/src/api'; // how do we export at root level?
import catchAsyncRoute from 'api/util/catch-async-route';
import { networkValidator } from 'api/util/validators';
import validateEthAddress from 'api/util/validate-eth-address';
import { calculateStatsForNFLPs } from 'util/calculate-stats-v3';
import config from '@config';
// Networks supported by the Uniswap v3 config (keys of the networks map).
const networks = Object.keys(config.uniswap.v3.networks);

// Route path parameters for the positions endpoints.
type Path = {
    network: EthNetwork;
    address: string;
};

// Map of NFLP id -> aggregated position data.
type V3PositionDataList = { [key: string]: V3PositionData };

// Validates :network against the configured networks and :address as an
// Ethereum address before the handler runs.
const getPositionsValidator = celebrate({
    [Segments.PARAMS]: Joi.object().keys({
        network: Joi.string()
            .valid(...networks)
            .required(),
        address: Joi.string()
            .custom(validateEthAddress, 'Validate address')
            .required(),
    }),
});
// GET /positions/:address
/**
 * Collects Uniswap V3 position data for an address: the raw positions,
 * their snapshot history grouped by NFLP id, and derived stats per NFLP.
 *
 * @param req - request whose path params carry the target network and address
 * @returns map of NFLP id -> { position, snapshots, stats }
 */
async function getPositionStats(
    req: Request<Path, unknown, unknown, unknown>,
): Promise<V3PositionDataList> {
    const { network, address } = req.params;
    const fetcher = UniswapV3Fetchers.get(network);
    // Positions and snapshots are independent queries — fetch them
    // concurrently instead of awaiting each one serially.
    const [positions, snapshots] = await Promise.all([
        fetcher.getPositions(address),
        fetcher.getPositionSnapshots(address),
    ]);
    // Snapshot ids look like "<nflpId>#<suffix>"; bucket them by NFLP id.
    const snapshotsByNFLP = snapshots.reduce(
        (acc: { [key: string]: GetPositionSnapshotsResult }, snapshot) => {
            const nflpId = snapshot.id.split('#')[0];
            if (!acc[nflpId]) {
                acc[nflpId] = [snapshot];
            } else {
                acc[nflpId].push(snapshot);
            }
            return acc;
        },
        {},
    );
    const results: V3PositionDataList = {};
    for (const position of positions) {
        const [nflpId] = position.id.split('#');
        // NOTE(review): a position with no snapshots previously passed
        // `undefined` through to calculateStatsForNFLPs; default to an empty
        // list — confirm the stats helper accepts an empty snapshot array.
        const positionSnapshots = snapshotsByNFLP[nflpId] ?? [];
        results[nflpId] = {
            position,
            snapshots: positionSnapshots,
            stats: await calculateStatsForNFLPs(position, positionSnapshots),
        };
    }
    return results;
}
const route = Router();

// Responses may be cached by shared caches.
const cacheConfig = { public: true };
// Browser cache: 30s. Shared-cache lifetime follows the getTopPools
// memoizer TTL (converted from ms to s) so cached responses expire with
// the underlying data.
const positionsConfig = {
    maxAge: 30,
    sMaxAge: memoConfig.getTopPools.ttl / 1000,
    ...cacheConfig,
};

// GET /:network/positions/:address/stats — validated, cached position stats.
route.get(
    '/:network/positions/:address/stats',
    getPositionsValidator,
    catchAsyncRoute(getPositionStats, positionsConfig),
);

export default route;
|
package control;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.SQLException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import modelo.datos.VO.UsuarioVO;
import modelo.datos.WebFacade;
/**
 * Servlet that renders the user's feed page by forwarding to Usuario.jsp.
 * POST requests are handled identically to GET.
 */
public class verFeedServlet extends HttpServlet {

    // Delegate POSTs to doGet so both verbs behave the same.
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        doGet(request,response);
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        // Hand rendering off to the JSP; any request attributes travel with it.
        RequestDispatcher rd = request.getRequestDispatcher("/Usuario.jsp");
        rd.forward(request, response);
    }
}
|
#!/bin/bash
# Control script for the job-file-gateway Spring Boot service:
# start/stop/status/restart wrapper around the service jar.

PROJECT_NAME=job
SERVICE_NAME=job-file-gateway
# Absolute directory of this script; SERVICE_HOME is its parent.
SELF_DIR=$(readlink -f "$(dirname $0)")
SERVICE_HOME=$(cd $SELF_DIR/.. && pwd )
# BK_HOME may be injected by the environment; otherwise derive it from the
# expected on-disk layout (four levels up from this script).
if [[ -z "${BK_HOME}" ]]; then
    BK_HOME=$(cd $SELF_DIR/../../../../ && pwd)
fi
JOB_HOME=${BK_HOME}/${PROJECT_NAME}
# External config directory, overridable via JOB_CONF_HOME.
if [[ -z "${JOB_CONF_HOME}" ]]; then
    JOB_CONF_HOME=${BK_HOME}/etc/${PROJECT_NAME}
fi
# Log directory, overridable via LOGS_HOME.
if [[ -z "${LOGS_HOME}" ]]; then
    LOGS_HOME=${BK_HOME}/logs/${PROJECT_NAME}/${SERVICE_NAME}/
fi
SHELL_FILE_NAME=${0##*/}
JAR_FILE=${SERVICE_HOME}/${SERVICE_NAME}.jar
# Locate a JDK when JAVA_HOME is unset or invalid: prefer the bundled
# service JDK, fall back to the common one.
if [[ ! -n "${JAVA_HOME}" ]]||[[ ! -d "${JAVA_HOME}" ]]; then
    JAVA_HOME="${BK_HOME}/service/java"
    if [[ ! -d "${JAVA_HOME}" ]]; then
        JAVA_HOME="${BK_HOME}/common/java"
    fi
    export JAVA_HOME
    export PATH=${JAVA_HOME}/bin:$PATH
fi
# Java process pid
PID=0
# Spring profile param
SPRING_PROFILE="$2"
# Resolve the service's java process id into the global PID variable.
# Matches the full jar path in `jps -l` output; PID is 0 when no matching
# process exists.
function getPID(){
    javaps=$(${JAVA_HOME}/bin/jps -l | grep "${JAR_FILE}")
    if [[ -z "$javaps" ]]; then
        PID=0
    else
        # first field of the jps line is the pid
        PID=$(echo ${javaps} | awk '{print $1}')
    fi
}
# Start the service if it is not already running; polls for up to ~30s
# until the process appears and begins listening before reporting success.
function startup() {
    # Base JVM options; further settings come from the external YAML config.
    JAVA_OPTS="$JAVA_OPTS -server -Dfile.encoding=UTF-8"
    JAVA_OPTS="$JAVA_OPTS -Dspring.config.additional-location=file://${JOB_CONF_HOME}/application-file-gateway.yml"
    # Spring profile comes from the second CLI argument, defaulting to prod.
    if [ -n "${SPRING_PROFILE}" ];then
        JAVA_OPTS="$JAVA_OPTS -Dspring.profiles.active=${SPRING_PROFILE}"
    else
        JAVA_OPTS="$JAVA_OPTS -Dspring.profiles.active=prod"
    fi
    getPID
    if [[ ${PID} -ne 0 ]]; then
        echo "${PROJECT_NAME}:${SERVICE_NAME} already started(PID=$PID)"
    else
        echo -n "Starting ${PROJECT_NAME}:${SERVICE_NAME}"
        if [[ ! -d "${LOGS_HOME}" ]]; then
            mkdir -p "${LOGS_HOME}"
        fi
        # Console output is discarded; switch NOHUPLOG to the commented file
        # below to capture it for debugging.
        NOHUPLOG=/dev/null
        #NOHUPLOG=${PROJECT_NAME}_${SERVICE_NAME}_console.log
        nohup ${JAVA_HOME}/bin/java ${JAVA_OPTS} -jar ${JAR_FILE} > ${NOHUPLOG} 2>&1 &
        # Process listening port status, 0:not listen, 1:listen
        HTTP_PORT_STATUS=0
        # Poll every 0.5s, up to 60 times, until the process is listening.
        for i in $(seq 60)
        do
            sleep 0.5
            echo -e ".\c"
            getPID
            if [[ ${PID} -ne 0 ]]; then
                checkPortStatus "${PID}"
                HTTP_PORT_STATUS=$?
                if [[ ${HTTP_PORT_STATUS} -gt 0 ]]; then
                    break;
                fi
            fi
        done
        if [[ ${HTTP_PORT_STATUS} -eq 0 ]]; then
            # NOTE(review): HTTP_PORT is never assigned anywhere in this
            # script, so this message prints an empty port — confirm whether
            # it should be sourced from the service configuration.
            echo "[Failed]-- http port ${HTTP_PORT} start fail!"
            exit 1
        fi
        echo "(PID=$PID)...[Success]"
    fi
}
# Check whether the java process with the given pid is listening on any
# TCP port. $1: pid. Return value: 0 = not listening, 1 = listening.
# NOTE(review): netstat -p only reveals pid/program for processes owned by
# the caller (or as root); with insufficient privileges this reports 0.
function checkPortStatus() {
    # Process listening port status, 0:not listen, 1:listen
    HTTP_PORT_STATUS=0
    # $1 is pid
    PID_TMP="$1"
    # netstat prints "<pid>/java" in its program column
    PID_PNAME="${PID_TMP}/java"
    LISTEN_PID_COUNT=`netstat -atnp|grep "${PID_PNAME}"|wc -l`
    if [[ ${LISTEN_PID_COUNT} -ne 0 ]]; then
        HTTP_PORT_STATUS=1
    else
        HTTP_PORT_STATUS=0
    fi
    return $HTTP_PORT_STATUS
}
# Stop the service: SIGTERM first, poll up to ~10s for a graceful exit,
# then fall back to SIGKILL.
function shutdown(){
    getPID
    if [[ ${PID} -ne 0 ]]; then
        echo -n "Stopping ${PROJECT_NAME}:${SERVICE_NAME}(PID=${PID})..."
        kill ${PID}
        if [[ $? -ne 0 ]]; then
            echo "[Failed]"
            exit 1
        fi
        # Wait for graceful exit (20 polls x 0.5s).
        for i in $(seq 20)
        do
            sleep 0.5
            getPID
            if [[ ${PID} -eq 0 ]]; then
                checkPortStatus "${PID}"
                HTTP_PORT_STATUS=$?
                if [[ ${HTTP_PORT_STATUS} -eq 0 ]]; then
                    break
                fi
            fi
            echo -e ".\c"
        done
        getPID
        if [[ ${PID} -eq 0 ]]; then
            echo "[Success]"
        else
            # Still alive after the grace period: force kill.
            kill -9 ${PID}
            if [[ $? -ne 0 ]]; then
                echo "[Failed]"
                exit 1
            fi
            echo "some task is running in background,force stop it.[Success]"
        fi
    else
        echo "${PROJECT_NAME}:${SERVICE_NAME} is not running"
    fi
}
# Report service status; exits 99 when the process is up but not
# listening on any TCP port.
function getServerStatus(){
    getPID
    if [[ ${PID} -ne 0 ]]; then
        checkPortStatus "${PID}"
        HTTP_PORT_STATUS=$?
        if [[ ${HTTP_PORT_STATUS} -eq 0 ]]; then
            # NOTE(review): HTTP_PORT is never set in this script, so the
            # message prints an empty port value — confirm intended source.
            echo "${PROJECT_NAME}:${SERVICE_NAME} port ${HTTP_PORT} is not listening(PID=${PID})";
            exit 99;
        fi
        echo "${PROJECT_NAME}:${SERVICE_NAME} is running(PID=${PID})"
    else
        echo "${PROJECT_NAME}:${SERVICE_NAME} is not running"
    fi
}
# Restart: stop, brief pause, then start again.
function restart(){
    shutdown
    sleep 1
    startup
}
# Dispatch on the first CLI argument.
case "$1" in
    restart)
        restart;;
    start)
        startup;;
    stop)
        shutdown;;
    status)
        getServerStatus;;
    *)
        echo $"Usage: ${SHELL_FILE_NAME} {start|stop|status|restart}"
esac
def find_max_subarray_sum(arr):
    """Return the maximum sum over all contiguous subarrays of ``arr``.

    Uses Kadane's algorithm in O(n) time. The empty subarray (sum 0) is
    allowed, matching the original baseline of 0, so an all-negative input
    yields 0.

    The original version never reset the running sum, so it returned the
    best PREFIX sum instead of the best subarray sum (e.g. [1, -2, 3, 5, -2]
    gave 7 instead of 8 for the subarray [3, 5]).
    """
    max_sum = 0
    sum_so_far = 0
    for value in arr:
        # Extend the current run, but never let it drop below zero: a
        # negative running sum can only hurt any subarray extending it.
        sum_so_far = max(0, sum_so_far + value)
        max_sum = max(max_sum, sum_so_far)
    return max_sum
print(find_max_subarray_sum([1, -2, 3, 5, -2])) |
/**
 * Bar chart model: wires xyChartBase with an ordinal x scale and a forced
 * zero baseline, defines the group and bar d3 layers, and exposes the chart
 * function with its calc (scale/state) and build (DOM) phases.
 */
mc.models.bar = function bar(model) {
  model = model || {};
  //============================================================
  // Public Variables with Default Settings

  // Components
  model.xyChartBase = mc.models.xyChartBase(model); // base settings, scale calculations, and wrappers

  // Settings
  model.name = 'bar'; // used for top level class, usually matches model name

  // Accessors

  // Setup Component Static Settings
  model.xyChartBase
    .xScale(d3.scale.ordinal()) //using ordinal in this model to make sure each bar/bar group is equal-distance apart and equal-width
    .yForce(0); // bar chart's should always force 0 so bar goes from 0 to max/min (it should be possible to override/remove this setting via chart.yForce([]) //TODO: confirm this (yForce and all other component's API's should be rebinded onto this chart)

  // Setup Layers
  model.layers = model.layers || {};

  // One <g class="mc-group"> per data series; bars nest inside.
  model.layers.groups = mc.layer({
    dataBind: function(model, instance, data) {
      return this.selectAll('.mc-group')
        .data(model.series_, model.seriesKey_);
    },
    insert: function() {
      return this.append('g')
        .classed('mc-group', true);
    },
    events: {
      'merge': function(model, instance) {
        return this
          .attr('class', function(d,i) { return 'mc-group mc-group-' + i })
          .classed('mc-disabled', function(d) { return d.disabled });
      },
      'merge:transition': function(model, instance) {
        return this
          .attr('transform', function(d,i) { return 'translate(0,0)' });
      },
      'exit': function(model, instance) {
        return this
          .classed('mc-group-exit', true);
      },
      'exit:transition': function(model, instance) {
        // Collapse this group's bars to the zero baseline before removal.
        this.selectAll('.mc-bar')
          .attr('y', instance.y(0))
          .attr('height', 0)
          .remove();
        return this.remove();
      }
    }
  });

  // One <rect class="mc-bar"> per data point within each group.
  model.layers.bars = mc.layer({
    dataBind: function(model, instance, data) {
      return this.selectAll('.mc-bar')
        .data(model.values_, model.pointKey_);
    },
    insert: function() {
      return this.append('rect')
        .attr('class', function(d, i) { return 'mc-bar'; });
    },
    events: {
      'enter': function(model, instance) {
        // Bars are anchored at the zero baseline: positive values grow up
        // from y(0), negative values hang down from it.
        return this
          .attr('x', instance.x0Calc)
          .attr('y', instance.y0Calc)
          //TODO: investigate implementation of model.y_ and instance.y... scales MAY be shared between instances, BUT calculated for each instance when generating chart
          // **this might be OK, tho might not. Need to test more on chart model's with multiple instances (ie. selection of more than 1 element with same model but different data)
          .attr('y', function(d) { return model.y_.apply(this, arguments) > 0 ? instance.y0Calc.apply(this, arguments) : instance.y0(0) })
          .attr('width', instance.x.rangeBand())
          .attr('height', function() {
            return Math.abs(instance.y0Calc.apply(this, arguments) - instance.y0(0))
          });
      },
      'merge': function(model, instance) {
        return this
          .attr('class', (d, i) => 'mc-bar mc-bar-' + i);
      },
      'merge:transition': function(model, instance) {
        return this
          .attr('x', instance.xCalc)
          .attr('y', function(d) { return model.y_.apply(this, arguments) > 0 ? instance.yCalc.apply(this, arguments) : instance.y(0) })
          .attr('width', instance.x.rangeBand())
          .attr('height', function() {
            return Math.abs(instance.yCalc.apply(this, arguments) - instance.y(0))
          });
      },
      'exit': function(model, instance) {
        return this
          .classed('mc-bar-exit', true);
      },
      'exit:transition': function(model, instance) {
        // Shrink to the baseline, then remove.
        return this
          .attr('y', instance.y(0))
          .attr('height', 0)
          .remove();
      }
    }
  });
  //------------------------------------------------------------

  // Phase 1: compute scales/state for this instance (delegates to the base).
  chart.calc = function(instance, data) {
    model.xyChartBase
      .xRangeBands((model,instance) => [ [0, instance.width] , .1 ] );
    model.xyChartBase.calc.call(this, instance, data);
    //TODO: see if __chart__ keys/values can/should only go in chart.calc calls
    // **consider placement based on parent chart overriding __chart__ values
    // **remember that __chart__.dimension is used to store old dimension from last call
    this.__chart__.chart = chart;
    this.__chart__.update = () => instance.container.call(chart);
    return chart;
  };

  // Phase 2: build/refresh the DOM layers for this instance.
  chart.build = function(instance, data) {
    model.xyChartBase.build.call(this, instance, data);
    //------------------------------------------------------------
    // Setup Chart Layers
    instance.gEnter.append('g').attr('class', 'mc-groups');
    //------------------------------------------------------------

    //------------------------------------------------------------
    // Core Chart Code
    //TODO: consider consistent naming... chart.build vs layer.draw ? ... chart.calc vs layer.dataBind (maybe not this one?)
    // while 'data' is passed in here and below, still using data bound to selection element, and d3 'trickling down' (THE D3 WAY)
    instance.groups = model.layers.groups.draw(instance.g.select('.mc-groups'), model, instance, data);
    instance.bars = model.layers.bars .draw(instance.groups, model, instance, data);
    //------------------------------------------------------------
    return chart;
  };

  // Entry point: called via selection.call(chart); runs both phases per datum.
  function chart(selection, instance) {
    selection.each(function(data) {
      instance = instance || {};
      chart.calc.call(this, instance, data);
      chart.build.call(this, instance, data);
    });
    return chart;
  }

  //============================================================
  // Expose Public API
  mc.rebind(chart, model.xyChartBase);
  //TODO: figure out a way to not need this
  chart.rebind = function() {
    mc.rebind(chart, model.xyChartBase);
    return chart;
  }
  //------------------------------------------------------------

  return chart;
};
|
package com.aqzscn.www.weixin.domain;
import weixin.popular.bean.message.EventMessage;
/**
 * Custom filter interface.
 * Implemented to support passive (auto-)replies to WeChat messages.
 */
public interface CustomFilter {
    /**
     * Result returned to the sender.
     * @return message content
     */
    String getResult();
    /**
     * Feature code this filter responds to.
     */
    String getKey();
    /**
     * Whether the next filter in the chain should be executed.
     * @param eventMessage WeChat message body
     * @return boolean flag
     */
    boolean next(EventMessage eventMessage);
}
|
package com.biniam.designpatterns.command.foobarmotorco;
/**
* @author <NAME>
*/
public class Radio {

    private final static Integer MIN_VOLUME = 0;
    private final static Integer MAX_VOLUME = 10;
    private final static Integer DEFAULT_VOLUME = 5;

    // Whether the radio is currently powered on.
    private boolean on;
    // Current volume level, kept within [MIN_VOLUME, MAX_VOLUME].
    private Integer volume;

    /** Creates a radio that is off with the default volume level. */
    public Radio() {
        on = false;
        volume = DEFAULT_VOLUME;
    }

    /** @return true when the radio is powered on */
    public boolean isOn() {
        return on;
    }

    /** Powers the radio on and reports the current volume. */
    public void on() {
        this.on = true;
        System.out.println("Radio is on. Volume level is " + getVolume());
    }

    /** Powers the radio off. */
    public void off() {
        this.on = false;
        System.out.println("Radio is off");
    }

    /** @return the current volume level */
    public Integer getVolume() {
        return volume;
    }

    /**
     * Raises the volume by one step while the radio is on and below the
     * maximum. Named to match the convention of {@link #volumeDown()}.
     */
    public void volumeUp() {
        if (isOn() && getVolume() < MAX_VOLUME) {
            volume++;
            System.out.println("Volume turned up to level " + getVolume());
        }
    }

    /**
     * Raises the volume by one step.
     *
     * @deprecated kept for backward compatibility with existing callers;
     *             use {@link #volumeUp()} instead (consistent casing).
     */
    @Deprecated
    public void VolumeUp() {
        volumeUp();
    }

    /** Lowers the volume by one step while the radio is on and above the minimum. */
    public void volumeDown() {
        if (isOn() && getVolume() > MIN_VOLUME) {
            volume--;
            System.out.println("Volume turned down to level " + getVolume());
        }
    }
}
|
#! /bin/bash
# Redeploy the wxyapi docker container: pull latest code, tear down the old
# container and image, rebuild, and restart with mysql/redis links.
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/software/node-v10.15.3-linux-x64/bin:/usr/local/software/go/bin
# NOTE(review): directory is "wxyadpi" while the image/container is "wxyapi" —
# looks like a typo in one of the two; confirm which spelling is intended.
# Abort if the cd fails so we never run git/docker in the wrong directory.
cd /var/www/html/wxyadpi || exit 1
echo "更新代码"
git pull -v --all
name=wxyapi
echo "开始停止 $name 容器"
docker stop "$name"
echo "停止容器 $name 成功"
echo "开始删除 $name 容器"
docker rm "$name"
echo "删除 $name 容器成功"
# Remove dangling (untagged) images.
docker images|grep none|awk '{print $3 }'|xargs docker rmi
imagesid=$(docker images|grep -i "$name"|awk '{print $3}')
if [ -z "$imagesid" ];then
    echo "镜像不存在!"
else
    echo "删除镜像id $imagesid"
    # intentionally unquoted: may contain multiple whitespace-separated ids
    docker rmi $imagesid -f
    echo "删除成功"
fi
docker build . -t wxyapi
docker run -itd --name wxyapi --link=mysql-dev:mysql-dev --link redis-test:redis-test -p 9111:9111 wxyapi
def digital_root(num):
    """Return the digital root of a non-negative integer.

    The digital root is found by repeatedly summing decimal digits until a
    single digit remains (e.g. 493193 -> 29 -> 11 -> 2).

    Fixes two bugs in the original: ``num /= 10`` used float division
    (corrupting the digit arithmetic in Python 3), and the leading digit
    left in ``num`` after the loop was never added to the sum.
    """
    if num < 10:
        return num
    total_sum = 0
    while num >= 10:
        total_sum += num % 10
        num //= 10  # integer division; `/=` would turn num into a float
    # `num` now holds the most significant digit; include it.
    total_sum += num
    return digital_root(total_sum)


# Demo input (the original script referenced an undefined `num` here).
num = 493193
answer = digital_root(num)
print(answer)
<gh_stars>0
var DEFAULT_LIMIT = 25;
var DEFAULT_OFFSET = 0;
module.exports = {
limit: DEFAULT_LIMIT,
offset: DEFAULT_OFFSET,
setLimit: function (giveLimit) {
this.limit = giveLimit;
},
getLimit: function () {
return this.limit;
},
setOffset: function (givenOffset) {
this.offset = givenOffset;
},
getOffset: function () {
return this.offset;
}
};
|
#!/bin/sh
SCRIPT="$0"
while [ -h "$SCRIPT" ] ; do
ls=`ls -ld "$SCRIPT"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=`dirname "$SCRIPT"`/"$link"
fi
done
if [ ! -d "${APP_DIR}" ]; then
APP_DIR=`dirname "$SCRIPT"`/..
APP_DIR=`cd "${APP_DIR}"; pwd`
fi
executable="./modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"
if [ ! -f "$executable" ]
then
mvn clean package
fi
# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -Dlogback.configurationFile=bin/logback.xml"
# complex module name used for testing
ags="$@ generate -i modules/swagger-codegen/src/test/resources/2_0/petstore-with-fake-endpoints-models-for-testing.yaml -l perl -o samples/client/petstore/perl -DhideGenerationTimestamp=true"
# java $JAVA_OPTS -jar $executable $ags
# java $JAVA_OPTS -jar $executable $ags --additional-properties moduleName=Something::Deep -o samples/client/petstore/perl/deep_module_test
|
<reponame>KRISHVIMAL38/File_sharing_system.github.io
console.log('This is node App') |
<reponame>biancahng/turma-de-elite-backend<filename>src/main/java/com/devaneios/turmadeelite/repositories/LogStatusUserRepository.java
package com.devaneios.turmadeelite.repositories;
import com.devaneios.turmadeelite.entities.LogStatusUser;
import org.joda.time.DateTime;
import org.joda.time.LocalDateTime;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.stereotype.Repository;
/**
 * Repository over the user-status audit log. Each row records a status
 * change, storing the PREVIOUS active flag in old_is_active.
 */
@Repository
public interface LogStatusUserRepository extends PagingAndSortingRepository<LogStatusUser,Long> {

    // Most recent previous-status flag for a user, or null when the user
    // has no log entries.
    @Query(value = "SELECT old_is_active FROM log_status_user WHERE user_id = :user_id ORDER BY date_action DESC LIMIT 1", nativeQuery = true)
    Boolean getLastOldStatusUser(long user_id);

    // Counts entries in the given month/year whose previous status was
    // inactive — presumably users that became active; confirm against callers.
    // NOTE(review): DATE_PART inside JPQL relies on the underlying SQL
    // dialect (PostgreSQL-style); confirm portability if the DB changes.
    @Query("SELECT count(u.id) FROM LogStatusUser u WHERE u.old_is_active=FALSE AND DATE_PART('MONTH', u.date_action) = :month AND DATE_PART('YEAR', u.date_action) = :year")
    int countActiveUsers(int month, int year);

    // Counts entries whose previous status was active — presumably users
    // that became inactive in that month/year.
    @Query("SELECT count(u.id) FROM LogStatusUser u WHERE u.old_is_active=TRUE AND DATE_PART('MONTH', u.date_action) = :month AND DATE_PART('YEAR', u.date_action) = :year")
    int countInactiveUsers(int month, int year);

    // Appends a log row for the user with today's date and the given
    // previous-status flag.
    @Modifying
    @Query(value = "INSERT INTO log_status_user(user_id, date_action, old_is_active) VALUES (:user_id, CURRENT_DATE, :old_is_active);",nativeQuery = true)
    void insertLogStatusUser(Long user_id, Boolean old_is_active);
}
|
<reponame>kvithana/spotify-audio-features-to-csv
import Spotify from 'spotify-web-api-node'
import { clone } from 'lodash'
import { SpotifyAnalysisError } from './_error'
/**
 * A wrapper around a Spotify API handler to assist in data importing
 * functionality. All bulk fetch methods batch requests in chunks of 50 to
 * stay within the Spotify Web API per-call limits; initialise() must be
 * resolved before any other method is called.
 */
class _Spotify {
    s: Spotify
    // Set once clientCredentialsGrant succeeds; guards every API call.
    private _initialised: boolean

    constructor() {
        this.s = new Spotify()
        this.s.setClientId(process.env.SPOTIFY_CLIENT_ID)
        this.s.setClientSecret(process.env.SPOTIFY_CLIENT_SECRET)
        this._initialised = false
    }

    /**
     * Initialise the Spotify library by granting an access token. This method
     * **must be resolved before calling any other methods**.
     */
    public async initialise(): Promise<void> {
        const accessToken = await this.s
            .clientCredentialsGrant()
            .then(({ body }) => {
                this._initialised = true
                return body.access_token
            })
            .catch((err) => {
                throw new SpotifyAnalysisError('error with getting client credentials on initialisation:', err)
            })
        this.s.setAccessToken(accessToken)
    }

    /**
     * Shared batching loop for the bulk endpoints: drains `ids` in chunks
     * of 50, resolves each chunk through `fetch`, and concatenates results
     * in order. Replaces three previously-duplicated copies of this loop.
     * @param ids full list of Spotify URIs
     * @param fetch resolves one chunk (max 50 ids) to its result items
     * @param label resource name used in error logging
     */
    private async _getBatched<T>(
        ids: string[],
        fetch: (chunk: string[]) => Promise<T[]>,
        label: string,
    ): Promise<T[]> {
        this._assertInitialised()
        let results: T[] = []
        // Work on a copy: splice() mutates the array it drains.
        const remaining = clone(ids)
        do {
            await fetch(remaining.splice(0, 50))
                .then((items) => {
                    results = results.concat(items)
                })
                .catch((err) => {
                    console.error(`error with getting ${label}:`, err)
                    throw err
                })
        } while (remaining.length)
        return results
    }

    /**
     * Gets Spotify data for any number of tracks (unrestricted from 50 track direct
     * API call limit).
     * @param tracks array of track URIs
     */
    public async getTracks(tracks: string[]): Promise<SpotifyApi.TrackObjectFull[]> {
        return this._getBatched(tracks, (chunk) => this.s.getTracks(chunk).then(({ body }) => body.tracks), 'tracks')
    }

    /**
     * Gets Spotify data for any number of artists (unrestricted from 50 artists direct
     * API call limit).
     * @param artists array of artist URIs
     */
    public async getArtists(artists: string[]): Promise<SpotifyApi.ArtistObjectFull[]> {
        return this._getBatched(artists, (chunk) => this.s.getArtists(chunk).then(({ body }) => body.artists), 'artists')
    }

    /**
     * Get audio feature information for tracks identified by its unique Spotify
     * ID.
     * @param tracks array of track URIs
     */
    public async getAudioFeatures(tracks: string[]): Promise<SpotifyApi.AudioFeaturesObject[]> {
        return this._getBatched(
            tracks,
            (chunk) => this.s.getAudioFeaturesForTracks(chunk).then(({ body }) => body.audio_features),
            'features',
        )
    }

    /**
     * Gets playlist data for a given playlist URI.
     * @param playlist playlist URI
     */
    public async getPlaylist(playlist: string): Promise<SpotifyApi.SinglePlaylistResponse> {
        this._assertInitialised()
        return this.s.getPlaylist(playlist).then(({ body }) => body)
    }

    /**
     * Gets playlist tracks for a given playlist URI.
     * @param playlist playlist URI
     */
    public async getPlaylistTracks(playlist: string): Promise<SpotifyApi.PlaylistTrackObject[]> {
        this._assertInitialised()
        return this.s.getPlaylistTracks(playlist).then(({ body }) => body.items)
    }

    /**
     * Ensure initialised has been called and an access token is set
     */
    private _assertInitialised() {
        if (!this._initialised) {
            throw new SpotifyAnalysisError('initialise() method must be called before other methods')
        }
        return true
    }
}
export default _Spotify
|
<gh_stars>0
// Doxygen-generated navigation index for TestDequantizeLayerVisitor:
// [ display name, target xhtml anchor, children ] triples.
// NOTE: generated file — do not edit by hand.
var class_test_dequantize_layer_visitor =
[
    [ "TestDequantizeLayerVisitor", "class_test_dequantize_layer_visitor.xhtml#aca17470039e616c99481dc691ad49c01", null ],
    [ "VisitDequantizeLayer", "class_test_dequantize_layer_visitor.xhtml#ae988a88e27a6fd19c8c34565856832b6", null ]
];
#!/bin/sh
# Author: Zaibai
# This Shell module allows to filter the sending email of snmp traps to avoid flooding.
# It works with snmptt among others.
##########
# This function manages the filtering of snmp traps
# Usage Syntax:
# FunctionName DeviceName SNMP-OID [ResetNumberOccurrenceAfterXHour=5 NumberOccurrenceBeforeFiltering=20 FilteringDurationInHour=10]
# Sample - Standard: fTrapFilter "device-name1" "[.1.3.6.1.4.1.9.9.41.2.0.1]"
# Sample - Custom: fTrapFilter "device-name2" "[.1.3.6.1.2.1.17.0.2]" "10" "30" "5"
# Return value of the function:
# 0 = Cancelled/filtered mail
# 1 = Sending the mail
# 2 = Sending the mail in indicating the flood and the duration of the filtering
##########
# Filters repeated SNMP traps per (device, OID) pair using a small on-disk
# database of "device;oid;firstSeen;count" lines. The LAST line printed is
# always "<returnCode> <filteringHours>" for fSendTrap:
#   0 = cancelled/filtered, 1 = send, 2 = send + flood notice.
# Fix over the original: previously-unquoted expansions in [ -z ... ],
# grep/redirect targets are now quoted to avoid word-splitting surprises.
fTrapFilter()
{
	# Variable - reset (just in case)
	unset iResetOccurrenceAfterHour
	unset iOccurrenceMax
	unset iFilteringTimeHour
	unset iReturn
	# Variable - Argument
	sDevice="$1"
	sOid="$2"
	iResetOccurrenceAfterHour="${3:-5}"
	iOccurrenceMax="${4:-20}"
	iFilteringTimeHour="${5:-10}"
	# Variable - Other
	fBdd="/var/log/snmptt/bddTrapFilter.log"
	[ $iTest -eq 1 ] && fBdd="bddTrapFilter.log"
	sNotFound="NotFound"
	sDateTimeNow=$(date +"%Y-%m-%d %H:%M:%S")
	# We create the file $fBdd if not present
	if [ ! -f "$fBdd" ]; then
		touch "$fBdd"
		chmod 644 "$fBdd"
	fi
	# We recover the information necessary for the processing
	sSearch=$(grep --fixed-strings --line-number "$sDevice;$sOid;" "$fBdd" || echo "$sNotFound")
	sDateTimeOccurrence=$(printf '%s' "$sSearch" | awk -F ";" -v var="$sDateTimeNow" '{print ($3 == "" ? var : $3)}')
	iOccurrence=$(printf '%s' "$sSearch" | awk -F ";" '{print ($4 == "" ? 1 : $4)}')
	iLine=$(printf '%s' "$sSearch" | awk -F ":" '{print ($1 ~ /^[0-9]+$/ ? $1 : 0)}')
	iDateTimeNowSecond=$(date --date "${sDateTimeNow}" +%s)
	iDateTimeOccurrenceSecond=$(date --date "${sDateTimeOccurrence}" +%s)
	iDateTimeDiffSecond=$(($iDateTimeNowSecond-$iDateTimeOccurrenceSecond))
	iDateTimeDiffHour=$(($iDateTimeDiffSecond/3600))
	# If the database does not know this combination ("$sDevice;$sOid;") then we add it
	if [ "$sSearch" = "$sNotFound" ]; then
		echo "$sDevice;$sOid;$sDateTimeNow;$iOccurrence" >> "$fBdd"
	# Else, if the last occurrence is less than $iResetOccurrenceAfterHour hours or than the filtering is in progress, then we increment the counter
	elif ([ $iOccurrence -lt $iOccurrenceMax ] && [ $iDateTimeDiffHour -lt $iResetOccurrenceAfterHour ]) || ([ $iOccurrence -ge $iOccurrenceMax ] && [ $iDateTimeDiffHour -lt $iFilteringTimeHour ]); then
		iOccurrence=$(($iOccurrence+1))
		sed -i "${iLine}s/.*/${sDevice};${sOid};${sDateTimeOccurrence};${iOccurrence}/" "$fBdd"
	# Else we reset the counter
	else
		iOccurrence=1
		sed -i "${iLine}s/.*/${sDevice};${sOid};${sDateTimeNow};${iOccurrence}/" "$fBdd"
	fi
	# If the counter exceeds $iOccurrenceMax and the last occurrence is less than $iFilteringTimeHour hours then we return 0 (cancelled/filtered mail)
	if [ $iOccurrence -gt $iOccurrenceMax ] && [ $iDateTimeDiffHour -lt $iFilteringTimeHour ]; then
		[ -z "$iReturn" ] && iReturn=0
	# Else if the counter is equal at $iOccurrenceMax we return 2 (mail sent in indicating the flood and the duration of the filtering)
	elif [ $iOccurrence -eq $iOccurrenceMax ]; then
		[ -z "$iReturn" ] && iReturn=2
		# We set the current date and time as the starting point for the duration of the flood
		sed -i "${iLine}s/.*/${sDevice};${sOid};${sDateTimeNow};${iOccurrence}/" "$fBdd"
	# Else we return 1 (we send the mail)
	else
		[ -z "$iReturn" ] && iReturn=1
	fi
	# In case of a test, the different values obtained are displayed
	if [ $iTest -eq 1 ]; then
		# NOTE(review): echo '\n...' relies on the shell's echo interpreting
		# backslash escapes (dash does; bash's builtin does not by default).
		# printf would be portable; left as-is to preserve output format.
		echo '\n------------ TEST fTrapFilter ------------'
		echo "Device: $sDevice"
		echo "Oid: $sOid"
		echo "Reset the number of occurrences after: $iResetOccurrenceAfterHour hours"
		echo "Number of occurrences before filtering: $iOccurrenceMax"
		echo "Filtering duration: $iFilteringTimeHour hours"
		echo "Combination found (Device;Oid;): $sSearch"
		echo "Date of the first occurrence (or start of filtering): $sDateTimeOccurrence"
		echo "Current number of occurrences (after modification): $iOccurrence"
		echo "Location in the database: Line $iLine"
		echo "Timestamp of the first occurrence (or start of filtering): $iDateTimeOccurrenceSecond seconds"
		echo "Current timestamp: $iDateTimeNowSecond seconds"
		echo "Difference between the two timestamps in seconds: $iDateTimeDiffSecond seconds"
		echo "Difference between the two timestamps in hours: $iDateTimeDiffHour hours"
		echo "Returned values (0:cancelled/filtered;1:OK;2:OK+flood/indicated filtering duration): $iReturn $iFilteringTimeHour"
		echo '------------ TEST fTrapFilter ------------'
	fi
	# Returned values
	echo "$iReturn $iFilteringTimeHour"
}
##########
# This function manages the sending of snmp traps by mail
# Usage Syntax:
# FunctionName ResultfTrapFilter ContentMail SubjectMail [AddressesTo="emailto1@domain.com, emailto2@domain.com" AddresseFrom="emailfrom@domain.com"]
# Sample - Standard: fSendTrap "1 10" "From: $2\nOid: $4\nDescription: $6" "[TRAP] $2: $3"
# Sample - Custom: fSendTrap "2 15" "From: $2\nOid: $4\nNode: $5\nPort: $6\nCode: $7\nDescription: $8" "[TRAP] $2: $3" "email1@domain.com, email2@domain.com" "sender@domain.com"
##########
# Sends (or suppresses) the trap mail according to fTrapFilter's verdict,
# which arrives in $1 as "<returnCode> <filteringHours>".
fSendTrap()
{
	# Variable - reset (just in case)
	unset sReceiver
	unset sSender
	# Variable - Argument
	iReturnST=$(echo "$1" | cut -d ' ' -f 1)
	iFilteringTimeHourST=$(echo "$1" | cut -d ' ' -f 2)
	sBody="$2"
	sSubject="$3"
	# Default recipients/sender; overridden below when running in test mode.
	sReceiver="${4:-emailto1@domain.com, emailto2@domain.com}"
	sSender="${5:-emailfrom@domain.com}"
	[ $iTest -eq 1 ] && sReceiver="emailtestto1@domain.com"
	[ $iTest -eq 1 ] && sSender="emailtestfrom@domain.com"
	# Management of mail sending
	# 0 = Cancelled/filtered mail
	# 1 = Sending the mail
	# 2 = Sending the mail in indicating the flood and the duration of the filtering
	case $iReturnST in
		1)
			echo "$sBody" | mail -r "$sSender" -s "$sSubject" "$sReceiver"
		;;
		2)
			# Flood detected: annotate body and subject before sending.
			sBody="$sBody \nFiltering: $iFilteringTimeHourST hours"
			sSubject=$(echo "$sSubject" | sed -e 's/\[TRAP\]/[TRAP][FLOOD]/')
			echo "$sBody" | mail -r "$sSender" -s "$sSubject" "$sReceiver"
		;;
	esac
	# In case of a test, the different values obtained are displayed
	if [ $iTest -eq 1 ]; then
		sBodyTmp=$(printf '%s' "$sBody" | sed -e 's/\\n/\n /g')
		echo '\n------------ TEST fSendTrap ------------'
		echo "Values returned by fTrapFilter (0:cancelled/filtered;1:OK;2:OK+flood/indicated filtering duration): $iReturnST $iFilteringTimeHourST"
		echo "Sender: $sSender"
		echo "To: $sReceiver"
		echo "Subject: $sSubject"
		echo "Body: "
		printf ' %b\n' "$sBodyTmp"
		echo '------------ TEST fSendTrap ------------\n '
	fi
}
# This function is executed if the script is called with the "test" argument
# This function is executed if the script is called with the "test" argument.
# Runs one standard and one custom filter/send cycle against sample devices,
# printing debug output, then exits.
fMainTF()
{
	# Test - Standard
	sTest1=$(fTrapFilter "device name test1" "[.1.3.6.1.4.1.9.9.41.2.0.1]")
	# fTrapFilter's last output line carries the return values; the rest is
	# debug text to display.
	sTestReturn1=$(echo "$sTest1" | tail -n1)
	sTest1=$(echo "$sTest1" | sed '$d')
	echo "$sTest1"
	fSendTrap "$sTestReturn1" "From: device name test1\nOid: [.1.3.6.1.4.1.9.9.41.2.0.1]\nDescription: test" "[TRAP] device name test1"
	# Test - Custom
	sTest2=$(fTrapFilter "device name test2" "[.1.3.6.1.2.1.17.0.2]" "15" "6" "1")
	sTestReturn2=$(echo "$sTest2" | tail -n1)
	sTest2=$(echo "$sTest2" | sed '$d')
	echo "$sTest2"
	fSendTrap "$sTestReturn2" "From: device name test2\nOid: [.1.3.6.1.2.1.17.0.2]\nDescription: test" "[TRAP] device name test2"
	exit 1
}
# We check if it is a test.
# Invoking the script as `script.sh test ...` switches to test mode:
# fMainTF exercises the filter/send functions with sample data and exits.
iTest=0
arg1_lower=$(echo "$1" | tr '[:upper:]' '[:lower:]')
if [ "$arg1_lower" = "test" ]; then
	iTest=1
	shift
	fMainTF "$@"
fi
# Possible improvement
# Database archiving
|
def longest_common_substring(str1, str2):
    """Return the longest contiguous substring shared by ``str1`` and ``str2``.

    Classic dynamic programming: ``comparison[i][j]`` holds the length of
    the longest common suffix of ``str1[:i]`` and ``str2[:j]``.
    O(len1 * len2) time and space; returns "" when nothing is shared.

    The stray debug ``print(comparison)`` from the original is removed.
    """
    # (len1+1) x (len2+1) table; row/col 0 stay zero as the empty-prefix base.
    comparison = [[0 for i in range(len(str2) + 1)] for j in range(len(str1) + 1)]
    result = ""  # best common substring found so far
    # iterate through the 2D array comparing each character
    for i in range(1, len(str1) + 1):
        for j in range(1, len(str2) + 1):
            if str1[i - 1] == str2[j - 1]:
                comparison[i][j] = comparison[i - 1][j - 1] + 1
                # update the result if a longer common substring is found
                if comparison[i][j] > len(result):
                    result = str1[i - comparison[i][j]:i]
            else:
                comparison[i][j] = 0
    return result
# Demo inputs (the original referenced undefined str1/str2 at module scope,
# which raised NameError on import).
str1 = "abcdef"
str2 = "zbcdf"
common_str = longest_common_substring(str1, str2)
print("The longest common substring is: " + common_str)
#!/bin/bash
#SBATCH --time=90:55:00
#SBATCH --account=vhs
#SBATCH --job-name=sea_mem_3n_6t_6d_1000f_617m_10i
#SBATCH --nodes=3
#SBATCH --nodelist=comp02,comp03,comp04
#SBATCH --output=./results/exp_node/run-2/sea_mem_3n_6t_6d_1000f_617m_10i/slurm-%x-%j.out
# Sea memory-tier experiment: 3 nodes, per-disk + tmpfs scratch, timed run.
source /home/vhs/Sea/.venv/bin/activate
export SEA_HOME=/home/vhs/Sea
# Remove any stale scratch directories from a previous run on all nodes.
srun -N3 rm -rf /disk0/vhs/seatmp /disk1/vhs/seatmp /disk2/vhs/seatmp /disk3/vhs/seatmp /disk4/vhs/seatmp /disk5/vhs/seatmp /dev/shm/seatmp
# NOTE(review): in the original, only `echo` ran under srun — the `&&`
# chained sync/drop_caches on the submitting node only. Run the whole
# pipeline on all three nodes; confirm passwordless sudo for tee exists
# on the compute nodes.
srun -N3 bash -c 'echo "Clearing cache" && sync && echo 3 | sudo tee /proc/sys/vm/drop_caches'
echo "Creating temp source mount directories"
srun -N3 mkdir /dev/shm/seatmp
srun -N3 mkdir /disk0/vhs/seatmp /disk1/vhs/seatmp /disk2/vhs/seatmp /disk3/vhs/seatmp /disk4/vhs/seatmp /disk5/vhs/seatmp
# Time the three per-node Sea launches, which run concurrently.
start=$(date +%s.%N)
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_node/run-2/sea_mem_3n_6t_6d_1000f_617m_10i/n0_sea_parallel.sh &
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_node/run-2/sea_mem_3n_6t_6d_1000f_617m_10i/n1_sea_parallel.sh &
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_node/run-2/sea_mem_3n_6t_6d_1000f_617m_10i/n2_sea_parallel.sh &
wait
end=$(date +%s.%N)
runtime=$(echo "$end - $start" | bc -l)
echo "Runtime: $runtime"
echo "Removing directories"
srun -N3 rm -rf /disk0/vhs/seatmp /disk1/vhs/seatmp /disk2/vhs/seatmp /disk3/vhs/seatmp /disk4/vhs/seatmp /disk5/vhs/seatmp /dev/shm/seatmp
|
// Search box input and the collection of FAQ panels it filters.
const input = document.getElementById("searchInput");
const divsList = document.getElementsByClassName("panel");
/**
 * Show only the panels whose text contains the search box value
 * (case-insensitive substring match). Panels carrying the "toggleAnswer"
 * class are answer bodies: on a match their preceding sibling (presumably
 * the question panel — confirm against the markup) is revealed instead.
 *
 * Fix: the original recomputed textContent.toUpperCase().indexOf(filter)
 * twice per panel; the match is now computed once per iteration.
 */
function searchFilter() {
  const filter = input.value.toUpperCase();
  for (var i = 0; i < divsList.length; i++) {
    const panel = divsList[i];
    const matches = panel.textContent.toUpperCase().indexOf(filter) > -1;
    if (matches && !panel.classList.contains("toggleAnswer")) {
      panel.style.display = "block";
    } else if (matches && panel.classList.contains("toggleAnswer")) {
      panel.previousElementSibling.style.display = "block";
    } else {
      panel.style.display = "none";
    }
  }
}
// Reset the search box and restore every panel's default display value.
function clearSearch() {
  input.value = "";
  for (var j = 0; j < divsList.length; j++) {
    divsList[j].style.display = "";
  }
}
|
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.tormenta.spout
import org.apache.storm.task.TopologyContext
import org.apache.storm.metric.api.IMetric
/**
* Abstraction for encapsulating metric options
*
* @author <NAME>
* @author <NAME>
*/
/**
 * Wraps a Storm metric together with the name and reporting interval
 * used to register it with the topology.
 *
 * @param name metric name used for registration
 * @param metric the Storm metric instance
 * @param timeBucketSizeInSecs reporting interval, in seconds
 */
case class Metric[T <: IMetric](name: String, metric: T, timeBucketSizeInSecs: Int) {
  // Register this metric with the given topology context.
  // Explicit `: Unit =` replaces the deprecated procedure syntax.
  private[spout] def register(context: TopologyContext): Unit = {
    context.registerMetric(name, metric, timeBucketSizeInSecs)
  }
}
import io.netty.buffer.ByteBuf;
/**
 * Placeholder {@link Protocol} implementation: both decode overloads are
 * unimplemented stubs that currently return null.
 * NOTE(review): callers must null-check the result until real decoding
 * logic is added — TODO confirm intended contract with the Protocol
 * interface's other implementations.
 */
public class CustomProtocolDecoder implements Protocol {
    /** Decode the buffer into an instance of {@code clazz}; stub, returns null. */
    @Override
    public Object decode(ByteBuf datas, Class clazz) {
        // Implement decoding logic based on the provided class type
        // Example decoding logic:
        // Read data from ByteBuf and construct an object of the specified class type
        // Return the decoded object
        return null; // Placeholder for actual decoding logic
    }

    /** Decode the buffer into an instance of the named class; stub, returns null. */
    @Override
    public Object decode(ByteBuf datas, String classTypeName) {
        // Implement decoding logic based on the class type name
        // Example decoding logic:
        // Read data from ByteBuf and construct an object of the specified class type
        // Return the decoded object
        return null; // Placeholder for actual decoding logic
    }

    // Handle potential exceptions during decoding process
    // Example exception handling:
    // Catch and handle any specific exceptions that may occur during decoding
    // Provide appropriate error handling or logging
}
<gh_stars>0
const Luhn = require('./luhn');
describe('Luhn', () => {
it('check digit', () => {
const luhn = new Luhn(34567);
expect(luhn.checkDigit).toEqual(7);
});
it('check digit again', () => {
const luhn = new Luhn(91370);
expect(luhn.checkDigit).toEqual(0);
});
it('addends', () => {
const luhn = new Luhn(12121);
expect(luhn.addends).toEqual([1, 4, 1, 4, 1]);
});
it('too large addend', () => {
const luhn = new Luhn(8631);
expect(luhn.addends).toEqual([7, 6, 6, 1]);
});
it('checksum', () => {
const luhn = new Luhn(4913);
expect(luhn.checksum).toEqual(22);
});
it('checksum again', () => {
const luhn = new Luhn(201773);
expect(luhn.checksum).toEqual(21);
});
it('invalid number', () => {
const luhn = new Luhn(738);
expect(luhn.valid).toEqual(false);
});
it('invalid number', () => {
const luhn = new Luhn(8739567);
expect(luhn.valid).toEqual(true);
});
it('create valid number', () => {
const number = Luhn.create(123);
expect(number).toEqual(1230);
});
it('create other valid number', () => {
const number = Luhn.create(873956);
expect(number).toEqual(8739567);
});
it('create yet another valid number', () => {
const number = Luhn.create(837263756);
expect(number).toEqual(8372637564);
});
});
|
// Contributor profile card: personal/social links rendered by the site.
module.exports = {
  name: "<NAME>",
  github: "https://github.com/aliosman21",
  email: "<EMAIL>",
  twitter: "@xxx", // placeholder handle
  facebook: "xxx", // placeholder handle
  linkedin: "in/xxx", // placeholder handle
};
|
#!/bin/bash
# Print distribution information via lsb_release, filtering out the
# "LSB Version" noise lines. Exits non-zero when lsb_release is missing.
LSB=$( which lsb_release )
if [[ ! ${LSB} ]]; then
  # Error goes to stderr, and we exit non-zero so callers can detect
  # the failure (a bare `exit` after a successful echo returned 0).
  echo "LSB is not installed. Unable to determine your distribution." >&2
  exit 1
else
  "${LSB}" -a | grep -v LSB
fi
<filename>assets/js/serviceWorker.js
const name = 'cache-v2';
const cacheFiles = [
'/static/html/offline.html'
]
// On install: activate this worker immediately (skipWaiting) and
// pre-cache the offline fallback page listed in cacheFiles.
self.addEventListener('install', evt => {
  self.skipWaiting();
  evt.waitUntil(
    caches.open(name).then(cache => {
      return cache.addAll(cacheFiles);
    })
  );
});
// On activate: delete every cache except the current version so stale
// assets from previous service worker versions are purged.
self.addEventListener('activate', event => {
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return Promise.all(
        cacheNames.map(cacheName => {
          // Strict inequality; the loose != was an idiom slip.
          if (cacheName !== name) {
            return caches.delete(cacheName);
          }
        })
      );
    })
  );
});
// Network-first fetch strategy: try the network, and on failure (e.g.
// offline) serve the pre-cached offline page instead.
self.addEventListener('fetch', event => {
  event.respondWith(
    fetch(event.request).then(res => {
      return res;
    }).catch(() => {
      // cache.match already returns a promise, so the redundant
      // async/await wrapper was removed.
      return caches.open(name).then(cache => {
        return cache.match('/static/html/offline.html');
      });
    })
  );
});
self.addEventListener('notificationclick', e => {
e.notification.close();
});
// On push: show a notification built from the push payload.
self.addEventListener('push', e => {
  // Parse the JSON payload once instead of three separate times.
  const payload = e.data.json();
  e.waitUntil(
    self.registration.showNotification(payload.title, {
      body: payload.body,
      icon: payload.icon,
      data: {
        dateOfArrival: Date.now()
      },
      vibrate: [100, 50, 100],
      badge: '/static/image/inticon-without-background.png'
    })
  )
});
package com.yin.springboot.thread.future;
import org.springframework.cache.annotation.Cacheable;
import java.util.concurrent.Callable;
/**
* Created by IntelliJ IDEA.
* User: Administrator
* Date:2019/7/23
* Time: 0:18
* To change this template use File | Settings | File Templates.
*/
/**
 * Minimal {@link Callable} that simply returns the name it was constructed
 * with; useful for exercising Future/executor plumbing in tests.
 */
public class TestFuture implements Callable<String> {
    /** Value returned by {@link #call()}; immutable after construction. */
    private final String name;

    TestFuture(String name) {
        this.name = name;
    }

    @Override
    public String call() throws Exception {
        // No real work: the callable's "result" is just the stored name.
        return name;
    }
}
|
'use strict';
// Unit tests for LangCtrl: verifies that changing the UI language
// delegates to angular-translate's $translate.use().
describe('lang-spec:', function () {
    beforeEach(angular.mock.module('hevicado'));

    describe('LangCtrl-spec:', function () {
        var mockTranslate;
        var ctrlScope;

        // Build a fresh controller with a mocked $translate before each test.
        beforeEach(inject(function ($controller, _$rootScope_) {
            // fresh scope per test so state does not leak between specs
            ctrlScope = _$rootScope_.$new();
            // mock dependencies: only the `use` method is needed
            mockTranslate = jasmine.createSpyObj('$translate', ['use']);
            // inject mocks
            $controller('LangCtrl', {
                $scope: ctrlScope,
                $translate: mockTranslate
            });
        }));

        it('should change current language', function () {
            // given: the controller exposes changeLanguage on its scope
            expect(ctrlScope.changeLanguage).toBeDefined();
            // when: the user switches the current language
            ctrlScope.changeLanguage('pl');
            // then: labels are re-translated via $translate.use
            expect(mockTranslate.use).toHaveBeenCalledWith('pl');
        });
    });
});
|
// for now, let's assume nbd/Promise is exactly the same as a native one
export default Promise;
|
<reponame>sonacy/ts-react
export * from './ReactSymbols'
export * from './ReactWorkTags'
export * from './ReactSideEffectTags'
export * from './ReactElementType'
|
<filename>rank_server/src/protocol/msg_rank_request.pb.cc
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: msg_rank_request.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "msg_rank_request.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/wire_format.h>
// @@protoc_insertion_point(includes)
namespace rank {
namespace {
const ::google::protobuf::Descriptor* RequestRankPacket_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
RequestRankPacket_reflection_ = NULL;
const ::google::protobuf::Descriptor* RequestRankResultPacket_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
RequestRankResultPacket_reflection_ = NULL;
} // namespace
void protobuf_AssignDesc_msg_5frank_5frequest_2eproto() {
protobuf_AddDesc_msg_5frank_5frequest_2eproto();
const ::google::protobuf::FileDescriptor* file =
::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
"msg_rank_request.proto");
GOOGLE_CHECK(file != NULL);
RequestRankPacket_descriptor_ = file->message_type(0);
static const int RequestRankPacket_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankPacket, rank_type_),
};
RequestRankPacket_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
RequestRankPacket_descriptor_,
RequestRankPacket::default_instance_,
RequestRankPacket_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankPacket, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankPacket, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(RequestRankPacket));
RequestRankResultPacket_descriptor_ = file->message_type(1);
static const int RequestRankResultPacket_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankResultPacket, rank_type_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankResultPacket, items_),
};
RequestRankResultPacket_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
RequestRankResultPacket_descriptor_,
RequestRankResultPacket::default_instance_,
RequestRankResultPacket_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankResultPacket, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestRankResultPacket, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(RequestRankResultPacket));
}
namespace {
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);
inline void protobuf_AssignDescriptorsOnce() {
::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,
&protobuf_AssignDesc_msg_5frank_5frequest_2eproto);
}
void protobuf_RegisterTypes(const ::std::string&) {
protobuf_AssignDescriptorsOnce();
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
RequestRankPacket_descriptor_, &RequestRankPacket::default_instance());
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
RequestRankResultPacket_descriptor_, &RequestRankResultPacket::default_instance());
}
} // namespace
void protobuf_ShutdownFile_msg_5frank_5frequest_2eproto() {
delete RequestRankPacket::default_instance_;
delete RequestRankPacket_reflection_;
delete RequestRankResultPacket::default_instance_;
delete RequestRankResultPacket_reflection_;
}
void protobuf_AddDesc_msg_5frank_5frequest_2eproto() {
static bool already_here = false;
if (already_here) return;
already_here = true;
GOOGLE_PROTOBUF_VERIFY_VERSION;
::rank::protobuf_AddDesc_msg_5frank_5fitem_2eproto();
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
"\n\026msg_rank_request.proto\022\004rank\032\023msg_rank"
"_item.proto\"&\n\021RequestRankPacket\022\021\n\trank"
"_type\030\001 \002(\005\"Q\n\027RequestRankResultPacket\022\021"
"\n\trank_type\030\001 \002(\005\022#\n\005items\030\002 \003(\0132\024.rank."
"RankItemPacket", 174);
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
"msg_rank_request.proto", &protobuf_RegisterTypes);
RequestRankPacket::default_instance_ = new RequestRankPacket();
RequestRankResultPacket::default_instance_ = new RequestRankResultPacket();
RequestRankPacket::default_instance_->InitAsDefaultInstance();
RequestRankResultPacket::default_instance_->InitAsDefaultInstance();
::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_msg_5frank_5frequest_2eproto);
}
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer_msg_5frank_5frequest_2eproto {
StaticDescriptorInitializer_msg_5frank_5frequest_2eproto() {
protobuf_AddDesc_msg_5frank_5frequest_2eproto();
}
} static_descriptor_initializer_msg_5frank_5frequest_2eproto_;
// ===================================================================
#ifndef _MSC_VER
const int RequestRankPacket::kRankTypeFieldNumber;
#endif // !_MSC_VER
RequestRankPacket::RequestRankPacket()
: ::google::protobuf::Message() {
SharedCtor();
}
void RequestRankPacket::InitAsDefaultInstance() {
}
RequestRankPacket::RequestRankPacket(const RequestRankPacket& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
}
void RequestRankPacket::SharedCtor() {
_cached_size_ = 0;
rank_type_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
RequestRankPacket::~RequestRankPacket() {
SharedDtor();
}
void RequestRankPacket::SharedDtor() {
if (this != default_instance_) {
}
}
void RequestRankPacket::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* RequestRankPacket::descriptor() {
protobuf_AssignDescriptorsOnce();
return RequestRankPacket_descriptor_;
}
const RequestRankPacket& RequestRankPacket::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_msg_5frank_5frequest_2eproto();
return *default_instance_;
}
RequestRankPacket* RequestRankPacket::default_instance_ = NULL;
RequestRankPacket* RequestRankPacket::New() const {
return new RequestRankPacket;
}
void RequestRankPacket::Clear() {
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
rank_type_ = 0;
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
bool RequestRankPacket::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
::google::protobuf::uint32 tag;
while ((tag = input->ReadTag()) != 0) {
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required int32 rank_type = 1;
case 1: {
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
input, &rank_type_)));
set_has_rank_type();
} else {
goto handle_uninterpreted;
}
if (input->ExpectAtEnd()) return true;
break;
}
default: {
handle_uninterpreted:
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
return true;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
return true;
#undef DO_
}
void RequestRankPacket::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// required int32 rank_type = 1;
if (has_rank_type()) {
::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->rank_type(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
}
::google::protobuf::uint8* RequestRankPacket::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// required int32 rank_type = 1;
if (has_rank_type()) {
target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->rank_type(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
return target;
}
int RequestRankPacket::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// required int32 rank_type = 1;
if (has_rank_type()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::Int32Size(
this->rank_type());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void RequestRankPacket::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const RequestRankPacket* source =
::google::protobuf::internal::dynamic_cast_if_available<const RequestRankPacket*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void RequestRankPacket::MergeFrom(const RequestRankPacket& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_rank_type()) {
set_rank_type(from.rank_type());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void RequestRankPacket::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void RequestRankPacket::CopyFrom(const RequestRankPacket& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool RequestRankPacket::IsInitialized() const {
if ((_has_bits_[0] & 0x00000001) != 0x00000001) return false;
return true;
}
void RequestRankPacket::Swap(RequestRankPacket* other) {
if (other != this) {
std::swap(rank_type_, other->rank_type_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata RequestRankPacket::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = RequestRankPacket_descriptor_;
metadata.reflection = RequestRankPacket_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
const int RequestRankResultPacket::kRankTypeFieldNumber;
const int RequestRankResultPacket::kItemsFieldNumber;
#endif // !_MSC_VER
RequestRankResultPacket::RequestRankResultPacket()
: ::google::protobuf::Message() {
SharedCtor();
}
void RequestRankResultPacket::InitAsDefaultInstance() {
}
RequestRankResultPacket::RequestRankResultPacket(const RequestRankResultPacket& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
}
void RequestRankResultPacket::SharedCtor() {
_cached_size_ = 0;
rank_type_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
RequestRankResultPacket::~RequestRankResultPacket() {
SharedDtor();
}
void RequestRankResultPacket::SharedDtor() {
if (this != default_instance_) {
}
}
void RequestRankResultPacket::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* RequestRankResultPacket::descriptor() {
protobuf_AssignDescriptorsOnce();
return RequestRankResultPacket_descriptor_;
}
const RequestRankResultPacket& RequestRankResultPacket::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_msg_5frank_5frequest_2eproto();
return *default_instance_;
}
RequestRankResultPacket* RequestRankResultPacket::default_instance_ = NULL;
RequestRankResultPacket* RequestRankResultPacket::New() const {
return new RequestRankResultPacket;
}
void RequestRankResultPacket::Clear() {
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
rank_type_ = 0;
}
items_.Clear();
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
bool RequestRankResultPacket::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
::google::protobuf::uint32 tag;
while ((tag = input->ReadTag()) != 0) {
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required int32 rank_type = 1;
case 1: {
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
input, &rank_type_)));
set_has_rank_type();
} else {
goto handle_uninterpreted;
}
if (input->ExpectTag(18)) goto parse_items;
break;
}
// repeated .rank.RankItemPacket items = 2;
case 2: {
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
parse_items:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, add_items()));
} else {
goto handle_uninterpreted;
}
if (input->ExpectTag(18)) goto parse_items;
if (input->ExpectAtEnd()) return true;
break;
}
default: {
handle_uninterpreted:
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
return true;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
return true;
#undef DO_
}
void RequestRankResultPacket::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// required int32 rank_type = 1;
if (has_rank_type()) {
::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->rank_type(), output);
}
// repeated .rank.RankItemPacket items = 2;
for (int i = 0; i < this->items_size(); i++) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, this->items(i), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
}
::google::protobuf::uint8* RequestRankResultPacket::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// required int32 rank_type = 1;
if (has_rank_type()) {
target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->rank_type(), target);
}
// repeated .rank.RankItemPacket items = 2;
for (int i = 0; i < this->items_size(); i++) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
2, this->items(i), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
return target;
}
int RequestRankResultPacket::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// required int32 rank_type = 1;
if (has_rank_type()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::Int32Size(
this->rank_type());
}
}
// repeated .rank.RankItemPacket items = 2;
total_size += 1 * this->items_size();
for (int i = 0; i < this->items_size(); i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->items(i));
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void RequestRankResultPacket::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const RequestRankResultPacket* source =
::google::protobuf::internal::dynamic_cast_if_available<const RequestRankResultPacket*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void RequestRankResultPacket::MergeFrom(const RequestRankResultPacket& from) {
GOOGLE_CHECK_NE(&from, this);
items_.MergeFrom(from.items_);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_rank_type()) {
set_rank_type(from.rank_type());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void RequestRankResultPacket::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void RequestRankResultPacket::CopyFrom(const RequestRankResultPacket& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool RequestRankResultPacket::IsInitialized() const {
if ((_has_bits_[0] & 0x00000001) != 0x00000001) return false;
for (int i = 0; i < items_size(); i++) {
if (!this->items(i).IsInitialized()) return false;
}
return true;
}
void RequestRankResultPacket::Swap(RequestRankResultPacket* other) {
if (other != this) {
std::swap(rank_type_, other->rank_type_);
items_.Swap(&other->items_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata RequestRankResultPacket::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = RequestRankResultPacket_descriptor_;
metadata.reflection = RequestRankResultPacket_reflection_;
return metadata;
}
// @@protoc_insertion_point(namespace_scope)
} // namespace rank
// @@protoc_insertion_point(global_scope)
|
import iconv from 'iconv-lite';
import encodings from 'iconv-lite/encodings';
import { clearData } from 'fastexpress';
import { sequelize, Accounts } from '../../src/models';
import Controller from '../../src/controllers/Accounts';
import truncate from '../truncate';
import usersFacture from '../factures/Users';
import accountsFacture from '../factures/Accounts';
import { EXCEPTION_NOT_FOUND } from '../../src/errors';
import { fields as accountFields } from '../../src/services/AccountService';
iconv.encodings = encodings;
let reqMock = {
query: {},
};
let resMock = {
json: jest.fn(),
};
describe('Accounts Controller should', () => {
let user;
let account;
beforeAll(async () => {
await truncate();
user = await usersFacture();
account = await accountsFacture({ UserId: user.id });
account = await Accounts.findById(account.id);
});
beforeEach(async () => {
const status = jest.fn();
reqMock = {
query: {},
params: {},
body: {},
};
resMock = {
status,
send: jest.fn(),
json: jest.fn(),
};
status.mockReturnValue(resMock);
});
afterAll(() => {
sequelize.close();
});
it('list accounts', async () => {
await Controller.list(reqMock, resMock);
expect(resMock.json).toBeCalled();
const response = resMock.json.mock.calls[0][0];
expect(response).toHaveProperty('data');
expect(response).toHaveProperty('pagination');
expect(response.data.length).toBeTruthy();
expect(response.data).toEqual(clearData([account], accountFields));
expect(response.pagination).toEqual({
currentPage: 1,
nextPage: null,
perPage: 100,
previousPage: null,
totalItems: 1,
totalPages: 1,
});
});
it('create account', async () => {
const body = {
UserId: user.id,
name: '<NAME>',
initalValue: '100.99',
type: 'wallet',
};
reqMock.body = body;
await Controller.create(reqMock, resMock);
let accountCreated = resMock.json.mock.calls[0][0];
accountCreated = accountCreated.toJSON();
expect(body.UserId).toEqual(accountCreated.UserId);
expect(body.name).toEqual(accountCreated.name);
expect(body.initalValue).toEqual(accountCreated.initalValue);
expect(body.type).toEqual(accountCreated.type);
});
it('get account', async () => {
reqMock.params.id = account.id;
await Controller.get(reqMock, resMock);
expect(resMock.json).toBeCalled();
const response = resMock.json.mock.calls[0][0];
expect(response).toEqual(account);
});
it('get account not find account', async () => {
reqMock.params.id = 99999999;
await Controller.get(reqMock, resMock);
expect(resMock.status).toBeCalled();
expect(resMock.send).toBeCalled();
expect(resMock.status.mock.calls[0][0]).toEqual(404);
expect(resMock.send.mock.calls[0][0]).toEqual(EXCEPTION_NOT_FOUND);
});
it('update account', async () => {
reqMock.params.id = account.id;
const body = {
name: 'BankTwo',
UserId: user.id,
initalValue: 40.7,
type: 'investing',
};
reqMock.body = body;
await Controller.update(reqMock, resMock);
account = await Accounts.findById(account.id);
expect(resMock.json).toBeCalled();
const response = resMock.json.mock.calls[0][0];
expect(response).toBeTruthy();
expect(response.toJSON()).toHaveProperty('name');
expect(response.toJSON()).toHaveProperty('UserId');
expect(response.toJSON()).toHaveProperty('initalValue');
expect(response.toJSON()).toHaveProperty('type');
expect(response.name).toEqual(body.name);
expect(response.UserId).toEqual(body.UserId);
expect(response.toJSON().initalValue).toEqual(body.initalValue);
expect(response.type).toEqual(body.type);
});
it('delete account', async () => {
reqMock.params.id = account.id;
await Controller.destroy(reqMock, resMock);
expect(resMock.status).toBeCalled();
expect(resMock.send).toBeCalled();
expect(resMock.send).toBeCalled();
expect(resMock.status.mock.calls[0][0]).toEqual(204);
});
});
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
export "FLUTTER_ROOT=C:\src\flutter"
export "FLUTTER_APPLICATION_PATH=C:\Projects\GitHub\mobile-app"
export "FLUTTER_TARGET=lib\main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build\ios"
export "FLUTTER_FRAMEWORK_DIR=C:\src\flutter\bin\cache\artifacts\engine\ios"
export "FLUTTER_BUILD_NAME=1.2.59"
export "FLUTTER_BUILD_NUMBER=1"
|
import { createConnection, Connection, ConnectionOptions } from 'typeorm';
const configDev: ConnectionOptions = {
type: 'sqlite',
database: 'database.sqlite',
entities: [__dirname + '/../entity/*.ts'],
migrations: [__dirname + '/../persistence/migration/*.ts'],
logging: true,
synchronize: false
};
const configProd: ConnectionOptions = {
type: 'sqlite',
database: 'database.sqlite',
entities: [__dirname + '/../entity/*.js'],
migrations: [__dirname + '/../persistence/migration/*.js'],
logging: true,
synchronize: false
};
export const openConnection: any = async () => {
const config = process.env.IS_PROD ? configProd : configDev;
return await createConnection(config);
};
|
class DataFormatter:
    """Light wrapper around a mapping that validates required fields and
    renders individual field values as strings."""

    def __init__(self, data):
        # Mapping of field name -> raw value. Assumed dict-like
        # (supports `in` and item access) — TODO confirm with callers.
        self.data = data

    def validate_required_field(self, field_name):
        """Ensure ``field_name`` is present in the data.

        :param field_name: name of the required field
        :return: True when the field is present
        :raises ValueError: when the field is missing
        """
        if field_name not in self.data:
            # f-string replaces the legacy %-formatting; message unchanged.
            raise ValueError(f"Field {field_name} is required, but was not provided.")
        return True

    def format_field(self, field):
        """Return the string form of ``field``'s value.

        ``None`` maps to the empty string; any other value goes through
        ``str()``.

        :param field: field name to format (must exist in the data;
            a missing key raises ``KeyError``, matching the original)
        :return: string representation of the value ('' for None)
        """
        value = self.data[field]
        if value is None:
            return ''
        return str(value)
package be.kwakeroni.parameters.petshop.definition;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
/**
* Created by kwakeroni on 18/08/17.
*/
@RunWith(MockitoJUnitRunner.class)
public class BulkDiscountGroupTest {
@Test
public void test() {
BulkDiscountGroup buildDiscount = new BulkDiscountGroup();
}
} |
#!/bin/bash
# Module specific variables go here
# Files: file=/path/to/file
# Arrays: declare -a array_name
# Strings: foo="bar"
# Integers: x=9
###############################################
# Bootstrapping environment setup
###############################################
# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found (path quoted so directories containing
# spaces do not break the -f test)
if [ ! -f "${bootstrap}" ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap
source "${bootstrap}"
###############################################
# Metrics start
###############################################
# Get EPOCH
s_epoch="$(gen_epoch)"
# Create a timestamp
timestamp="$(gen_date)"
# Whos is calling? 0 = singular, 1 is as group
caller=$(ps $PPID | grep -c stigadm)
###############################################
# Perform restoration
###############################################
# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
report "Not yet implemented" && exit 1
fi
###############################################
# STIG validation/remediation
###############################################
# Module specific validation code should go here
# Errors should go in ${errors[@]} array (which on remediation get handled)
# All inspected items should go in ${inspected[@]} array
errors=("${stigid}")
# If ${change} = 1
#if [ ${change} -eq 1 ]; then
# Create the backup env
#backup_setup_env "${backup_path}"
# Create a backup (configuration output, file/folde permissions output etc
#bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
#bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
#if [ $? -ne 0 ]; then
# Stop, we require a backup
#report "Unable to create backup" && exit 1
#fi
# Iterate ${errors[@]}
#for error in ${errors[@]}; do
# Work to remediate ${error} should go here
#done
#fi
# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )
###############################################
# Results for printable report
###############################################
# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then
# Set ${results} error message
#results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
results="Not yet implemented!"
fi
# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"
###############################################
# Report generation specifics
###############################################
# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0
# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})
# If the caller was only independant
if [ ${caller} -eq 0 ]; then
# Show failures
[ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"
# Provide detailed results to ${log}
if [ ${verbose} -eq 1 ]; then
# Print array of failed & validated items
[ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
fi
# Generate the report
report "${results}"
# Display the report
cat ${log}
else
# Since we were called from stigadm
module_header "${results}"
# Show failures
[ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"
# Provide detailed results to ${log}
if [ ${verbose} -eq 1 ]; then
# Print array of failed & validated items
[ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
fi
# Finish up the module specific report
module_footer
fi
###############################################
# Return code for larger report
###############################################
# Return an error/success code (0/1)
exit ${#errors[@]}
# Date: 2018-06-29
#
# Severity: CAT-II
# Classification: UNCLASSIFIED
# STIG_ID: V001029
# STIG_Version: SV-40284r1
# Rule_ID: GEN006160
#
# OS: Solaris
# Version: 10
# Architecture: X86
#
# Title: The smbpasswd file must be owned by root.
# Description: If the smbpasswd file is not owned by root, the smbpasswd file may be maliciously accessed or modified, potentially resulting in the compromise of Samba accounts.
|
#!/bin/bash
# Run the precommit task in each CLI package; abort on the first failure.
set -e
for pkg in prisma-cli-engine prisma-cli-core; do
  (cd "packages/${pkg}" && yarn precommit)
done
|
<filename>open-sphere-base/control-panels/src/main/java/io/opensphere/controlpanels/layers/util/FeatureTypeLabel.java
package io.opensphere.controlpanels.layers.util;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import org.apache.log4j.Logger;
import io.opensphere.core.Toolbox;
import io.opensphere.core.iconlegend.IconLegendRegistry;
import io.opensphere.core.util.ColorUtilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.image.IconUtil;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MapVisualizationType;
/**
 * A label that renders an icon representing a layer's visualization type
 * (features, tiles, tracks, terrain, imagery, annotations, ...), optionally
 * tinted with the layer's type color and overlaid with a light or dark
 * outline for contrast against the current background.
 */
@SuppressWarnings("serial")
public class FeatureTypeLabel extends JLabel
{
    /** The Constant LOGGER. */
    private static final Logger LOGGER = Logger.getLogger(FeatureTypeLabel.class);

    /** The dark-outline icon used for annotation feature types. */
    private static ImageIcon ourAnnotationFeaturesDarkOutlineIcon;

    /** The icon used for annotation feature types. */
    private static ImageIcon ourAnnotationFeaturesIcon;

    /** The light-outline icon used for annotation feature types. */
    private static ImageIcon ourAnnotationFeaturesLightOutlineIcon;

    /** The dark-outline icon used for annotation region types. */
    private static ImageIcon ourAnnotationRegionsDarkOutlineIcon;

    /** The icon used for annotation region types. */
    private static ImageIcon ourAnnotationRegionsIcon;

    /** The light-outline icon used for annotation region types. */
    private static ImageIcon ourAnnotationRegionsLightOutlineIcon;

    /** The features dark outline icon. */
    private static ImageIcon ourFeaturesDarkOutlineIcon;

    /** The points feature icon. */
    private static ImageIcon ourFeaturesIcon;

    /** The features light outline icon. */
    private static ImageIcon ourFeaturesLightOutlineIcon;

    /** The imagery icon. */
    private static ImageIcon ourImageryIcon;

    /** The motion imagery icon. */
    private static ImageIcon ourMotionImageryIcon;

    /** The motion imagery meta data icon. */
    private static ImageIcon ourMotionImageryDataIcon;

    /** The terrain icon. */
    private static ImageIcon ourTerrainIcon;

    /** The tiles dark outline icon. */
    private static ImageIcon ourTilesDarkOutlineIcon;

    /** The tiles icon. */
    private static ImageIcon ourTilesIcon;

    /** The tiles light outline icon. */
    private static ImageIcon ourTilesLightOutlineIcon;

    /** The tracks icon. */
    private static ImageIcon ourTracksIcon;

    /** The dark outline of the tracks icon. */
    private static ImageIcon ourTracksDarkOutlineIcon;

    /** The light outline of the tracks icon. */
    private static ImageIcon ourTracksLightOutlineIcon;

    /** The heatmaps icon. */
    private static ImageIcon ourHeatmapsIcon;

    /** The light outline of the heatmaps icon. */
    private static ImageIcon ourHeatmapsLightOutlineIcon;

    /** The dark outline of the heatmaps icon. */
    private static ImageIcon ourHeatmapsDarkOutlineIcon;

    /** The unknown type icon. */
    private static ImageIcon ourUnknownTypeIcon;

    /** Cache mapping each base icon to the mutable icon reused for tinting. */
    private final Map<ImageIcon, ImageIcon> myBaseIconToRenderIconMap = New.map();

    /** The outline icon drawn over the base icon, or null for no outline. */
    private ImageIcon myOutlineIcon;

    /** Human-readable name of the current type (e.g. "Feature", "Tile"). */
    private String myType;

    static
    {
        try
        {
            ourFeaturesIcon = loadIcon("/images/features-base.png");
            ourFeaturesLightOutlineIcon = loadIcon("/images/features-base-lightoutline.png");
            ourFeaturesDarkOutlineIcon = loadIcon("/images/features-base-darkoutline.png");
            ourTilesIcon = loadIcon("/images/tiles-base.png");
            ourTilesLightOutlineIcon = loadIcon("/images/tiles-base-lightoutline.png");
            ourTilesDarkOutlineIcon = loadIcon("/images/tiles-base-darkoutline.png");
            ourTracksIcon = loadIcon("/images/path-default.png");
            ourTracksLightOutlineIcon = loadIcon("/images/path-lightoutline.png");
            ourTracksDarkOutlineIcon = loadIcon("/images/path-darkoutline.png");
            ourHeatmapsIcon = loadIcon("/images/heatmaps.png");
            ourHeatmapsLightOutlineIcon = loadIcon("/images/heatmaps-lightoutline.png");
            ourHeatmapsDarkOutlineIcon = loadIcon("/images/heatmaps-darkoutline.png");
            ourImageryIcon = loadIcon("/images/imagery.png");
            ourTerrainIcon = loadIcon("/images/terrain.png");
            ourAnnotationFeaturesIcon = loadIcon("/images/location16x16.png");
            ourAnnotationFeaturesLightOutlineIcon = loadIcon("/images/location_lightoutline16x16.png");
            ourAnnotationFeaturesDarkOutlineIcon = loadIcon("/images/location_darkoutline16x16.png");
            ourAnnotationRegionsIcon = loadIcon("/images/polygon.png");
            ourAnnotationRegionsLightOutlineIcon = loadIcon("/images/polygon_lightoutline.png");
            ourAnnotationRegionsDarkOutlineIcon = loadIcon("/images/polygon_darkoutline.png");
            ourMotionImageryIcon = loadIcon("/images/film.png");
            ourMotionImageryDataIcon = loadIcon("/images/mi_metadata.png");
            ourUnknownTypeIcon = loadIcon("/images/unknownType.png");
        }
        catch (IOException e)
        {
            // Fix: previously concatenated the exception into the message (losing
            // the stack trace) and named the wrong class in the message.
            LOGGER.warn("Failed to load image icons for FeatureTypeLabel.", e);
        }
    }

    /**
     * Loads an image icon from the class path.
     *
     * @param path the image resource path
     * @return the loaded icon
     * @throws IOException if the resource cannot be read
     */
    private static ImageIcon loadIcon(String path) throws IOException
    {
        return new ImageIcon(ImageIO.read(FeatureTypeLabel.class.getResource(path)));
    }

    /**
     * Instantiates a new feature type label.
     */
    public FeatureTypeLabel()
    {
        super(ourUnknownTypeIcon);
        // Fixed 17x17 so the label lines up in layer-tree rows.
        Dimension size = new Dimension(17, 17);
        setMinimumSize(size);
        setMaximumSize(size);
        setPreferredSize(size);
    }

    /**
     * Instantiates a new feature type label and registers the icon meanings
     * with the application's icon legend.
     *
     * @param toolbox the toolbox
     */
    public FeatureTypeLabel(Toolbox toolbox)
    {
        this();
        IconLegendRegistry iconReg = toolbox.getUIRegistry().getIconLegendRegistry();
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourFeaturesIcon), "Feature Layer",
                "Feature layers are highly interactive, allowing users to manipulate and select individual records on the map.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourTilesIcon), "Tile Layer",
                "Tile layers summarize large amounts of geospatial data as non-interactive "
                        + "snapshot images, and they have no additional data associated to them.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourHeatmapsIcon), "Heatmap Layer",
                "Heatmap layers provide a high-level indication of activity concentrations.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourTracksIcon), "Track",
                "Tracks can be created manually, by using existing meta data points, or by using a set of points in the list tool."
                        + "The controls for creating a track are very similar to the ArcLength tool.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourImageryIcon), "Imagery Layer",
                "Imagery layers are images that have been georectified and overlaid on the map at their corresponding location.");
        iconReg.addIconToLegend(ourTerrainIcon, "Terrain Layer",
                "A terrain layer provides a vertical offset associated with a point "
                        + "on the earth which gives the appearance of height to layers that are overlaid on terrain layers.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourAnnotationFeaturesIcon), "Annotation Features",
                "An annotation feature is a user added point on the map that is highly customizable. "
                        + "These annotations show up in the 'Layers' panel under the 'My Places' category.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourAnnotationRegionsIcon), "Annotation Regions",
                "An annotation region is a user created polygon or circle on the map. "
                        + "It can be used for querying data, purging data, and zooming. "
                        + "These annotations show up in the 'Layers' panel under the 'My Places' category.");
        iconReg.addIconToLegend(IconUtil.getNormalIcon(ourUnknownTypeIcon), "Unknown Type",
                "This application attempts to identify layer types based on many criteria. "
                        + "If a layer type does not fit any of the known criteria, the layer will be assigned the 'Unknown Type'.");
    }

    /**
     * Gets the feature type.
     *
     * @return the feature type
     */
    public String getType()
    {
        return myType;
    }

    @Override
    public void paintComponent(Graphics g)
    {
        super.paintComponent(g);
        // Draw the contrast outline (if any) over the base icon.
        if (myOutlineIcon != null)
        {
            g.drawImage(myOutlineIcon.getImage(), 0, 0, null);
        }
    }

    /**
     * Sets the icon by type, choosing a light or dark outline based on the
     * brightness of the type color, and tinting the icon with that color.
     *
     * @param typeColor the type color (may be null for no tint)
     * @param type the new icon by type
     */
    public void setIconByType(Color typeColor, MapVisualizationType type)
    {
        ImageIcon anIcon = null;
        // Dark colors (brightness < 130) get a light outline and vice versa.
        boolean isLightOutline = typeColor == null || ColorUtilities.getBrightness(typeColor) < 130;
        myOutlineIcon = null;
        switch (type)
        {
            case POINT_ELEMENTS:
            case ELLIPSE_ELEMENTS:
            case CIRCLE_ELEMENTS:
            case POLYGON_ELEMENTS:
            case ICON_ELEMENTS:
            case MIXED_ELEMENTS:
            case TRACK_ELEMENTS:
            case POLYLINE_ELEMENTS:
            case COMPOUND_FEATURE_ELEMENTS:
            case LOB_ELEMENTS:
            case PLACE_NAME_ELEMENTS:
            case PROCESS_RESULT_ELEMENTS:
                anIcon = ourFeaturesIcon;
                myType = "Feature";
                myOutlineIcon = isLightOutline ? ourFeaturesLightOutlineIcon : ourFeaturesDarkOutlineIcon;
                break;
            case TERRAIN_TILE:
                anIcon = ourTerrainIcon;
                myType = "Terrain";
                break;
            case IMAGE:
                anIcon = ourImageryIcon;
                myType = "Imagery";
                break;
            case IMAGE_TILE:
                anIcon = ourTilesIcon;
                myType = "Tile";
                myOutlineIcon = isLightOutline ? ourTilesLightOutlineIcon : ourTilesDarkOutlineIcon;
                break;
            case INTERPOLATED_IMAGE_TILES:
                anIcon = ourHeatmapsIcon;
                myType = "Heatmap";
                myOutlineIcon = isLightOutline ? ourHeatmapsLightOutlineIcon : ourHeatmapsDarkOutlineIcon;
                break;
            case USER_TRACK_ELEMENTS:
                anIcon = ourTracksIcon;
                myType = "Track";
                myOutlineIcon = isLightOutline ? ourTracksLightOutlineIcon : ourTracksDarkOutlineIcon;
                break;
            case ANNOTATION_POINTS:
                anIcon = ourAnnotationFeaturesIcon;
                myType = "Annotation";
                myOutlineIcon = isLightOutline ? ourAnnotationFeaturesLightOutlineIcon : ourAnnotationFeaturesDarkOutlineIcon;
                break;
            case ANNOTATION_REGIONS:
                anIcon = ourAnnotationRegionsIcon;
                myType = "Annotation";
                myOutlineIcon = isLightOutline ? ourAnnotationRegionsLightOutlineIcon : ourAnnotationRegionsDarkOutlineIcon;
                break;
            case MOTION_IMAGERY:
                anIcon = ourMotionImageryIcon;
                myType = "Motion Imagery";
                break;
            case MOTION_IMAGERY_DATA:
                anIcon = ourMotionImageryDataIcon;
                myType = "Motion Imagery Data";
                break;
            default:
                anIcon = ourUnknownTypeIcon;
                myType = "Unknown";
                break;
        }
        // White is the icons' native color; no tint needed.
        if (typeColor != null && !Color.white.equals(typeColor))
        {
            anIcon = mixColorWithIcon(anIcon, typeColor);
        }
        setIcon(anIcon);
    }

    /**
     * Sets the icon by type.
     *
     * @param dti the new icon by type
     */
    public void setIconByType(DataTypeInfo dti)
    {
        if (dti != null && dti.getMapVisualizationInfo() != null)
        {
            MapVisualizationType type = dti.getMapVisualizationInfo().getVisualizationType();
            setIconByType(dti.getBasicVisualizationInfo().getTypeColor(), type);
        }
        else
        {
            myOutlineIcon = null;
            setIcon(ourUnknownTypeIcon);
            myType = "Unknown";
        }
    }

    /**
     * Tints the given base icon with the given color, reusing a cached
     * render icon per base icon so repeated calls do not allocate new images.
     *
     * @param anIcon the base icon
     * @param typeColor the tint color
     * @return the tinted image icon
     */
    private ImageIcon mixColorWithIcon(ImageIcon anIcon, Color typeColor)
    {
        ImageIcon renderIcon = myBaseIconToRenderIconMap.get(anIcon);
        if (renderIcon == null)
        {
            BufferedImage bi = new BufferedImage(anIcon.getIconWidth(), anIcon.getIconHeight(), BufferedImage.TYPE_INT_ARGB);
            renderIcon = new ImageIcon();
            renderIcon.setImage(bi);
            myBaseIconToRenderIconMap.put(anIcon, renderIcon);
        }
        Graphics2D g = (Graphics2D)renderIcon.getImage().getGraphics();
        try
        {
            // Clear any previous rendering.
            g.setColor(Color.black);
            g.setComposite(AlphaComposite.Clear);
            g.fillRect(0, 0, renderIcon.getIconWidth(), renderIcon.getIconHeight());
            // Fill with the type color...
            g.setColor(typeColor);
            g.setComposite(AlphaComposite.SrcOver);
            g.fillRect(0, 0, renderIcon.getIconWidth(), renderIcon.getIconHeight());
            // ...then keep only the pixels covered by the base icon's alpha.
            g.setComposite(AlphaComposite.DstIn);
            g.drawImage(anIcon.getImage(), 0, 0, null);
        }
        finally
        {
            // Fix: the Graphics context was never disposed, leaking a native
            // resource on every tinting pass.
            g.dispose();
        }
        return renderIcon;
    }
}
|
#!/bin/bash
# Copyright 2019 Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -x
set -e
set -o pipefail

# Print usage and exit with an error status.
usage() {
    echo "Usage: $0 -m <manifest-dir> -w <work-dir> [-r <rpmbuilder-dir> -p <rpm-create-search-dir>]"
    exit 1
}

rpm_search_paths=""
while getopts "m:w:p:r:" OPT; do
    case $OPT in
        m)
            # Normalize to an absolute path (quoted: survives spaces).
            export MANIFEST_PATH=$(readlink -f "$OPTARG")
            ;;
        w)
            export WORK=$OPTARG
            ;;
        r)
            export RPMBUILDER_PATH=$(readlink -f "$OPTARG")
            ;;
        p)
            rpm_search_paths+=" $OPTARG"
            ;;
        *)
            usage
            ;;
    esac
done

[ -z "$MANIFEST_PATH" ] && usage
[ -z "$WORK" ] && usage
# -p requires -r; the obsolescent `test ... -a ...` form is replaced by two tests.
{ [ -n "$rpm_search_paths" ] && [ -z "$RPMBUILDER_PATH" ]; } && usage
shift "$((OPTIND-1))"
[ "$#" -ne 0 ] && usage

scriptdir="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
source "$scriptdir/lib.sh"

_initialize_work_dirs

docker build -f "$scriptdir/docker-context/Dockerfile-dib" -t dib "$scriptdir/docker-context"
docker build -f "$scriptdir/docker-context/Dockerfile-buildtools" -t buildtools "$scriptdir/docker-context"

# Create manifest RPM
"$LIBDIR"/create_manifest_rpm.sh

# Create RPMs
if [ -n "$rpm_search_paths" ]; then
    # Intentionally unquoted: each collected -p path must become its own argument.
    "$LIBDIR"/build_rpms.sh $rpm_search_paths
fi

# Create repo config
"$LIBDIR"/build_step_create_yum_repo_files.sh

# QCOW
"$LIBDIR"/build_step_golden_image.sh
# Short fingerprint of the installed RPM list (manifest RPM excluded, since it
# changes on every build).
sha1short=$(grep -v product-manifest "$RPMLISTS/rpmlist" | sha1sum | cut -c-8)

# ISO images
"$LIBDIR"/build_step_create_install_cd.sh

echo "=== SUCCESS ==="
echo "Build results are in $WORKRESULTS"
echo "Installed RPMS checksum: ${sha1short} (this will change if list of installed RPMs changes)"
|
#!/usr/bin/env bash
# shellcheck disable=SC2002,SC2015,SC2086,SC2091
# This scripts brings up a bare-metal Tectonic cluster using VMs and
# containerized matchbox/dnsmasq servers.
#
# The following environment variables are expected:
#  - BRANCH_NAME
#  - BUILD_ID
# The script setups the environment and calls `make apply` from the repository
# root.
#
# Due to the assumptions made by this script, it is *not* safe to run multiple
# instances of it on a single host and the Terraform configuration must be
# matching the infrastructure. Notably:
#  - matchbox is expected on 172.18.0.2,
#  - three nodes are expected on 172.18.0.21 (master), 172.18.0.22 (worker), 172.18.0.23 (worker).
#
# This script requires the following packages on the host:
#  - qemu-kvm
#  - libvirt-bin
#  - virt-manager
#  - curl
#  - unzip
#  - policycoreutils.
set -xe
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT="$DIR/../../.."
BIN_DIR="$ROOT/bin_test"
# Pinned tool versions; bump these together with the Terraform configuration.
MATCHBOX_VERSION=v0.6.1
KUBECTL_VERSION=v1.6.4
TERRAFORM_VERSION=0.11.1
KUBECTL_URL="https://storage.googleapis.com/kubernetes-release/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl"
TERRAFORM_URL="https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip"
# VM sizing, consumed by matchbox's scripts/libvirt -- units presumably GB/MB,
# TODO confirm against that script.
export VM_DISK='20'
export VM_MEMORY='2048'
export ASSETS_DIR="${ASSETS_DIR:-/tmp/matchbox/assets}"
# Orchestrates the whole run: install tools, clean, configure, boot matchbox,
# run Terraform, boot VMs, wait for the cluster and smoke-test it.
main() {
    if [ -z "${BRANCH_NAME}" ] || [ -z "${BUILD_ID}" ]; then
        echo "\$BRANCH_NAME, \$BUILD_ID are required"
        return 1
    fi
    if [ -z "$1" ]; then
        echo "$0 <tfvars file's relative path>"
        return 1
    fi
    echo "Installing required binaries"
    install
    echo "Cleanup testing environment"
    cleanup &>/dev/null
    trap kill_terraform_and_cleanup EXIT
    echo "Setting up configuration and environment"
    configure "$1"
    setup
    echo "Starting matchbox"
    (cd "${ROOT}"/matchbox && sudo -S -E ./scripts/devnet create)
    echo "Waiting for matchbox..."
    # Fix: was `until $(curl ...)`, which ran curl in a command substitution and
    # then executed its (empty) output; test curl's exit status directly.
    until curl --silent --fail -k http://matchbox.example.com:8080 > /dev/null; do
        echo "Waiting for matchbox..."
        sleep 5
        if sudo -E systemctl is-failed dev-matchbox; then
            sudo -E journalctl -u dev-matchbox
            exit 1
        fi
        if sudo -E systemctl is-failed dev-dnsmasq; then
            sudo -E journalctl -u dev-dnsmasq
            exit 1
        fi
    done
    echo "Starting Terraform"
    (cd "${ROOT}" && make apply || kill $$) & # Self-destruct and trigger trap on failure
    TERRAFORM_PID=$!
    sleep 15
    echo "Starting QEMU/KVM nodes"
    (cd "${ROOT}"/matchbox && sudo -E ./scripts/libvirt create)
    echo "Waiting for Kubernetes/Tectonic cluster to be up and running:"
    cluster_up
    echo "Running Go smoke tests"
    test_cluster
    echo "SUCCESS: Tectonic bare-metal cluster came up!"
    cleanup
}
# Downloads kubectl and Terraform into ${BIN_DIR} and clones matchbox at the
# pinned version under ${ROOT}.  Paths are quoted so spaces cannot word-split.
install() {
    mkdir -p "$BIN_DIR"
    export PATH=$BIN_DIR:$PATH
    echo "Installing kubectl"
    curl -L -o "${BIN_DIR}/kubectl" "${KUBECTL_URL}"
    chmod +x "${BIN_DIR}/kubectl"
    echo "Installing Terraform"
    curl "${TERRAFORM_URL}" | funzip > "$BIN_DIR/terraform"
    sudo chmod +x "$BIN_DIR/terraform"
    echo "Installing matchbox"
    (cd "${ROOT}/" && rm -rf matchbox && git clone https://github.com/coreos/matchbox)
    (cd "${ROOT}/matchbox" && git checkout "$MATCHBOX_VERSION")
}
# Prepares the host: matchbox TLS test credentials, the CoreOS image cache,
# ssh-agent with the throwaway test key, the rkt metal0 bridge, and DNS.
# Mutates system state (sudo) in a specific order -- do not reorder.
setup() {
    # NOTE(review): "matchbook" in the message looks like a typo for "matchbox";
    # left as-is since log output may be grepped by CI.
    echo "Copying matchbook test credentials"
    cp ${DIR}/fake-creds/{ca.crt,server.crt,server.key} ${ROOT}/matchbox/examples/etc/matchbox
    # Download the CoreOS image only when this exact version is not cached.
    if [ ! -d $ASSETS_DIR/coreos/$COREOS_VERSION ]; then
        echo "Downloading CoreOS image"
        ${ROOT}/matchbox/scripts/get-coreos $COREOS_CHANNEL $COREOS_VERSION $ASSETS_DIR
    fi
    echo "Configuring ssh-agent"
    eval "$(ssh-agent -s)"
    # ssh refuses keys with permissive modes; tighten before adding.
    chmod 600 ${ROOT}/matchbox/tests/smoke/fake_rsa
    ssh-add ${ROOT}/matchbox/tests/smoke/fake_rsa
    echo "Setting up the metal0 bridge"
    sudo mkdir -p /etc/rkt/net.d
    # CNI bridge config for the 172.18.0.0/24 test network (see header comment).
    sudo bash -c 'cat > /etc/rkt/net.d/20-metal.conf << EOF
{
  "name": "metal0",
  "type": "bridge",
  "bridge": "metal0",
  "isGateway": true,
  "ipMasq": true,
  "ipam": {
    "type": "host-local",
    "subnet": "172.18.0.0/24",
    "routes" : [ { "dst" : "0.0.0.0/0" } ]
  }
}
EOF'
    echo "Setting up DNS"
    # Prepend the dnsmasq container's resolver once; cleanup() removes it.
    if ! grep -q "172.18.0.3" /etc/resolv.conf; then
        echo "nameserver 172.18.0.3" | cat - /etc/resolv.conf | sudo tee /etc/resolv.conf >/dev/null
    fi
}
# Derives the cluster name from branch/build, wires the tfvars file into the
# build directory, and exports settings consumed by `make apply` and the
# smoke tests.
configure() {
    export PLATFORM=metal
    export CLUSTER="tf-${PLATFORM}-${BRANCH_NAME}-${BUILD_ID}"
    export TF_VAR_tectonic_cluster_name
    TF_VAR_tectonic_cluster_name=$(echo "${CLUSTER}" | awk '{print tolower($0)}')
    CONFIG=${DIR}/$1
    make localconfig
    # Quoted: unquoted paths would word-split if DIR/ROOT contain spaces.
    ln -sf "${CONFIG}" "${ROOT}/build/${CLUSTER}/terraform.tfvars"
    # Extract `key = "value"` settings from the tfvars file.
    COREOS_CHANNEL=$(awk -F "=" '/^tectonic_container_linux_channel/ {gsub(/[ \t"]/, "", $2); print $2}' "${CONFIG}")
    COREOS_VERSION=$(awk -F "=" '/^tectonic_container_linux_version/ {gsub(/[ \t"]/, "", $2); print $2}' "${CONFIG}")
    export SMOKE_KUBECONFIG=${ROOT}/build/${CLUSTER}/generated/auth/kubeconfig
}
# Destructive teardown of VMs, rkt pods, network namespaces, the metal0
# bridge, CNI state and the DNS entry added by setup().  Runs with `set +e`
# because most steps legitimately fail when the resource is already gone.
# Order matters (pods before namespaces before bridge) -- do not reorder.
cleanup() {
    set +e
    # Kill any remaining VMs.
    (cd ${ROOT}/matchbox && sudo ./scripts/libvirt destroy)
    # shellcheck disable=SC2006
    # Reset rkt pods and CNI entirely, to avoid IP conflict due to leakage bug.
    for p in `sudo rkt list | tail -n +2 | awk '{print $1}'`; do sudo rkt stop --force $p; done
    sudo rkt gc --grace-period=0s
    # shellcheck disable=SC2006
    for ns in `ip netns l | grep -o -E '^[[:alnum:]]+'`; do sudo ip netns del $ns; done
    sudo ip l del metal0
    # shellcheck disable=SC2006
    for veth in `ip l show | grep -oE 'veth[^@]+'`; do sudo ip l del $veth; done
    sudo rm -Rf /var/lib/cni/networks/*
    sudo rm -Rf /var/lib/rkt/*
    sudo rm -f /etc/rkt/net.d/20-metal.conf
    # Reset DNS (drop the 172.18.0.3 nameserver that setup() prepended).
    cat /etc/resolv.conf | grep -v 172.18.0.3 | sudo tee /etc/resolv.conf
    # Reset failed units (i.e. matchbox, dnsmasq which we just killed).
    sudo systemctl reset-failed
    set -e
}
# EXIT-trap handler: stop the background Terraform job; the full cleanup is
# deliberately skipped so a failed run can be inspected.
kill_terraform_and_cleanup() {
    echo "Killing Terraform"
    kill ${TERRAFORM_PID} || :
    echo "WARNING: Cleanup is temporarily disabled on failure for debugging purposes. Next job will clean at startup."
    #echo "Cleanup testing environment"
    #cleanup
}
# Succeeds once the kubelet on host $1 is healthy: first an SSH probe of the
# bootstrap/kubelet units, then the kubelet's read-only healthz endpoint.
kubelet_up() {
    ssh -q -i ${ROOT}/matchbox/tests/smoke/fake_rsa \
        -o StrictHostKeyChecking=no \
        -o UserKnownHostsFile=/dev/null \
        -o PreferredAuthentications=publickey \
        core@$1 /usr/bin/systemctl status k8s-node-bootstrap kubelet
    # 10255 is the kubelet's read-only port; -m 1 bounds each poll to 1s.
    curl --silent --fail -m 1 "http://$1:10255/healthz" > /dev/null
}
# Polls until all three kubelets answer, all nodes report Ready, all pods are
# up, and the Tectonic Console responds over HTTPS.
cluster_up() {
    echo "Waiting for Kubelets to start..."
    until kubelet_up "node1.example.com" \
        && kubelet_up "node2.example.com" \
        && kubelet_up "node3.example.com"
    do
        sleep 15
        echo "Waiting for Kubelets to start..."
    done
    echo "$(readyNodes) of 3 nodes are Ready..."
    until [[ "$(readyNodes)" == "3" ]]; do
        sleep 5
        echo "$(readyNodes) of 3 nodes are Ready..."
    done
    echo "List of nodes:"
    k8s get nodes
    sleep 5
    # Require at least one pod so an empty cluster doesn't count as "all up".
    until [[ "$(readyPods)" == "$(podCount)" && "$(readyPods)" -gt "0" ]]; do
        sleep 15
        echo "$(readyPods) / $(podCount) pods are Running..."
        k8s get pods --all-namespaces || true
    done
    echo "List of pods:"
    k8s get pods --all-namespaces || true
    echo "Waiting for Tectonic Console..."
    # Fix: was `until $(curl ...)`, which executed curl's (empty) output rather
    # than testing curl's exit status directly.
    until curl --silent --fail -k https://tectonic.example.com > /dev/null; do
        echo "Waiting for Tectonic Console..."
        k8s get pods --all-namespaces || true
        sleep 15
    done
}
# kubectl wrapper pinned to the smoke-test kubeconfig.  Quoted so paths
# containing spaces survive word splitting.
k8s() {
    "${BIN_DIR}/kubectl" --kubeconfig="${SMOKE_KUBECONFIG}" "$@"
}
# readyNodes prints the number of nodes whose Ready condition is True.
readyNodes() {
    # The template emits one condition map (containing "True") per Ready node.
    local tmpl='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}}{{.}}{{end}}{{end}}{{end}}'
    # shellcheck disable=SC2126
    k8s get nodes -o template --template="$tmpl" | grep -o -E True | wc -l
}
# readyPods prints the number of pods whose Ready condition is True
# (the original comment said "Running"; the template checks Ready).
readyPods() {
    local tmpl='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}}{{.}}{{end}}{{end}}{{end}}'
    # shellcheck disable=SC2126
    k8s get pods --all-namespaces -o template --template="$tmpl" | grep -o -E True | wc -l
}
# podCount prints the total number of pods, approximated by counting the word
# "status" in each pod's printed Ready-condition map (one occurrence per pod).
podCount() {
    local tmpl='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}}{{.}}{{end}}{{end}}{{end}}'
    # shellcheck disable=SC2126
    k8s get pods --all-namespaces -o template --template="$tmpl" | grep -o -E status | wc -l
}
# Runs the Go smoke-test binary against the cluster, exporting the node count,
# manifest path and networking mode it expects, all derived from the tfvars
# file selected by configure().
test_cluster() {
    MASTER_COUNT=$(grep tectonic_master_count "$CONFIG" | awk -F "=" '{gsub(/"/, "", $2); print $2}')
    WORKER_COUNT=$(grep tectonic_worker_count "$CONFIG" | awk -F "=" '{gsub(/"/, "", $2); print $2}')
    # SC2155-style export-with-substitution kept; exit status of the arithmetic
    # is not consulted anywhere.
    export SMOKE_NODE_COUNT=$(( MASTER_COUNT + WORKER_COUNT ))
    export SMOKE_MANIFEST_PATHS=${ROOT}/build/${CLUSTER}/generated/
    # shellcheck disable=SC2155
    export SMOKE_NETWORKING=$(grep tectonic_networking "$CONFIG" | awk -F "=" '{gsub(/"/, "", $2); print $2}' | tr -d ' ')
    # bin/smoke is expected relative to the caller's working directory
    # (repository root) -- TODO confirm.
    bin/smoke -test.v -test.parallel=1 --cluster
}
# Entry point: forward all command-line arguments to main().
main "$@"
|
#!/bin/sh
# Packages a release tarball reviser-<version>.tar.gz from src/ into releases/.
if [ -z "$1" ]; then
    echo "ERROR: need to specify version"
    # Fix: exited with status 0 on the error path, hiding the failure from callers.
    exit 1
fi
# Stage a clean tree.
rm -Rf reviser
mkdir reviser
cp -R src/* reviser
cp README reviser
cp MIT-LICENSE reviser
# Version quoted so unusual input cannot word-split the filename.
tar -czvf "reviser-$1.tar.gz" reviser/
rm -rf reviser
# Note: the old `rm -rf reviser-$1.tar` line was dead code -- the artifact is
# .tar.gz, so it never matched anything and has been removed.
mv "reviser-$1.tar.gz" releases
|
// Toggle the sticky-header styling classes whenever the page is scrolled away
// from the top.  Merged the two scroll listeners into one (same event, same
// condition) and guarded against the selectors matching nothing, which
// previously threw a TypeError on pages without these elements.
window.addEventListener("scroll", function () {
    var scrolled = window.scrollY > 0;
    var header = document.querySelector('#navbarr');
    if (header) {
        header.classList.toggle("pengubah", scrolled);
    }
    var warna = document.querySelector('.nav-item #atasku');
    if (warna) {
        warna.classList.toggle("warnanav", scrolled);
    }
});
#include <clocks.h>
#include <gpio.h>
#include <uart.h>
#include <spi.h>
#include <pwr.h>
#include <rtc.h>
#include <blink.h>
#include <blink.h>
// Clock tree: default system clock plus a 32.768 kHz low-speed external
// oscillator (feeds the RTC below); SysTick runs off the system clock.
typedef SYSCLK_T<> SYSCLK;
typedef LSE_OSC_T<32768> LSECLK;
typedef SYSTICK_T<SYSCLK> SYSTICK;
typedef PWR_T<> POWER;
// Board-specific LED pin mapping: Olimexino routes the LEDs to PA1/PC13;
// the default board uses PB8/PB9.  Unlisted ports get an empty GPIO_PORT_T.
#ifdef OLIMEXINO
typedef GPIO_T<PA, 1, OUTPUT_10MHZ, PUSH_PULL, LOW> LED_RED;
typedef GPIO_T<PC, 13, OUTPUT_10MHZ, PUSH_PULL, LOW> LED_GREEN;
typedef GPIO_PORT_T<PA, LED_RED> PORT_A;
typedef GPIO_PORT_T<PB> PORT_B;
typedef GPIO_PORT_T<PC, LED_GREEN> PORT_C;
#else
typedef GPIO_T<PB, 8, OUTPUT_10MHZ, PUSH_PULL, LOW> LED_RED;
typedef GPIO_T<PB, 9, OUTPUT_10MHZ, PUSH_PULL, LOW> LED_GREEN;
typedef GPIO_PORT_T<PA> PORT_A;
typedef GPIO_PORT_T<PB, LED_RED, LED_GREEN> PORT_B;
typedef GPIO_PORT_T<PC> PORT_C;
#endif
// RTC clocked from the LSE with a divider of 16; the timeout and blinker
// state machines are both driven from RTC ticks.
typedef RTC_T<LSECLK, 16> RTCLK;
typedef TIMEOUT_T<RTCLK> TIMEOUT;
typedef BLINK_T<RTCLK, LED_RED> BLINKER;
extern "C" {
// RTC interrupt: advance the RTC driver and the blinker state machine, then
// wake the main loop once the active timeout has counted down to zero.
// extern "C" gives it the unmangled name the vector table expects.
void RTC_IRQHandler(void)
{
RTCLK::handle_irq();
BLINKER::handle_irq();
// exit_idle() is declared elsewhere (presumably the low-power/idle layer of
// this framework) -- TODO confirm.
if (TIMEOUT::count_down()) exit_idle();
}
}
// Board bring-up, then blink forever: every second, flash the red LED three
// times (100 ms period) and toggle the green LED.  Init order (power, clocks,
// GPIO, RTC) matters on this hardware -- do not reorder.
int main(void)
{
POWER::init();
SYSCLK::init();
LSECLK::init();
PORT_A::init();
PORT_B::init();
PORT_C::init();
RTCLK::init();
while (1) {
TIMEOUT::set_and_wait(1000);
BLINKER::blink(3, 100);
LED_GREEN::toggle();
}
// Unreachable; kept to satisfy main's int return type.
return 0;
}
|
<!-- Labeled free-text input for the user's name; the for/id pair links the
     label to the input for accessibility. -->
<div>
<label for="name">Name:</label>
<input type="text" id="name">
</div>
#!/bin/bash
# Builds an operator index image containing the bundle image, then reminds the
# operator to push both.  IMAGE_* and BUNDLE/INDEX names come from hack/common.sh;
# ${OPM} is resolved by hack/ensure-opm.sh.
set -e

source hack/common.sh
source hack/ensure-opm.sh

echo
# The bundle must already be pushed: `opm index add` pulls it from the registry.
echo "Did you push the bundle image? It must be pullable from '$IMAGE_REGISTRY'."
echo "Run '${IMAGE_BUILD_CMD} push ${BUNDLE_FULL_IMAGE_NAME}'"
echo

${OPM} index add --bundles "${BUNDLE_FULL_IMAGE_NAME}" --tag "${INDEX_FULL_IMAGE_NAME}"

echo
echo "Run '${IMAGE_BUILD_CMD} push ${INDEX_FULL_IMAGE_NAME}' to push operator index to image registry."
|
package com.thinkgem.jeesite.modules.oa.utils;
import java.util.List;
import org.activiti.engine.delegate.DelegateTask;
import org.activiti.engine.delegate.TaskListener;
import com.thinkgem.jeesite.common.service.BaseService;
import com.thinkgem.jeesite.common.utils.SpringContextHolder;
import com.thinkgem.jeesite.modules.sys.dao.RoleDao;
import com.thinkgem.jeesite.modules.sys.dao.UserDao;
import com.thinkgem.jeesite.modules.sys.entity.Role;
import com.thinkgem.jeesite.modules.sys.entity.User;
import com.thinkgem.jeesite.modules.sys.utils.UserUtils;
/**
 * Activiti task listener that assigns the task to the department leader of
 * the currently logged-in user.
 */
@SuppressWarnings("serial")
public class DeptTaskHandler implements TaskListener {

    /** DAO used to look up the users of the current user's office. */
    private static UserDao userDao = SpringContextHolder.getBean(UserDao.class);

    /** DAO used to look up each candidate user's roles. */
    private static RoleDao roleDao = SpringContextHolder.getBean(RoleDao.class);

    /**
     * Searches all users in the current user's office for one holding the
     * "dept_leader" role and makes that user the task assignee.  If several
     * users hold the role, the last one returned by the DAO wins (original
     * behavior preserved).
     *
     * @param delegateTask the task being handled by Activiti
     */
    @Override
    public void notify(DelegateTask delegateTask) {
        // Current logged-in user; we search their office for the leader.
        User user = UserUtils.getUser();
        // All users belonging to the current user's office.
        List<User> userList = userDao.findUserByOfficeId(user);
        for (User candidate : userList) {
            // Build a role query scoped by the candidate's data permissions.
            Role role = new Role();
            role.getSqlMap().put("dsf", BaseService.dataScopeFilter(candidate, "o", "u"));
            List<Role> roleList = roleDao.findList(role);
            candidate.setRoleList(roleList);
            // If the candidate holds the "dept_leader" role, assign the task.
            for (Role r : roleList) {
                if ("dept_leader".equals(r.getEnname())) {
                    delegateTask.setAssignee(candidate.getLoginName());
                }
            }
        }
    }
}
|
#!/bin/bash
# ANSI escape sequences used by the print helpers below.
# NOTE(review): "COROR_*" looks like a typo for "COLOR_*", but the names are
# referenced throughout this file, so they are kept as-is.
COROR_GREEN="\e[0;32m"
COROR_GREY="\e[0;90m"
COLOR_BLUE="\e[0;94m"
COROR_END="\033[0m"
# Prints $1 in blue, underlined by a grey dash per character of the title.
print_title()
{
    echo
    echo -e "${COLOR_BLUE}$1${COROR_END}"
    # One dash per character (the old seq|tr trick produced ${#1} dashes too).
    local underline
    underline=$(printf '%*s' "${#1}" '' | tr ' ' '-')
    echo -e "${COROR_GREY}${underline}${COROR_END}"
}
# Prints a blank line, then $1 in green.
print_subtitle()
{
    printf '\n'
    printf '%b\n' "${COROR_GREEN}$1${COROR_END}"
}
# Succeeds (returns 0) when $1 is an existing directory.
dir_exists()
{
    [ -d "$1" ]
}
# Succeeds (returns 0) when $1 is an existing regular file.
file_exists()
{
    [ -f "$1" ]
}
# Announces and recursively removes $1.  The path is quoted (was unquoted,
# which glob-expanded and word-split paths containing spaces or wildcards).
rm_r_quiet()
{
    echo "    Removing $1"
    rm -rf "$1"
}
# Announces and removes the file $1.  The path is quoted (was unquoted,
# which glob-expanded and word-split paths containing spaces or wildcards).
rm_quiet()
{
    echo "    Removing $1"
    rm -f "$1"
}
# Succeeds (returns 0) when $1 equals one of the remaining arguments.
array_contains()
{
    local item
    for item in "${@:2}"; do
        if [[ "$item" == "$1" ]]; then
            return 0
        fi
    done
    return 1
}
# Records the dependency currently being processed (name and source path under
# $DEPS_SRC) in globals consumed by is_built()/is_cloned()/mark_built(), and
# prints its name as a subtitle.
start_target()
{
    _utils_current_dir_name=$1
    _utils_current_dir_path=$DEPS_SRC/$1
    print_subtitle $_utils_current_dir_name
}
# Succeeds (returns 0) when the current target is already built, i.e. its
# marker file exists.  Side effects: exits if the sources are missing, and
# changes directory to $DEPS (built) or the target's source dir (not built).
is_built()
{
    # Fail hard when the sources have not been fetched at all.
    if [ ! -d "$_utils_current_dir_path" ] ; then
        echo "Error: Sources not found from $_utils_current_dir_path"
        exit 1
    fi
    # A completed build is recorded by a marker file in the source dir.
    test -f "$_utils_current_dir_path/tundra-urho3d-build.meta"
    result=$?
    if [ $result -eq 0 ] ; then
        echo " Build OK"
        cd $DEPS
    else
        echo " Building, please wait..."
        cd $_utils_current_dir_path
    fi
    return $result
}
# Succeeds (returns 0) when the current target's sources are already cloned.
# When they are not, changes directory to $DEPS_SRC ready for cloning.
is_cloned()
{
    if [ -d "$_utils_current_dir_path" ] ; then
        result=0
        echo " Clone OK"
    else
        result=1
        echo " Cloning, please wait..."
        cd $DEPS_SRC
    fi
    return $result
}
# Drops the marker file that is_built() checks for.  The path is quoted
# (was unquoted, breaking on paths containing spaces).
mark_built()
{
    touch "$_utils_current_dir_path/tundra-urho3d-build.meta"
}
|
// Copyright (c) 2015-2016, ETH Zurich, <NAME>, Zurich Eye
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the ETH Zurich, Wyss Zurich, Zurich Eye nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL ETH Zurich, Wyss Zurich, Zurich Eye BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include <cstdint>
// Eigen 3.2.7 uses std::binder1st and std::binder2nd which are deprecated since c++11
// Fix is in 3.3 devel (http://eigen.tuxfamily.org/bz/show_bug.cgi?id=872).
// Fix: the push/pop were written as `#pragma diagnostic push/pop` (missing
// `GCC`), which GCC/Clang silently ignore -- so the warning suppression was
// never scoped and would have leaked past the include.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include <Eigen/Core>
#pragma GCC diagnostic pop
#include <Eigen/StdVector>
#include <ze/common/config.hpp>
namespace ze {
//------------------------------------------------------------------------------
// Scalars and fp precision.
using size_t = std::size_t;
using int8_t = std::int8_t;
using int16_t = std::int16_t;
using int64_t = std::int64_t;
using uint8_t = std::uint8_t;
using uint16_t = std::uint16_t;
using uint32_t = uint32_t;
using uint64_t = std::uint64_t;
#ifdef ZE_SINGLE_PRECISION_FLOAT
using real_t = float;
#else
using real_t = double;
#endif
//------------------------------------------------------------------------------
// Typedefs of commonly used Eigen matrices and vectors.
// MatrixMN, MatrixN = MatrixNN, I_NxN, and Z_NxN, for M,N=1..9.
//
// The macro below, instantiated once per size N, defines:
//   MatrixN        - N x N square matrix of real_t
//   MatrixMN       - M x N matrix for M = 1..9
//   MatrixNX/XN    - one dynamic dimension (columns / rows respectively)
//   I_NxN, Z_NxN   - identity and zero constant expressions for MatrixN
#define ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(SIZE, SUFFIX) \
using Matrix##SUFFIX = Eigen::Matrix<real_t, SIZE, SIZE>; \
using Matrix1##SUFFIX = Eigen::Matrix<real_t, 1, SIZE>; \
using Matrix2##SUFFIX = Eigen::Matrix<real_t, 2, SIZE>; \
using Matrix3##SUFFIX = Eigen::Matrix<real_t, 3, SIZE>; \
using Matrix4##SUFFIX = Eigen::Matrix<real_t, 4, SIZE>; \
using Matrix5##SUFFIX = Eigen::Matrix<real_t, 5, SIZE>; \
using Matrix6##SUFFIX = Eigen::Matrix<real_t, 6, SIZE>; \
using Matrix7##SUFFIX = Eigen::Matrix<real_t, 7, SIZE>; \
using Matrix8##SUFFIX = Eigen::Matrix<real_t, 8, SIZE>; \
using Matrix9##SUFFIX = Eigen::Matrix<real_t, 9, SIZE>; \
using Matrix##SUFFIX##X = Eigen::Matrix<real_t, SIZE, Eigen::Dynamic>; \
using MatrixX##SUFFIX = Eigen::Matrix<real_t, Eigen::Dynamic, SIZE>; \
static const Eigen::MatrixBase<Matrix##SUFFIX>::IdentityReturnType I_##SUFFIX##x##SUFFIX = Matrix##SUFFIX::Identity(); \
static const Eigen::MatrixBase<Matrix##SUFFIX>::ConstantReturnType Z_##SUFFIX##x##SUFFIX = Matrix##SUFFIX::Zero()
// Instantiate the typedefs for sizes 1 through 9.
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(1,1);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(2,2);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(3,3);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(4,4);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(5,5);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(6,6);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(7,7);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(8,8);
ZE_MAKE_EIGEN_MATRIX_TYPEDEFS(9,9);
// Typedef arbitary length vector and arbitrary sized matrix.
using VectorX = Eigen::Matrix<real_t, Eigen::Dynamic, 1>;
using MatrixX = Eigen::Matrix<real_t, Eigen::Dynamic, Eigen::Dynamic>;
using VectorXi = Eigen::VectorXi;
// Commonly used fixed size vectors (column vectors of real_t).
using Vector1 = Eigen::Matrix<real_t, 1, 1>;
using Vector2 = Eigen::Matrix<real_t, 2, 1>;
using Vector3 = Eigen::Matrix<real_t, 3, 1>;
using Vector4 = Eigen::Matrix<real_t, 4, 1>;
using Vector5 = Eigen::Matrix<real_t, 5, 1>;
using Vector6 = Eigen::Matrix<real_t, 6, 1>;
using Vector7 = Eigen::Matrix<real_t, 7, 1>;
using Vector8 = Eigen::Matrix<real_t, 8, 1>;
using Vector9 = Eigen::Matrix<real_t, 9, 1>;
using Vector2i = Eigen::Vector2i;
//------------------------------------------------------------------------------
// Feature containers.
// Singular aliases are one feature; the plural Matrix*X aliases store one
// feature per column.
using Keypoint = Vector2;
using Bearing = Vector3;
using Position = Vector3;
using HomPosition = Vector4;
using Gradient = Vector2;
using Seed = Vector4;
using LineMeasurement = Vector3;
using Keypoints = Matrix2X;
using Bearings = Matrix3X;
using Positions = Matrix3X;
using HomPositions = Matrix4X;
using Gradients = Matrix2X;
using Seeds = Matrix4X;
//! Normal vector on line end-points bearings, as explained in
//! ze_geometry/doc/line_parametrization.pdf
using LineMeasurements = Matrix3X;
// Per-keypoint scalar attributes (one entry per keypoint).
using KeypointLevel = int8_t;
using KeypointType = int8_t;
using KeypointIndex = uint16_t;
using KeypointLevels = Eigen::Matrix<KeypointLevel, Eigen::Dynamic, 1>;
using KeypointTypes = Eigen::Matrix<KeypointType, Eigen::Dynamic, 1>;
using KeypointAngles = VectorX;
using KeypointScores = VectorX;
using KeypointSizes = VectorX;
using KeypointIndices = Eigen::Matrix<KeypointIndex, Eigen::Dynamic, 1>;
using Descriptors = Eigen::Matrix<uint8_t, Eigen::Dynamic, Eigen::Dynamic, Eigen::ColMajor>;
//------------------------------------------------------------------------------
// Inertial containers.
using ImuStamps = Eigen::Matrix<int64_t, Eigen::Dynamic, 1>;
using ImuAccGyrContainer = Matrix6X;
// Order: Accelerometer, Gyroscope
using ImuAccGyr = Vector6;
} // namespace ze
|
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.stargate.metrics.jersey;
import io.dropwizard.jersey.setup.JerseyEnvironment;
import io.micrometer.jersey2.server.JerseyTagsProvider;
import io.micrometer.jersey2.server.MetricsApplicationEventListener;
import io.stargate.core.metrics.StargateMetricConstants;
import io.stargate.core.metrics.api.HttpMetricsTagProvider;
import io.stargate.core.metrics.api.Metrics;
import io.stargate.metrics.jersey.config.MetricsListenerConfig;
import io.stargate.metrics.jersey.config.SystemPropsMetricsListenerConfig;
import io.stargate.metrics.jersey.listener.CounterApplicationEventListener;
import io.stargate.metrics.jersey.tags.CompositeJerseyTagsProvider;
import io.stargate.metrics.jersey.tags.ConstantTagsProvider;
import io.stargate.metrics.jersey.tags.DocsApiModuleTagsProvider;
import io.stargate.metrics.jersey.tags.HeadersTagProvider;
import io.stargate.metrics.jersey.tags.HttpCounterTagsProvider;
import io.stargate.metrics.jersey.tags.HttpMeterTagsProvider;
import io.stargate.metrics.jersey.tags.NonApiModuleTagsProvider;
import io.stargate.metrics.jersey.tags.PathParametersTagsProvider;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* Class that registers all the needed metric application listeners to the Jersey environment for a
* module.
*
* <p>By default it registers the:
*
* <ol>
* <li>Meter listener, that meters HTTP requests
* <li>Counter listener, that counts HTTP requests
* </ol>
*
* Both listeners can be disabled by setting <code>stargate.metrics.http_meter_listener.enabled
* </code> or <code>stargate.metrics.http_counter_listener.enabled</code> to false.
*
* <p>Both listeners use default tag providers, module name and path parameters (if enabled). The
* meter listener adds {@link HttpMeterTagsProvider} in addition, that adds method, URI and status
* to the tags. The counter listener adds {@link HttpCounterTagsProvider} in addition, that adds
* only the exception tag.
*
* <p>Both of those providers extend on top of the {@link HttpMetricsTagProvider} that is passed as
* the constructor parameter to this class. However, each provider can ignore the global {@link
* HttpMetricsTagProvider} by setting <code>
* stargate.metrics.http_meter_listener.ignore_http_tags_provider</code> or <code>
* stargate.metrics.http_counter_listener.ignore_http_tags_provider</code> to true.
*/
public class MetricsBinder {

  private final Metrics metrics;
  private final HttpMetricsTagProvider httpMetricsTagProvider;
  private final String module;
  private final Collection<String> nonApiUriRegexes;
  private final MetricsListenerConfig meterListenerConfig;
  private final MetricsListenerConfig counterListenerConfig;

  /**
   * Default constructor with no non-API URI regexes. Uses {@link SystemPropsMetricsListenerConfig}
   * for metering and counting configuration.
   *
   * @param metrics {@link Metrics} instance.
   * @param httpMetricsTagProvider Global {@link HttpMetricsTagProvider} registered in the OSGi
   * @param module Module name
   */
  public MetricsBinder(
      Metrics metrics, HttpMetricsTagProvider httpMetricsTagProvider, String module) {
    this(metrics, httpMetricsTagProvider, module, Collections.emptyList());
  }

  /**
   * Default constructor. Uses {@link SystemPropsMetricsListenerConfig} for metering and counting
   * configuration.
   *
   * @param metrics {@link Metrics} instance.
   * @param httpMetricsTagProvider Global {@link HttpMetricsTagProvider} registered in the OSGi
   * @param module Module name
   * @param nonApiUriRegexes List of regexes for URIs that should be tagged with #module-other tag.
   */
  public MetricsBinder(
      Metrics metrics,
      HttpMetricsTagProvider httpMetricsTagProvider,
      String module,
      Collection<String> nonApiUriRegexes) {
    this(
        metrics,
        httpMetricsTagProvider,
        module,
        nonApiUriRegexes,
        new SystemPropsMetricsListenerConfig("stargate.metrics.http_meter_listener"),
        new SystemPropsMetricsListenerConfig("stargate.metrics.http_counter_listener"));
  }

  /**
   * Secondary constructor.
   *
   * @param metrics {@link Metrics} instance.
   * @param httpMetricsTagProvider Global {@link HttpMetricsTagProvider} registered in the OSGi
   * @param module Module name
   * @param nonApiUriRegexes List of regexes for URIs that should be tagged with #module-other tag.
   * @param meterListenerConfig config for metering HTTP requests
   * @param counterListenerConfig config for counting HTTP requests
   */
  public MetricsBinder(
      Metrics metrics,
      HttpMetricsTagProvider httpMetricsTagProvider,
      String module,
      Collection<String> nonApiUriRegexes,
      MetricsListenerConfig meterListenerConfig,
      MetricsListenerConfig counterListenerConfig) {
    this.metrics = metrics;
    this.httpMetricsTagProvider = httpMetricsTagProvider;
    this.module = module;
    this.nonApiUriRegexes = nonApiUriRegexes;
    this.meterListenerConfig = meterListenerConfig;
    this.counterListenerConfig = counterListenerConfig;
  }

  /**
   * Registers application listeners in the {@link JerseyEnvironment}, based on the {@link
   * #meterListenerConfig} and {@link #counterListenerConfig}.
   *
   * @param jersey {@link JerseyEnvironment}
   */
  public void register(JerseyEnvironment jersey) {
    // Meter listener: times HTTP requests under the http-server-requests metric.
    if (meterListenerConfig.isEnabled()) {
      JerseyTagsProvider meterTagsProvider =
          getMeterTagsProvider(
              meterListenerConfig, metrics, httpMetricsTagProvider, module, nonApiUriRegexes);

      MetricsApplicationEventListener listener =
          new MetricsApplicationEventListener(
              metrics.getMeterRegistry(),
              meterTagsProvider,
              StargateMetricConstants.METRIC_HTTP_SERVER_REQUESTS,
              true);

      jersey.register(listener);
    }

    // Counter listener: counts HTTP requests under the counter metric.
    if (counterListenerConfig.isEnabled()) {
      JerseyTagsProvider counterTagsProvider =
          getCounterTagsProvider(
              counterListenerConfig, metrics, httpMetricsTagProvider, module, nonApiUriRegexes);

      CounterApplicationEventListener listener =
          new CounterApplicationEventListener(
              metrics.getMeterRegistry(),
              counterTagsProvider,
              StargateMetricConstants.METRIC_HTTP_SERVER_REQUESTS_COUNTER);

      jersey.register(listener);
    }
  }

  /**
   * Builds the tags provider for the meter listener: the default providers plus {@link
   * HttpMeterTagsProvider}, which optionally wraps the global HTTP tag provider.
   */
  private static JerseyTagsProvider getMeterTagsProvider(
      MetricsListenerConfig config,
      Metrics metrics,
      HttpMetricsTagProvider httpMetricsTagProvider,
      String module,
      Collection<String> nonApiUriRegexes) {
    // resolve if http tag provider should be ignored or not
    HttpMeterTagsProvider resourceProvider =
        config.isIgnoreHttpMetricProvider()
            ? new HttpMeterTagsProvider()
            : new HttpMeterTagsProvider(httpMetricsTagProvider);

    return composeWithDefaults(resourceProvider, metrics, module, nonApiUriRegexes);
  }

  /**
   * Builds the tags provider for the counter listener: the default providers plus {@link
   * HttpCounterTagsProvider}, which optionally wraps the global HTTP tag provider.
   */
  private static JerseyTagsProvider getCounterTagsProvider(
      MetricsListenerConfig config,
      Metrics metrics,
      HttpMetricsTagProvider httpMetricsTagProvider,
      String module,
      Collection<String> nonApiUriRegexes) {
    // resolve if http tag provider should be ignored or not
    HttpCounterTagsProvider resourceProvider =
        config.isIgnoreHttpMetricProvider()
            ? new HttpCounterTagsProvider()
            : new HttpCounterTagsProvider(httpMetricsTagProvider);

    return composeWithDefaults(resourceProvider, metrics, module, nonApiUriRegexes);
  }

  /**
   * Shared composition step (previously duplicated in the two methods above): appends the
   * listener-specific provider to the default providers and wraps everything in a composite.
   */
  private static JerseyTagsProvider composeWithDefaults(
      JerseyTagsProvider resourceProvider,
      Metrics metrics,
      String module,
      Collection<String> nonApiUriRegexes) {
    List<JerseyTagsProvider> allProviders =
        new ArrayList<>(getDefaultTagsProvider(metrics, module, nonApiUriRegexes));
    allProviders.add(resourceProvider);

    // return composite containing all the providers
    return new CompositeJerseyTagsProvider(allProviders);
  }

  /**
   * Default providers applied by both listeners: the module tag, path parameters, headers, the
   * non-API-module tag and the docs-API-module tag.
   */
  private static List<JerseyTagsProvider> getDefaultTagsProvider(
      Metrics metrics, String module, Collection<String> nonApiUriRegexes) {
    ConstantTagsProvider defaultProvider = new ConstantTagsProvider(metrics.tagsForModule(module));
    PathParametersTagsProvider pathParametersProvider = new PathParametersTagsProvider();
    HeadersTagProvider headersTagProvider = new HeadersTagProvider();
    NonApiModuleTagsProvider nonApiModuleTagsProvider =
        new NonApiModuleTagsProvider(metrics, module, nonApiUriRegexes);
    DocsApiModuleTagsProvider docsApiProvider = new DocsApiModuleTagsProvider(metrics);

    return Arrays.asList(
        defaultProvider,
        pathParametersProvider,
        headersTagProvider,
        nonApiModuleTagsProvider,
        docsApiProvider);
  }
}
|
package com.telenav.osv.recorder.persistence;
import com.telenav.osv.data.sequence.model.LocalSequence;
import com.telenav.osv.utils.Size;
import io.reactivex.Completable;
import io.reactivex.schedulers.Schedulers;
/**
 * Interface that holds all the functionality of the local recording persistence.
 * Created by cameliao on 2/6/18.
 */
public interface RecordingPersistence {

    /**
     * Start method which handles all the setup for recording persistence.
     * This method should be called when a new sequence of frames is provided to the persistence.
     * The method runs by default on {@link Schedulers#single()}, which is the same thread for all the operations,
     * in order to prevent concurrency issues. Using this thread the sequence is synchronized across all the streams.
     * @param sequence the new sequence which will hold all the frames.
     * @param formatSize the size of the videos.
     * @param imageFormat the frame format received for encoding.
     * @return {@code Completable} which notifies the observers when the initialization has finished.
     */
    Completable start(LocalSequence sequence, Size formatSize, int imageFormat);

    /**
     * Saves a frame in a sequence to the local storage.
     * The method runs by default on {@link Schedulers#single()}, which is the same thread for all the operations,
     * in order to prevent concurrency issues. Using this thread the sequence is synchronized across all the streams.
     * @param frame the frame to be persisted.
     * @return {@code Completable} object to notify when the frame was saved.
     */
    Completable save(RecordingFrame frame);

    /**
     * Stop method which is responsible to release all the resources.
     * This method should be called when a sequence recording is finished.
     * The method runs by default on {@link Schedulers#single()}, which is the same thread for all the operations,
     * in order to prevent concurrency issues. Using this thread the sequence is synchronized across all the streams.
     * @return {@code Completable} signalling the outcome of the stop operation.
     */
    Completable stop();
}
|
"""
Automate the task of finding the information about the device operating system.
"""
# Import the necessary module
import platform
# Get the information about the device operating system
info = platform.uname()
print(info) # system, node, release, version, machine, processor |
<reponame>RainbowDragon/USACO_Silver_Java
/**
* USACO 2020 - Open - Problem 1 - Social Distancing
*
*/
import java.io.*;
import java.lang.*;
import java.util.*;
public class SocialDistancing {
public static String FileName = "socdist";
static ArrayList<Interval> intervals;
public static void main (String [] args) throws IOException {
// Input:
BufferedReader f = new BufferedReader(new FileReader(FileName + ".in"));
// Output:
PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(FileName + ".out")));
StringTokenizer st = new StringTokenizer(f.readLine());
int n = Integer.parseInt(st.nextToken());
int m = Integer.parseInt(st.nextToken());
intervals = new ArrayList<>();
long min = 1;
long max = 0;
for (int i = 0; i < m; i++)
{
st = new StringTokenizer(f.readLine());
long start = Long.parseLong(st.nextToken());
long end = Long.parseLong(st.nextToken());
intervals.add(new Interval(start, end));
max = Math.max(max, end);
}
intervals.sort(new IntervalComparator());
while (min != max) {
long mid = (min + max + 1) / 2;
if (isValid(n, mid)) {
min = mid;
} else {
max = mid - 1;
}
}
long result = min;
out.println(result);
out.close();
}
static boolean isValid(int n, long d) {
int count = 0;
long last = Long.MIN_VALUE;
for (Interval interval : intervals)
{
while (Math.max(last+d, interval.start) <= interval.end)
{
last = Math.max(last+d, interval.start);
count++;
}
if (count >= n) {
break;
}
}
return count >= n;
}
static class Interval {
public long start;
public long end;
public Interval (long start, long end) {
this.start = start;
this.end = end;
}
}
static class IntervalComparator implements Comparator<Interval> {
public int compare(Interval i1, Interval i2) {
return i1.start < i2.start ? -1 : 1;
}
}
} |
#compdef brew
#autoload
# Brew ZSH completion function
# Drop this somewhere in your $fpath (like /usr/share/zsh/site-functions)
# and rename it _brew
#
# altered from _fink
# Cache helpers: each one fills an array (declared by the dispatch code below)
# with the output of a single brew invocation.
_brew_all_formulae() {
  formulae=($(brew search))
}

_brew_installed_formulae() {
  installed_formulae=($(brew list))
}

_brew_installed_taps() {
  installed_taps=($(brew tap))
}

_brew_official_taps() {
  official_taps=($(brew tap --list-official))
}

_brew_pinned_taps() {
  pinned_taps=($(brew tap --list-pinned))
}

_brew_outdated_formulae() {
  outdated_formulae=($(brew outdated))
}
# Top-level brew subcommands offered for completion, as "name:description"
# pairs, plus any external/user commands reported by `brew commands`.
local -a _1st_arguments
_1st_arguments=(
'audit:check formulae for Homebrew coding style'
'cat:display formula file for a formula'
'cleanup:uninstall unused and old versions of packages'
'commands:show a list of commands'
'config:show homebrew and system configuration'
'create:create a new formula'
'deps:list dependencies and dependants of a formula'
'desc:display a description of a formula'
'doctor:audits your installation for common issues'
'edit:edit a formula'
'fetch:download formula resources to the cache'
'gist-logs:generate a gist of the full build logs'
'home:visit the homepage of a formula or the brew project'
'info:information about a formula'
'install:install a formula'
'reinstall:install a formula anew; re-using its current options'
'leaves:show installed formulae that are not dependencies of another installed formula'
'link:link a formula'
'linkapps:symlink .app bundles provided by formulae into /Applications'
'list:list files in a formula or not-installed formulae'
'log:git commit log for a formula'
'missing:check all installed formulae for missing dependencies.'
'migrate:migrate renamed formula to new name'
'outdated:list formulae for which a newer version is available'
'pin:pin specified formulae'
'postinstall:perform post_install for a given formula'
'prune:remove dead links'
'remove:remove a formula'
'search:search for a formula (/regex/ or string)'
'switch:switch between different versions of a formula'
'tap:tap a new formula repository from GitHub, or list existing taps'
'tap-info:information about a tap'
'tap-pin:pin a tap'
'tap-unpin:unpin a tap'
'test-bot:test a formula and build a bottle'
'uninstall:uninstall a formula'
'unlink:unlink a formula'
'unlinkapps:remove symlinked .app bundles provided by formulae from /Applications'
'unpin:unpin specified formulae'
'untap:remove a tapped repository'
'update:fetch latest version of Homebrew and all formulae'
'upgrade:upgrade outdated formulae'
'uses:show formulae which depend on a formula'
$(brew commands --quiet --include-aliases)
)
local expl
local -a formulae installed_formulae installed_taps official_taps outdated_formulae

# Global brew options; `*:: :->subcmds` hands the remaining words to the
# subcommand dispatch below.
_arguments \
'(-v)-v[verbose]' \
'(--cellar)--cellar[brew cellar]' \
'(--env)--env[brew environment]' \
'(--repository)--repository[brew repository]' \
'(--version)--version[version information]' \
'(--prefix)--prefix[where brew lives on this system]' \
'(--cache)--cache[brew cache]' \
'*:: :->subcmds' && return 0

# First word: complete the subcommand name itself.
if (( CURRENT == 1 )); then
_describe -t commands "brew subcommand" _1st_arguments
return
fi

# Subsequent words: complete arguments according to the chosen subcommand.
case "$words[1]" in
install|reinstall|audit|home|homepage|log|info|abv|uses|cat|deps|desc|edit|options|switch)
_brew_all_formulae
_wanted formulae expl 'all formulae' compadd -a formulae ;;
linkapps|unlinkapps)
_arguments \
'(--local)--local[operate on ~/Applications instead of /Applications]' \
'1: :->forms' && return 0
if [[ "$state" == forms ]]; then
_brew_installed_formulae
_wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae
fi ;;
list|ls)
_arguments \
'(--unbrewed)--unbrewed[files in brew --prefix not controlled by brew]' \
'(--pinned)--pinned[list all versions of pinned formulae]' \
'(--versions)--versions[list all installed versions of a formula]' \
'1: :->forms' && return 0
if [[ "$state" == forms ]]; then
_brew_installed_formulae
_wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae
fi ;;
remove|rm|uninstall|unlink|cleanup|link|ln|pin|unpin)
_brew_installed_formulae
_wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae ;;
search|-S)
_arguments \
'(--macports)--macports[search the macports repository]' \
'(--fink)--fink[search the fink repository]' ;;
untap|tap-info|tap-pin)
_brew_installed_taps
_wanted installed_taps expl 'installed taps' compadd -a installed_taps ;;
tap)
_brew_official_taps
_wanted official_taps expl 'official taps' compadd -a official_taps ;;
tap-unpin)
_brew_pinned_taps
_wanted pinned_taps expl 'pinned taps' compadd -a pinned_taps ;;
upgrade)
_brew_outdated_formulae
_wanted outdated_formulae expl 'outdated formulae' compadd -a outdated_formulae ;;
esac
|
// Search index data — appears to be generated by Doxygen (do not edit by hand).
// Each entry maps a lowercase search key to [display name, [target html
// anchor, flags, owning scope]] used by the documentation search UI.
var searchData=
[
  ['short_5fregions',['short_regions',['../structgeopm_1_1_application_sampler_imp_1_1m__process__s.html#a5603b38cd40ab57298a3b601710e0ea7',1,'geopm::ApplicationSamplerImp::m_process_s']]],
  ['signal',['signal',['../structgeopm_1_1record__s.html#a1b0528511764424b1ce8a5c7d7bb8515',1,'geopm::record_s']]],
  ['subcommand',['subcommand',['../structgeopm_1_1sst__mbox__interface__s.html#a1a210ba819a735ac6e72fcfe12a3c9a0',1,'geopm::sst_mbox_interface_s']]]
];
|
#!/bin/bash
#in this sh, the job are:
#1\a while loop to create path to each sub simulation
#2\in each sub simulation, cp the auto_QF_multi_result_gathering_no_multi.sh there and run
#3\output the result in to the whole summary for all the sub simulations.
#4\use an R to do calculation on the whole summary and output the result into a table and a few related figures(bar plot) in PDF.
#5\input the subgroup data into corresponding tables.
# ---- argument validation ----------------------------------------------------
# Expect exactly: simulation matrix path, fusion simulator path, folder suffix.
if [ $# -ne 3 ]
then
   echo ""
   echo "Usage: auto_QF_multi_result_gathering_wrapper.sh simulation_matrix_num_path fusion_simulator_path folder_name_extention"
   echo ""
   echo "simulation_matrix_num_path - matrix of simulation groups."
   echo "fusion_simulator_path - path to fusion simulator"
   echo "folder_name_extention - _QF or _QF_5k_expanded in most cases"
   echo "example: auto_QF_multi_result_gathering_wrapper.sh /restricted/projectnb/montilab-p/LinGA_unprotected/ytan/ENCODE_simulation_data/simulation_only_split_matrix.sim_out.txt /usr3/graduate/ytan7/CBMrepository/utilities/tags/Fusion_Simulator/ _QF"
   exit 1
fi
# Robustness fix: quote "$1" so paths containing spaces do not break the test.
if [ ! -f "$1" ]
then
   echo ""
   echo "The file $1 does not exist"
   echo ""
   exit 1
fi
simulation_matrix_num_path="$1"
# Always append a trailing slash so later concatenations form valid paths.
fusion_simulator_path="$2/"
folder_name_extention="$3"
#if [ -s QF_all_sims_summary_table.txt ]
#then
# rm -rf QF_all_sims_summary_table.txt
#fi
#echo header
#echo -e "split_span\tquery_num\tquery_found\tfusion_num\tfusion_all_found\tfusion_TP\tsplit_all_found\tsplit_TP\tspan_all_found\tspan_TP" > QF_all_sims_summary_table.txt
#output header
# Write the header row of the overall summary table; one data row per
# simulation is appended inside the main loop below.
echo -e "NUM of perfectly found\tNUM of fusion found within 10bp of the simulated breakpoint\tNUM of False Positive found" > QF_simulation_all_summary_no_multi.txt
# Build the "_<ID1>_<ID2>" folder-name suffix from the first row of the
# simulation matrix (columns 1 and 2 are the two group ids).
ID1="$(head -1 $simulation_matrix_num_path | cut -f1)";
ID2="$(head -1 $simulation_matrix_num_path | cut -f2)";
first_ID="_"$[ID1]"_"$[ID2];
#group_num=`wc -l "coverage_on_exons.txt"$first_ID".expression_groups" | cut -f1 -d" "`
#build the headers first
#for i in `seq 1 $group_num`;
#do
# for j in `seq 1 $group_num`;
# do
# echo -e "GroupID\tTruePos\tTotalFusion" > "QF_all_sims_summary_"$i"_"$j"_table.txt"
# done
#done
#for job 1,2,3
# Main loop: one iteration per row of the simulation matrix. Each row names a
# sub-simulation folder; run the gathering script inside it and append its
# three summary numbers to the overall table.
while read myfile1;
do
ID1="$(echo $myfile1 | cut -f1 -d" ")";
ID2="$(echo $myfile1 | cut -f2 -d" ")";
ID="_"$[ID1]"_"$[ID2];
folder_path="coverage_on_exons.txt"$ID$folder_name_extention
#job1: enter the sub-simulation's results folder
cd $folder_path"/results"
pwd
#job2: run the per-simulation gathering script
$fusion_simulator_path"auto_QF_multi_result_gathering_no_multi.sh" $ID1 $ID2 $fusion_simulator_path
# number of queries = number of entries in temp_folder_list
num_query="$(wc -l temp_folder_list |cut -f1 -d" ")"
#job5
#input data to subgroup table
#for i in `seq 1 $group_num`;
#do
#       for j in `seq 1 $group_num`;
#       do
#               row_num=`expr $i \* $group_num + $j - $group_num `
#               TP=`sed -n "${row_num}p" QF_subgroup_summary_table.txt | cut -f2`
#               TF=`sed -n "${row_num}p" QF_subgroup_summary_table.txt | cut -f3`
#               echo -e "$ID\t$TP\t$TF" >> "../../QF_all_sims_summary_"$i"_"$j"_table.txt"
#       done
#done
#job3: back to the top-level folder and harvest this simulation's numbers
cd ../../
#grep the needed information: lines 1-3 of the per-simulation summary hold
#perfect-found / within-10bp / false-positive counts after the colon.
PF=`sed -n '1p' "QF_on"$ID"_ref_bp_summary_no_multi" |cut -f2 -d":" | cut -f2 -d" "`
F10=`sed -n '2p' "QF_on"$ID"_ref_bp_summary_no_multi" |cut -f2 -d":" | cut -f2 -d" "`
FP=`sed -n '3p' "QF_on"$ID"_ref_bp_summary_no_multi" |cut -f2 -d":" | cut -f2 -d" "`
echo -e $ID"\t"$PF"\t"$F10"\t"$FP >> QF_simulation_all_summary_no_multi.txt
#echo $i"/QF_simulation_all_summary_no_multi.txt">>../file_defuse_summary_list.txt
#line1=`sed -ne 's/\t/\\\t/g; 2p' $folder_path"/results/QF_sim_summary_table.txt"`
#echo -e $ID"\t"$line1 >> QF_all_sims_summary_table.txt
done < $simulation_matrix_num_path
#job4: plot the aggregated table (num_query is taken from the LAST loop
#iteration — presumably all simulations share the same query count; verify.)
Rscript $fusion_simulator_path"barplot_auto_defuse_multi_summary_only_split.R" file.in=QF_simulation_all_summary_no_multi.txt file.matrix=$simulation_matrix_num_path file.out=QF_simulation_all_summary_no_multi num_fusion.in=$num_query
#Rscript $fusion_simulator_path"barplot_auto_QF_multi_result_gathering_only_split.R" file.matrix=$simulation_matrix_num_path file.in=QF_all_sims_summary_table.txt file.out=QF_all_sims_summary
#for i in `seq 1 $group_num`;
#do
# for j in `seq 1 $group_num`;
# do
# Rscript $fusion_simulator_path"barplot_auto_QF_subgroup_wrapup_only_split.R" file.matrix=$simulation_matrix_num_path file.in="QF_all_sims_summary_"$i"_"$j"_table.txt" file.out="QF_all_sims_summary_"$i"_"$j
# done
#done
|
#include "precompiled.h"
#pragma hdrstop
#include "CameraComponent.h"
#include <utility>
#include "Modules/ECS/ECS.h"
// Default constructor: no camera attached yet (m_camera left empty).
// Idiom: an empty user-provided body is equivalently expressed as defaulted.
CameraComponent::CameraComponent() = default;
// Attaches the camera exposed by this component. Taken by value and moved,
// so callers can hand over a temporary without an extra copy.
void CameraComponent::setCamera(std::shared_ptr<Camera> camera)
{
    m_camera = std::move(camera);
}

// Returns the attached camera (empty if setCamera was never called).
std::shared_ptr<Camera> CameraComponent::getCamera() const
{
    return m_camera;
}
// Snapshots the camera state into a serializable BindingParameters record:
// position, a look-at point derived from the facing direction, clip planes
// and the vertical field of view in degrees.
CameraComponent::BindingParameters CameraComponent::getBindingParameters() const
{
    // Resolve the transform once instead of four separate lookups.
    auto transform = m_camera->getTransform();

    return CameraComponent::BindingParameters{
        .position = transform->getPosition(),
        .lookAtPoint = transform->getPosition() + transform->getFrontDirection(),
        .nearDistance = m_camera->getNearClipDistance(),
        .farDistance = m_camera->getFarClipDistance(),
        .fov = glm::degrees(m_camera->getFOVy()),
    };
}
// Stores the binding parameters later consumed by bindToObject().
CameraComponentBinder::CameraComponentBinder(const ComponentBindingParameters& componentParameters)
    : m_bindingParameters(componentParameters)
{
}
// Creates a Camera configured from the stored binding parameters and attaches
// it to the game object via a freshly added CameraComponent.
void CameraComponentBinder::bindToObject(GameObject& gameObject)
{
    auto camera = std::make_shared<Camera>();

    const auto& params = m_bindingParameters;
    camera->getTransform()->setPosition(params.position);
    camera->getTransform()->lookAt(params.lookAtPoint);
    camera->setNearClipDistance(params.nearDistance);
    camera->setFarClipDistance(params.farDistance);
    camera->setFOVy(glm::radians(params.fov));

    gameObject.addComponent<CameraComponent>()->setCamera(camera);
}
|
1. Polar bear - Arctic regions
2. Gorilla - African tropical forests
3. Tiger - Sub-continental forests
4. Giraffe - African savannas
5. Elephant - African savannas and tropical forests
6. Penguin - Antarctica and Southern Hemisphere coastal regions
7. Panda - Sub-tropical bamboo forests in China
8. Sea turtle - Coastal waters around the world
9. Salmon - Oceans, rivers and lakes
10. Honey badger - Deserts, grasslands and savannas |
import { getRepository, Repository } from 'typeorm';
import IOrderRepository from '@modules/customer/Repositories/IOrderRepository';
import IOrderDTO from '@modules/customer/dtos/ICreateOrderDTO';
import orders from '@modules/customer/infra/typeorm/models/Order';
import order from '@modules/customer/infra/typeorm/models/Order';
/**
 * TypeORM-backed repository for Order entities, implementing IOrderRepository.
 */
class OrderRepository implements IOrderRepository{

    private ormRepository:Repository<orders>;

    constructor(){
        this.ormRepository=getRepository(orders);
    }

    /**
     * Looks an order up by its primary key.
     * @returns the order, or undefined when no match exists.
     */
    public async findById(id:string): Promise<orders | undefined>{
        return this.ormRepository.findOne({
            where:{id}
        });
    }

    /**
     * Looks an order up by email.
     * NOTE(review): assumes the orders table has an `email` column — verify
     * against the Order model.
     */
    public async findByEmail(email:string): Promise<orders | undefined>{
        return this.ormRepository.findOne({
            where:{email}
        });
    }

    /** Looks up the first order belonging to the given customer. */
    public async findByMarket(customer_id:string): Promise<orders | undefined>{
        return this.ormRepository.findOne({
            where:{customer_id}
        });
    }

    /** Creates and persists a new order from the given DTO fields. */
    public async create({
        customer_id,
        restaurant_id,
        items,
        subtotal,
        fees,
        delivery_price,
        total,}:IOrderDTO):Promise<orders>{
        const subs=this.ormRepository.create({
            customer_id,
            restaurant_id,
            items,
            subtotal,
            fees,
            delivery_price,
            total
        });

        await this.ormRepository.save(subs);

        return subs;
    }

    /**
     * Deletes an order by id.
     * Bug fix: the previous implementation first loaded the entity and then
     * passed the entity object to Repository.delete() behind a @ts-ignore;
     * delete() accepts the id (criteria) directly, so the extra query and the
     * suppression are unnecessary.
     */
    public async delete(id:string): Promise<any>{
        await this.ormRepository.delete(id);
    }

    /** Persists changes to an existing order and returns it. */
    public async save(subs:orders):Promise<orders>{
        await this.ormRepository.save(subs)
        return subs;
    }

    /**
     * NOTE(review): passes an entity object to Repository.find(), which
     * expects find options, and returns the input unchanged — presumably a
     * placeholder; behavior preserved, verify against callers.
     */
    public async find(subs:orders):Promise<orders>{
        await this.ormRepository.find(subs);
        return subs;
    }
}

export default OrderRepository;
|
#!/bin/bash
# Container entrypoint: stages nginx configuration from a mounted ConfigMap
# folder and runs optional setup hooks. Exits on the first failing command.
set -e

# Start from an empty conf.d so stale configs baked into the image are dropped.
rm -rf /etc/nginx/conf.d/*

# Optionally sync content from GCS before configuring nginx.
if [ "$ENABLE_GCS_SYNC" = "true" ]
then
    /downloader
fi

# Copy the ConfigMap contents into conf.d.
# Robustness fix: quote the folder variable so paths with spaces work.
cp -R "$CONFIGMAP_FOLDER"/. /etc/nginx/conf.d/

# A top-level nginx.conf shipped in the ConfigMap replaces the main config.
if [ -f "/etc/nginx/conf.d/nginx.conf" ]
then
    rm /etc/nginx/nginx.conf
    mv /etc/nginx/conf.d/nginx.conf /etc/nginx/nginx.conf
fi

/enable_modules.sh

# Optional pre-start hook shipped in the ConfigMap.
# NOTE(review): assumes before.sh carries the executable bit — verify.
if [ -f "/etc/nginx/conf.d/before.sh" ]
then
    mv /etc/nginx/conf.d/before.sh /before.sh
    /before.sh
fi
const express = require("express");
const app = express();

app.set('view engine', 'ejs');

// Render the homepage.
app.get("/", (req, res) => {
    res.render('index.ejs');
})

// Bug fix: `port = 3000 || process.env.port` always evaluated to 3000 (the
// truthy left operand short-circuits), so the environment override never took
// effect; it also leaked an implicit global. Env value takes priority now,
// falling back to 3000.
const port = process.env.port || 3000;

app.listen(port, () =>
    console.log(`running on port ${port}`)
)
#!/bin/bash
# Locate a pmt.csv results file from the single required argument, which may
# be the file itself or a directory containing pmt.csv.
INF=$1
if [ "$INF" == "" ]; then
  # Bug fix: this echo (and the one below) was missing its opening quote, so
  # the stray closing `"` started an unterminated string that swallowed the
  # following lines (including `exit 1`).
  echo "$0.$LINENO required arg to $0 is missing. Should be path of pmt.csv file"
  exit 1
fi
if [ ! -e "$INF" ]; then
  echo "$0.$LINENO didn't find pmt.csv filename $INF"
  exit 1
fi
# If a directory was given, look for pmt.csv inside it.
if [ -d "$INF" ]; then
  if [ -e "$INF/pmt.csv" ]; then
    INF=$INF/pmt.csv
  else
    echo "$0.$LINENO didn't find pmt.csv file in dir $INF"
    exit 1
  fi
fi
DIRNM=`dirname $INF`
# Sample of the expected pmt.csv input format:
# Area,Value,Unit,Description,Sample,Stddev,Multiplex,Bottleneck,Idle
# Frontend_Bound,28.4,% Slots,,frontend_retired.latency_ge_4:pp,0.0,3.71,,
# Bad_Speculation,0.0,% Slots <,This category represents fraction of slots wasted due to incorrect speculations...,,0.0,3.71,,
# Backend_Bound,21.7,% Slots,,,0.0,3.71,,
# Retiring,49.9,% Slots <,This category represents fraction of slots utilized by useful work i...,uops_retired.retire_slots,0.0,3.71,,
# CPU_Utilization,1.0,Metric,Average CPU Utilization,,0.0,3.7,,
# Retiring.Light_Operations.Other_Light_Ops,100.0,% Uops <,This metric represents non-floating-point (FP) uop fraction the CPU has executed...,,0.0,3.71,,
awk -F, -v fmt="%8.3f" -v dirnm="$DIRNM" -v dlm=" " '
BEGIN{
n_lkup=split("time mem_bw %busy frqGHz LatCycls Lat(ns) L3MssBW IPC %retiring %bad_spec %frt_end %bck_end %cyc_be %cyc_uopRet pkg_watts LatUnc(ns) LatUncCycls LatUncBW %L3_miss bw_rmt", list, " ");
#printf("n_lkup= %d\n", n_lkup);
for (i=1; i <= n_lkup; i++) { lkup[list[i]] = i; }
add_xtra = 0;
}
/metric_memory bandwidth read .MB\/sec./{ i=lkup["mem_bw"]; sv[i] = sprintf(fmt, 0.001*$2); }
/metric_Average LLC data read miss latency .in ns./{ i = lkup["LatCycls"]; sv[i] = sprintf(fmt, frq*$2);
i = lkup["Lat(ns)"]; sv[i] = sprintf(fmt, $2);
i = lkup["L3MssBW"]; sv[i] = "";
}
/metric_CPI/{ i = lkup["IPC"]; sv[i] = sprintf(fmt, 1.0/$2); }
/metric_package power/{ i = lkup["pkg_watts"]; sv[i] = sprintf(fmt, $2); }
FNR == 1 { if ($1 == "CPUs") {printf("got_cpus\n"); add_xtra = 1;}}
add_xtra == 1 {if (add_xtra == 1 && index($1, "-T1") > 1) { next; }}
$(1+add_xtra) == "Frequency"{ i = lkup["frqGHz"]; frq = $(2+add_xtra)+0.0; sv[i] += sprintf(fmt, frq); ++num[i]; }
$(1+add_xtra) == "CPU_Utilization"{ i=lkup["%busy"]; sv[i] += sprintf(fmt, $(2+add_xtra)*100); ++num[i]; }
$(1+add_xtra) == "Retiring"{ i = lkup["%retiring"]; sv[i] += sprintf(fmt, $(2+add_xtra)); ++num[i]; }
$(1+add_xtra) == "Bad_Speculation"{ i = lkup["%bad_spec"]; sv[i] += sprintf(fmt, $(2+add_xtra)); ++num[i]; }
$(1+add_xtra) == "Frontend_Bound"{ i = lkup["%frt_end"]; sv[i] += sprintf(fmt, $(2+add_xtra)); ++num[i]; }
$(1+add_xtra) == "Backend_Bound"{ i = lkup["%bck_end"]; sv[i] += sprintf(fmt, $(2+add_xtra)); ++num[i]; }
END {
# avg_tot 0.023 98.947 2.700 373.124 169.602 0.001 0.998 49.922 0.006 28.375 21.698 0.653 99.216 132.456 85.557 230.987 0.005 21.772 0.004
# time mem_bw %busy frqGHz LatCycls Lat(ns) L3MssBW IPC %retiring %bad_spec %frt_end %bck_end %cyc_be %cyc_uopRet pkg_watts LatUnc(ns) LatUncCycls LatUncBW %L3_miss bw_rmt
0.023 98.712 2.700 451.029 167.058 0.998 49.860 0.023 28.334 21.783 132.136
for (j=1; j <= 2; j++) {
for (i=1; i <= n_lkup; i++) {
v = sv[i];
n = num[i];
if (n > 0) { v = v / n ; }
if (j == 1 && v != "") {
printf("%s%8s", dlm, list[i]);
}
if (j == 2 && v != "") {
v = (dlm == " " && v == "" ? "_" : v);
printf("%s%8s", dlm, v);
}
}
printf("%s%s%s%s\n", dlm, (j==1?"directory":dirnm), dlm, (j==1 ? "_rk0" : "_rv0"));
}
}
' $INF
|
<filename>src/Main.java
/**
* Created by IntelliJ IDEA.
* User: swyna
* Date: Jun 3, 2011
* Time: 12:21:45 AM
* To change this template use File | Settings | File Templates.
*/
/**
 * Application entry point: constructs a {@code Server} instance (defined
 * elsewhere in the project), whose constructor is expected to start the
 * service.
 */
public class Main {
    public static void main(String[] args) {
        // NOTE(review): the Server constructor appears to do all the work;
        // the reference is intentionally discarded.
        new Server();
    }
}
|
<reponame>fujunwei/dldt
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include <string.h>
#include <ie_builders.hpp>
#include <builders/ie_const_layer.hpp>
#include "builder_test.hpp"
using namespace testing;
using namespace InferenceEngine;
// Fixture from builder_test.hpp (provides the generateBlob() helper).
class ConstLayerBuilderTest : public BuilderTestCommon {};

// A ConstLayer with a data blob attached must be accepted by the network
// builder, and the stored layer must pass validation.
TEST_F(ConstLayerBuilderTest, getExistsLayerFromNetworkBuilder) {
    Builder::Network net("network");
    Builder::ConstLayer layer("const layer");
    // Attach a 1-D FP32 blob of 3 elements as the constant's payload.
    layer.setData(generateBlob(Precision::FP32, {3}, Layout::C));
    const size_t ind = net.addLayer(layer);
    ASSERT_NO_THROW(net.getLayer(ind)->validate(false));
}
// Adding a ConstLayer without calling setData() first must be rejected
// with an InferenceEngineException.
TEST_F(ConstLayerBuilderTest, cannotCreateLayerWithoutData) {
    Builder::Network net("network");
    Builder::ConstLayer layer("const layer");
    ASSERT_THROW(net.addLayer(layer),
                 InferenceEngine::details::InferenceEngineException);
}
<reponame>daniel-beck-bot/loadcomplete-plugin<gh_stars>0
/*
* The MIT License
*
* Copyright (c) 2018, SmartBear Software
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.smartbear.jenkins.plugins.loadcomplete;
import hudson.FilePath;
import hudson.model.Run;
import java.io.File;
import java.io.IOException;
/**
* @author <NAME>
*/
public class Workspace {
private final FilePath slaveWorkspacePath;
private final String logId;
private final FilePath slaveZIPFilePath;
private final FilePath slavePDFFilePath;
private final FilePath slaveMHTFilePath;
private final FilePath masterZIPFilePath;
private final FilePath masterPDFFilePath;
private final FilePath masterMHTFilePath;
private final FilePath slaveErrorFilePath;
private final FilePath masterLogDirectory;
public Workspace(Run<?, ?> run, FilePath filePath) throws IOException, InterruptedException {
this.slaveWorkspacePath = getSlaveWorkspace(filePath);
this.logId = Long.toString(System.currentTimeMillis() % 10000000);
String zipName = this.logId + Constants.ZIP_FILE_EXTENSION;
String mhtName = this.logId + Constants.MHT_FILE_EXTENSION;
String pdfName = this.logId + Constants.PDF_FILE_EXTENSION;
slaveZIPFilePath = new FilePath(slaveWorkspacePath, zipName);
slavePDFFilePath = new FilePath(slaveWorkspacePath, pdfName);
slaveMHTFilePath = new FilePath(slaveWorkspacePath, mhtName);
this.masterLogDirectory = getMasterLogDirectory(run);
masterZIPFilePath = new FilePath(masterLogDirectory, zipName);
masterPDFFilePath = new FilePath(masterLogDirectory, pdfName);
masterMHTFilePath = new FilePath(masterLogDirectory, mhtName);
this.slaveErrorFilePath = new FilePath(slaveWorkspacePath, this.logId + Constants.ERROR_FILE_EXTENSION);
}
private FilePath getMasterLogDirectory(Run<?, ?> run) throws IOException, InterruptedException {
String buildDir = run.getRootDir().getAbsolutePath();
FilePath masterLogDirectory = new FilePath(new File(buildDir +
File.separator + Constants.REPORTS_DIRECTORY_NAME));
masterLogDirectory.mkdirs();
return masterLogDirectory;
}
private FilePath getSlaveWorkspace(FilePath filePath) throws IOException, InterruptedException {
if (filePath == null) {
throw new IOException(Messages.LCTestBuilder_WorkspaceNotSpecified());
}
filePath.mkdirs();
return filePath.absolutize();
}
public FilePath getSlaveWorkspacePath() {
return slaveWorkspacePath;
}
public String getLogId() {
return logId;
}
public FilePath getSlaveZIPFilePath() {
return slaveZIPFilePath;
}
public FilePath getSlavePDFFilePath() {
return slavePDFFilePath;
}
public FilePath getSlaveMHTFilePath() {
return slaveMHTFilePath;
}
public FilePath getMasterZIPFilePath() {
return masterZIPFilePath;
}
public FilePath getMasterPDFFilePath() {
return masterPDFFilePath;
}
public FilePath getMasterMHTFilePath() {
return masterMHTFilePath;
}
public FilePath getSlaveErrorFilePath() {
return slaveErrorFilePath;
}
public FilePath getMasterLogDirectory() {
return masterLogDirectory;
}
} |
<gh_stars>0
import pandas as pd
from tqdm import tqdm
from model.QACGBERT import *
from util.tokenization import *
from torch.utils.data import DataLoader, TensorDataset
import random
import warnings
warnings.filterwarnings('ignore')
context_id_map_fiqa = {'stock': 0,
'corporate': 1,
'market': 2,
'economy': 3}
class InputExample(object):
    """One raw example: a unique id, one or two text segments, and a label."""

    def __init__(self, guid, text_a, text_b=None, label=None):
        # guid: unique example id; text_a: first segment; text_b: optional
        # second segment (context); label: optional target value.
        self.guid, self.text_a = guid, text_a
        self.text_b, self.label = text_b, label
class InputFeatures(object):
    """Tensor-ready features for one example."""

    def __init__(self, input_ids, input_mask, segment_ids, score, seq_len, context_ids):
        self.input_ids = input_ids      # padded wordpiece ids
        self.input_mask = input_mask    # 1 = real token, 0 = padding
        self.segment_ids = segment_ids  # 0 = sentence A, 1 = sentence B
        self.score = score              # regression target
        self.seq_len = seq_len          # length before padding
        self.context_ids = context_ids  # padded context-category ids
def convert_to_unicode(text):
    """Return ``text`` as a native ``str``.

    ``str`` input is returned unchanged; ``bytes`` is decoded as UTF-8 with
    undecodable bytes dropped (``errors="ignore"``).  Any other type raises
    ``ValueError``.

    Fix: the original routed through ``six.PY3`` and raised
    "Not running on Python 3" whenever six reported otherwise; on Python 3
    the check is redundant and needlessly couples this helper to six.
    """
    if isinstance(text, str):
        return text
    if isinstance(text, bytes):
        return text.decode("utf-8", "ignore")
    raise ValueError("Unsupported string type: %s" % (type(text)))
def get_test_examples(path):
    """Read a header-less CSV at ``path`` and wrap each row as an InputExample.

    Column 2 becomes text_a (the sentence) and column 1 becomes text_b (the
    context); the label is fixed at 0.0 since these are unscored test rows.
    """
    rows = pd.read_csv(path, header=None).values

    def _create_examples(lines, set_type):
        examples = []
        for idx, line in enumerate(lines):
            examples.append(InputExample(
                guid="%s-%s" % (set_type, idx),
                text_a=convert_to_unicode(str(line[2])),
                text_b=convert_to_unicode(str(line[1])),
                label=float(0)))
        return examples

    return _create_examples(rows, "test")
def truncate_seq_pair(tokens_a, tokens_b, max_length):
    """Trim the two token lists in place until their combined length fits
    within ``max_length``, always popping from the (strictly) longer list so
    both ends up truncated as evenly as possible."""
    while len(tokens_a) + len(tokens_b) > max_length:
        longer = tokens_a if len(tokens_a) > len(tokens_b) else tokens_b
        longer.pop()
def convert_examples_to_features(examples, max_seq_length,
                                 tokenizer, max_context_length,
                                 context_standalone, args):
    """Convert InputExamples into padded InputFeatures.

    Builds BERT-style inputs: "[CLS] text_a [SEP]" plus, when text_b exists
    and context_standalone is False, "text_b [SEP]" as segment 1.  text_b is
    additionally mapped to a context id via context_id_map_fiqa.  Token ids,
    mask and segment ids are zero-padded to max_seq_length; context ids to
    max_context_length.
    """
    features = []
    for (ex_index, example) in enumerate(tqdm(examples)):
        tokens_a = tokenizer.tokenize(example.text_a)
        tokens_b = None
        if example.text_b:
            tokens_b = tokenizer.tokenize(example.text_b)
        # NOTE(review): tokens_context re-tokenizes text_b identically to
        # tokens_b; below it only acts as a flag for emitting context_ids.
        tokens_context = None
        if example.text_b:
            tokens_context = tokenizer.tokenize(example.text_b)
        if tokens_b and not context_standalone:
            # Reserve 3 slots for [CLS], [SEP], [SEP].
            truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
        else:
            # Reserve 2 slots for [CLS] and [SEP].
            if len(tokens_a) > max_seq_length - 2:
                tokens_a = tokens_a[0:(max_seq_length - 2)]
        tokens = []
        segment_ids = []
        tokens.append("[CLS]")
        segment_ids.append(0)
        for token in tokens_a:
            tokens.append(token)
            segment_ids.append(0)
        tokens.append("[SEP]")
        segment_ids.append(0)
        # Append text_b as segment 1 unless the context is kept standalone.
        if tokens_b and not context_standalone:
            for token in tokens_b:
                tokens.append(token)
                segment_ids.append(1)
            tokens.append("[SEP]")
            segment_ids.append(1)
        input_ids = tokenizer.convert_tokens_to_ids(tokens)
        context_ids = []
        if tokens_context:
            # NOTE(review): text_b is used verbatim as the lookup key, so it
            # must be one of the FiQA context strings — confirm with callers.
            context_ids = [context_id_map_fiqa[example.text_b]]
        # The mask has 1 for real tokens and 0 for padding tokens. Only real
        # tokens are attended to.
        input_mask = [1] * len(input_ids)
        # Zero-pad up to the sequence length.
        seq_len = len(input_ids)
        while len(input_ids) < max_seq_length:
            input_ids.append(0)
            input_mask.append(0)
            segment_ids.append(0)
        while len(context_ids) < max_context_length:
            context_ids.append(0)
        assert len(input_ids) == max_seq_length
        assert len(input_mask) == max_seq_length
        assert len(segment_ids) == max_seq_length
        assert len(context_ids) == max_context_length
        features.append(
            InputFeatures(
                input_ids=input_ids,
                input_mask=input_mask,
                segment_ids=segment_ids,
                score=example.label,
                seq_len=seq_len,
                context_ids=context_ids))
    return features
def get_model_and_tokenizer(vocab_file,
                            bert_config_file=None, init_checkpoint=None,
                            do_lower_case=True,
                            init_lrp=False):
    """Build the FullTokenizer and the QACGBertForSequenceScore model.

    vocab_file: wordpiece vocabulary for the tokenizer.
    bert_config_file: JSON config path; when None, BERT-base hyperparameters
        are used.
    init_checkpoint: optional weights path.  Paths containing "checkpoint"
        are treated as full-model state dicts (DataParallel "module."
        prefixes are stripped); otherwise the file is loaded into the BERT
        encoder only, non-strictly.
    init_lrp: forwarded to the model constructor.
    Returns (model, tokenizer).
    """
    tokenizer = FullTokenizer(
        vocab_file=vocab_file, do_lower_case=do_lower_case, pretrain=False)
    if bert_config_file is not None:
        bert_config = BertConfig.from_json_file(bert_config_file)
    else:
        # Fall back to BERT-base hyperparameters.
        bert_config = BertConfig(
            hidden_size=768,
            num_hidden_layers=12,
            num_attention_heads=12,
            intermediate_size=3072,
            hidden_act="gelu",
            hidden_dropout_prob=0.1,
            attention_probs_dropout_prob=0.1,
            max_position_embeddings=512,
            type_vocab_size=2,
            initializer_range=0.02
        )
    # Keep the config's vocab size in sync with the tokenizer's vocabulary.
    bert_config.vocab_size = len(tokenizer.vocab)
    model = QACGBertForSequenceScore(
        bert_config,
        init_weight=True,
        init_lrp=init_lrp)
    if init_checkpoint is not None:
        if "checkpoint" in init_checkpoint:
            state_dict = torch.load(init_checkpoint, map_location='cpu')
            from collections import OrderedDict
            new_state_dict = OrderedDict()
            for k, v in state_dict.items():
                if k.startswith('module.'):
                    # Strip the DataParallel wrapper prefix.
                    name = k[7:]
                    new_state_dict[name] = v
                else:
                    new_state_dict[k] = v
            model.load_state_dict(new_state_dict)
        else:
            # Encoder-only weights; strict=False tolerates missing heads.
            model.bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu'), strict=False)
    return model, tokenizer
def system_setups(args):
    """Pick the torch device, seed all RNGs, and validate max_seq_length.

    Returns (device, n_gpu).  local_rank == -1 selects single-process mode
    (CUDA if available unless --no_cuda); otherwise one GPU per process with
    NCCL distributed initialization.
    """
    # system related setups
    if args.local_rank == -1 or args.no_cuda:
        device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
        n_gpu = torch.cuda.device_count()
    else:
        device = torch.device("cuda", args.local_rank)
        n_gpu = 1
        torch.distributed.init_process_group(backend='nccl')
    # Seed python, numpy and torch for reproducibility.
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if n_gpu > 0:
        torch.cuda.manual_seed_all(args.seed)
    # Reject sequence lengths beyond the model's position-embedding table.
    if args.bert_config_file is not None:
        bert_config = BertConfig.from_json_file(args.bert_config_file)
        if args.max_seq_length > bert_config.max_position_embeddings:
            raise ValueError(
                "Cannot use sequence length {} because the BERT model was only trained up to sequence length {}".format(
                    args.max_seq_length, bert_config.max_position_embeddings))
    return device, n_gpu
def data_and_model_loader(device, n_gpu, args):
    """Build the QACG-BERT model and a DataLoader over the test CSV.

    Returns (model, test_dataloader) with the model moved to ``device`` and,
    when applicable, wrapped for distributed or multi-GPU execution.
    """
    model, tokenizer = get_model_and_tokenizer(vocab_file=args.vocab_file,
        bert_config_file=args.bert_config_file, init_checkpoint=args.init_checkpoint,
        do_lower_case=True,
        init_lrp=False)

    test_examples = get_test_examples(args.path)
    test_features = convert_examples_to_features(
        test_examples, args.max_seq_length,
        tokenizer, args.max_context_length,
        args.context_standalone, args)

    # Assemble one tensor per feature field, batch dimension first.
    all_input_ids = torch.tensor([f.input_ids for f in test_features], dtype=torch.long)
    all_input_mask = torch.tensor([f.input_mask for f in test_features], dtype=torch.long)
    all_segment_ids = torch.tensor([f.segment_ids for f in test_features], dtype=torch.long)
    all_score = torch.tensor([f.score for f in test_features], dtype=torch.float)
    all_seq_len = torch.tensor([[f.seq_len] for f in test_features], dtype=torch.long)
    all_context_ids = torch.tensor([f.context_ids for f in test_features], dtype=torch.long)
    test_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids,
                              all_score, all_seq_len, all_context_ids)
    test_dataloader = DataLoader(test_data, shuffle=False)

    # Fix: move the model to the target device BEFORE wrapping it.
    # DistributedDataParallel with device_ids requires the module's
    # parameters to already live on that device, and DataParallel expects
    # the module on the source GPU; the original wrapped first and moved
    # afterwards.
    model.to(device)
    if args.local_rank != -1:
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank],
                                                          output_device=args.local_rank)
    elif n_gpu > 1:
        model = torch.nn.DataParallel(model)

    return model, test_dataloader
def pred(args):
    """Run inference over the test CSV and return the predicted scores.

    Returns a list of floats, one per example (the DataLoader batch size
    is 1).
    """
    device, n_gpu = system_setups(args)
    model, test_dataloader = data_and_model_loader(device, n_gpu, args)

    model.eval()
    y_pred = []
    # Inference only: disabling autograd saves memory without changing
    # the model's outputs.
    with torch.no_grad():
        for batch in test_dataloader:
            if torch.cuda.is_available():
                torch.cuda.empty_cache()
            input_ids, input_mask, segment_ids, score, seq_lens, \
                context_ids = batch

            # Trim padding down to the longest sequence in this batch.
            max_seq_lens = max(seq_lens)[0]
            input_ids = input_ids[:, :max_seq_lens]
            input_mask = input_mask[:, :max_seq_lens]
            segment_ids = segment_ids[:, :max_seq_lens]

            input_ids = input_ids.to(device)
            input_mask = input_mask.to(device)
            segment_ids = segment_ids.to(device)
            score = score.to(device)
            seq_lens = seq_lens.to(device)
            context_ids = context_ids.to(device)

            _, pred_score, _, _, _, _ = \
                model(input_ids, segment_ids, input_mask, seq_lens, device=device, labels=score,
                      context_ids=context_ids)
            # Fix: move the tensor to the CPU before .numpy() — on CUDA
            # devices Tensor.numpy() raises for GPU-resident tensors.
            y_pred.append(pred_score.detach().cpu().numpy()[0][0])
    return y_pred
if __name__ == '__main__':
    import argparse
    # CLI entry point: score every row of the input CSV with QACG-BERT and
    # print the list of predicted scores.
    parser = argparse.ArgumentParser()
    parser.add_argument("--path")  # input CSV consumed by get_test_examples
    parser.add_argument("--max_seq_length", default=128, type=int)
    parser.add_argument("--vocab_file")  # wordpiece vocabulary
    parser.add_argument("--bert_config_file")  # optional; BERT-base defaults otherwise
    parser.add_argument("--init_checkpoint")  # optional weights to load
    parser.add_argument('--local_rank', type=int, default=-1)  # -1 = non-distributed
    parser.add_argument("--no_cuda", default=False, action='store_true')
    parser.add_argument("--max_context_length", default=1, type=int)
    parser.add_argument("--context_standalone", default=False, action='store_true')
    parser.add_argument('--seed', type=int, default=123)
    args = parser.parse_args()
    pred_score = pred(args)
    print(pred_score)
|
#!/bin/sh
# Generate version.go from the git tag that points at HEAD.

v=$(git tag --contains HEAD)
if [ "$v" = "" ]; then
	echo "Cannot determine tag." >&2
	exit 1
fi

# Fix: `git tag --contains HEAD` lists every tag on HEAD, one per line; keep
# only the first so the generated string constant stays a single line.
# Then strip a leading "v" prefix (v1.2.3 -> 1.2.3).
v=$(echo "$v" | head -n 1 | sed -Ee 's#^v(.*)#\1#')

cat <<END >version.go
// Code generated by gen_version.sh, DO NOT EDIT.
package main
//go:generate sh -c ./gen_version.sh
const version = "$v"
END
|
# Record the connected Android device's screen to a timestamped MP4 on its
# sdcard until interrupted.
# NOTE(review): %T expands to HH:MM:SS — the colons may be rejected by some
# Android storage layers; confirm on target devices.
adb shell screenrecord /sdcard/$(date +%T).mp4
<filename>migrations/20141020175650_dumped_migration_76/down.sql
-- Revert migration 20141020175650: remove the full-text-search upkeep on
-- crates.  Drop the trigger first...
DROP TRIGGER trigger_crates_tsvector_update
ON crates;
-- ...then drop the trigger function it invoked.
DROP FUNCTION trigger_crates_name_search();
# code is based on https://github.com/katerakelly/pytorch-maml
import torchvision
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torch
from torch.utils.data import DataLoader,Dataset
import random
import os
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
from torch.utils.data.sampler import Sampler
def imshow(img):
    """Display a CHW image tensor with matplotlib (axes hidden)."""
    # matplotlib expects HWC, so move channels last.
    as_hwc = np.transpose(img.numpy(), (1, 2, 0))
    plt.axis("off")
    plt.imshow(as_hwc)
    plt.show()
class Rotate(object):
    """Callable transform rotating a PIL image by a fixed angle (degrees)."""

    def __init__(self, angle):
        self.angle = angle

    def __call__(self, x, mode="reflect"):
        # `mode` is accepted for interface compatibility but not used.
        return x.rotate(self.angle)
def omniglot_character_folders():
    """Collect per-character folders from ../datas/omniglot_resized and split
    them into 1200 meta-training characters and the rest for meta-validation.

    NOTE(review): seeds the GLOBAL random module with 1 before shuffling, so
    the split is deterministic but the process-wide RNG state is mutated.
    """
    data_folder = '../datas/omniglot_resized/'
    # One folder per character: <root>/<alphabet family>/<character>.
    character_folders = [os.path.join(data_folder, family, character) \
                for family in os.listdir(data_folder) \
                if os.path.isdir(os.path.join(data_folder, family)) \
                for character in os.listdir(os.path.join(data_folder, family))]
    random.seed(1)
    random.shuffle(character_folders)
    num_train = 1200
    metatrain_character_folders = character_folders[:num_train]
    metaval_character_folders = character_folders[num_train:]
    return metatrain_character_folders,metaval_character_folders
class OmniglotTask(object):
    # This class is for task generation for both meta training and meta testing.
    # For meta training, we use all 20 samples without valid set (empty here).
    # For meta testing, we use 1 or 5 shot samples for training, while using the same number of samples for validation.
    # If set num_samples = 20 and chracter_folders = metatrain_character_folders, we generate tasks for meta training
    # If set num_samples = 1 or 5 and chracter_folders = metatest_chracter_folders, we generate tasks for meta testing
    def __init__(self, character_folders, num_classes, train_num,test_num):
        """Sample one N-way episode: pick `num_classes` character folders,
        then per class take `train_num` support and `test_num` query images.
        Uses the global `random` module, so results depend on its state."""
        self.character_folders = character_folders
        self.num_classes = num_classes
        self.train_num = train_num
        self.test_num = test_num
        # Choose the episode's classes and assign them labels 0..N-1.
        class_folders = random.sample(self.character_folders,self.num_classes)
        labels = np.array(range(len(class_folders)))
        labels = dict(zip(class_folders, labels))
        samples = dict()
        self.train_roots = []
        self.test_roots = []
        for c in class_folders:
            # Shuffle this class's image paths, then split support/query.
            temp = [os.path.join(c, x) for x in os.listdir(c)]
            samples[c] = random.sample(temp, len(temp))
            self.train_roots += samples[c][:train_num]
            self.test_roots += samples[c][train_num:train_num+test_num]
        self.train_labels = [labels[self.get_class(x)] for x in self.train_roots]
        self.test_labels = [labels[self.get_class(x)] for x in self.test_roots]
    def get_class(self, sample):
        # Class folder = the image path minus its filename.
        # NOTE(review): splits on '/' literally, so this assumes POSIX-style
        # paths; would misbehave with Windows separators.
        return os.path.join(*sample.split('/')[:-1])
class FewShotDataset(Dataset):
    """Dataset over one episode (task), exposing either its train split or
    its test split as selected by ``split``.  Subclasses must implement
    ``__getitem__`` to actually load samples."""

    def __init__(self, task, split='train', transform=None, target_transform=None):
        self.transform = transform                # applied to each loaded image
        self.target_transform = target_transform  # applied to each label
        self.task = task
        self.split = split
        use_train = (self.split == 'train')
        self.image_roots = self.task.train_roots if use_train else self.task.test_roots
        self.labels = self.task.train_labels if use_train else self.task.test_labels

    def __len__(self):
        return len(self.image_roots)

    def __getitem__(self, idx):
        raise NotImplementedError("This is an abstract class. Subclass this class for your particular dataset.")
class Omniglot(FewShotDataset):
    """FewShotDataset that loads Omniglot images as 28x28 grayscale PIL
    images, applying the configured transforms."""
    def __init__(self, *args, **kwargs):
        super(Omniglot, self).__init__(*args, **kwargs)
    def __getitem__(self, idx):
        # Load from disk, convert to single-channel, and downscale.
        image_root = self.image_roots[idx]
        image = Image.open(image_root)
        image = image.convert('L')
        image = image.resize((28,28), resample=Image.LANCZOS) # per Chelsea's implementation
        #image = np.array(image, dtype=np.float32)
        if self.transform is not None:
            image = self.transform(image)
        label = self.labels[idx]
        if self.target_transform is not None:
            label = self.target_transform(label)
        return image, label
class ClassBalancedSampler(Sampler):
    """Samples ``num_per_class`` examples from each of ``num_cl`` class pools
    of ``num_inst`` examples each, assuming the dataset is grouped by class
    (indices [j*num_inst, (j+1)*num_inst) all belong to class j)."""

    def __init__(self, num_per_class, num_cl, num_inst, shuffle=True):
        self.num_per_class = num_per_class
        self.num_cl = num_cl
        self.num_inst = num_inst
        self.shuffle = shuffle

    def __iter__(self):
        # Build one flat index list, grouped by class.
        indices = []
        for j in range(self.num_cl):
            if self.shuffle:
                picks = torch.randperm(self.num_inst)[:self.num_per_class]
            else:
                picks = range(self.num_inst)[:self.num_per_class]
            indices.extend(i + j * self.num_inst for i in picks)
        if self.shuffle:
            random.shuffle(indices)
        return iter(indices)

    def __len__(self):
        # NOTE(review): returns 1 rather than the number of indices yielded;
        # preserved as-is in case DataLoader behavior relies on it.
        return 1
def get_data_loader(task, num_per_class=1, split='train',shuffle=True,rotation=0):
    """Build a DataLoader over one Omniglot episode.

    num_per_class: instances sampled per class, so the effective batch size
        is num_per_class * task.num_classes.
    rotation: degrees to rotate each image (used for class augmentation).
    """
    # NOTE: batch size here is # instances PER CLASS
    #normalize = transforms.Normalize(mean=[0.92206, 0.92206, 0.92206], std=[0.08426, 0.08426, 0.08426])
    normalize = transforms.Normalize(mean=[0.92206], std=[0.08426])
    #if dataset is omniglot
    dataset = Omniglot(task,split=split,transform=transforms.Compose([Rotate(rotation),transforms.ToTensor(),normalize]))
    # Pick pool sizes from the split being served.
    if split == 'train':
        sampler = ClassBalancedSampler(num_per_class, task.num_classes, task.train_num,shuffle=shuffle)
    else:
        sampler = ClassBalancedSampler(num_per_class, task.num_classes, task.test_num,shuffle=shuffle)
    loader = DataLoader(dataset, batch_size=num_per_class*task.num_classes, sampler=sampler)
    return loader
|
import numpy as np
from typing import List, Optional, Tuple
class C:
    # Minimal stand-in configuration: C.RPIN.MASK_LOSS_WEIGHT toggles the
    # mask-decoding branch in process_data below.
    RPIN = type('RPIN', (), {'MASK_LOSS_WEIGHT': True}) # Example configuration object
def process_data(bbox_data: np.ndarray, mask_data: Optional[np.ndarray], C: object) -> Tuple[np.ndarray, Optional[np.ndarray], np.ndarray, List[np.ndarray], List[np.ndarray]]:
    """Split rollout predictions into bbox / destroyed-flag / mask outputs.

    bbox_data: (batch, num_objs, D) array whose last channel is a destroyed
        logit; the remaining D-1 channels are bounding-box parameters.
    mask_data: flattened mask logits, decoded only when
        C.RPIN.MASK_LOSS_WEIGHT is truthy.
    Returns (bbox_data without last channel, decoded mask or None,
    sigmoid of the destroyed logit, bbox rollout list, mask rollout list).

    Fix: the original referenced `state_list`, `state_list_buffer` and `s`,
    none of which exist in this scope, so every call died with a
    NameError/UnboundLocalError; those dead lines are removed.
    """
    batch_size, num_objs, _ = bbox_data.shape
    # Sigmoid over the last channel: per-object destroyed probability.
    if_destroyed = 1 / (1 + np.exp(-bbox_data[:, :, -1]))
    # Remaining channels are the bounding-box parameters proper.
    bbox_data = bbox_data[:, :, :-1]

    mask = None
    if C.RPIN.MASK_LOSS_WEIGHT:
        # NOTE(review): `mask_decoder` must be supplied by the surrounding
        # module; it is not defined in this snippet.
        mask = mask_decoder(mask_data.reshape(batch_size, num_objs, -1))

    bbox_rollout = bbox_data.tolist()
    mask_rollout = [mask.tolist()] if mask is not None else []

    return bbox_data, mask, if_destroyed, bbox_rollout, mask_rollout
/*
* IXGzipCodec.cpp
* Author: <NAME>
* Copyright (c) 2020 Machine Zone, Inc. All rights reserved.
*/
#include "IXGzipCodec.h"
#include "IXBench.h"
#include <array>
#include <string.h>
#ifdef IXWEBSOCKET_USE_ZLIB
#include <zlib.h>
#endif
#ifdef IXWEBSOCKET_USE_DEFLATE
#include <libdeflate.h>
#endif
namespace ix
{
#ifdef IXWEBSOCKET_USE_ZLIB
    // Compress `str` into gzip format and return the compressed bytes.
    // Returns an empty string on allocation/compression failure
    // (libdeflate path only).
    std::string gzipCompress(const std::string& str)
    {
#ifdef IXWEBSOCKET_USE_DEFLATE
        // libdeflate path: one-shot compression into a worst-case-sized buffer.
        int compressionLevel = 6;
        struct libdeflate_compressor* compressor;
        compressor = libdeflate_alloc_compressor(compressionLevel);
        const void* uncompressed_data = str.data();
        size_t uncompressed_size = str.size();
        void* compressed_data;
        size_t actual_compressed_size;
        size_t max_compressed_size;
        // Upper bound on the gzip output size for this input.
        max_compressed_size = libdeflate_gzip_compress_bound(compressor, uncompressed_size);
        compressed_data = malloc(max_compressed_size);
        if (compressed_data == NULL)
        {
            return std::string();
        }
        actual_compressed_size = libdeflate_gzip_compress(
            compressor, uncompressed_data, uncompressed_size, compressed_data, max_compressed_size);
        libdeflate_free_compressor(compressor);
        // 0 means the bound was insufficient / compression failed.
        if (actual_compressed_size == 0)
        {
            free(compressed_data);
            return std::string();
        }
        std::string out;
        out.assign(reinterpret_cast<char*>(compressed_data), actual_compressed_size);
        free(compressed_data);
        return out;
#else
        z_stream zs; // z_stream is zlib's control structure
        memset(&zs, 0, sizeof(zs));
        // deflateInit2 configure the file format: request gzip instead of deflate
        // NOTE(review): deflateInit2's return value is not checked here.
        const int windowBits = 15;
        const int GZIP_ENCODING = 16;
        deflateInit2(&zs,
                     Z_DEFAULT_COMPRESSION,
                     Z_DEFLATED,
                     windowBits | GZIP_ENCODING,
                     8,
                     Z_DEFAULT_STRATEGY);
        zs.next_in = (Bytef*) str.data();
        zs.avail_in = (uInt) str.size(); // set the z_stream's input
        int ret;
        char outbuffer[32768];
        std::string outstring;
        // retrieve the compressed bytes blockwise
        // Z_FINISH with all input present: loop until deflate stops
        // returning Z_OK (i.e. the stream ends or errors out).
        do
        {
            zs.next_out = reinterpret_cast<Bytef*>(outbuffer);
            zs.avail_out = sizeof(outbuffer);
            ret = deflate(&zs, Z_FINISH);
            if (outstring.size() < zs.total_out)
            {
                // append the block to the output string
                outstring.append(outbuffer, zs.total_out - outstring.size());
            }
        } while (ret == Z_OK);
        deflateEnd(&zs);
        return outstring;
#endif
    }
    // Decompress gzip-framed `in`, appending the result to `out`.
    // Returns false on stream/dictionary/memory errors; `out` may then
    // hold a partial result.
    bool gzipDecompress(const std::string& in, std::string& out)
    {
        z_stream inflateState;
        memset(&inflateState, 0, sizeof(inflateState));
        inflateState.zalloc = Z_NULL;
        inflateState.zfree = Z_NULL;
        inflateState.opaque = Z_NULL;
        inflateState.avail_in = 0;
        inflateState.next_in = Z_NULL;
        // 16 + MAX_WBITS tells zlib to expect a gzip header.
        if (inflateInit2(&inflateState, 16 + MAX_WBITS) != Z_OK)
        {
            return false;
        }
        inflateState.avail_in = (uInt) in.size();
        inflateState.next_in = (unsigned char*) (const_cast<char*>(in.data()));
        const int kBufferSize = 1 << 14;
        std::array<unsigned char, kBufferSize> compressBuffer;
        // Inflate in 16 KiB chunks; a fully-used output buffer means more
        // data may remain, so keep looping until it is only partly filled.
        do
        {
            inflateState.avail_out = (uInt) kBufferSize;
            inflateState.next_out = &compressBuffer.front();
            int ret = inflate(&inflateState, Z_SYNC_FLUSH);
            if (ret == Z_NEED_DICT || ret == Z_DATA_ERROR || ret == Z_MEM_ERROR)
            {
                inflateEnd(&inflateState);
                return false;
            }
            out.append(reinterpret_cast<char*>(&compressBuffer.front()),
                       kBufferSize - inflateState.avail_out);
        } while (inflateState.avail_out == 0);
        inflateEnd(&inflateState);
        return true;
    }
#endif
} // namespace ix
|
#!/bin/bash
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
#
# Fabian Schuiki <fschuiki@iis.ee.ethz.ch>
# Andreas Kurth <akurth@iis.ee.ethz.ch>
set -e
ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)

# Allow the caller to override the simulator binary; default to `vsim`.
[ ! -z "$VSIM" ] || VSIM=vsim

# Run a testbench under vsim with coverage, mirroring all output to vsim.log,
# and fail unless the log reports zero errors.
call_vsim() {
	# Fix: redirect the simulator's stderr into the pipe (2>&1 BEFORE the |)
	# so errors land in vsim.log too; previously 2>&1 applied only to tee.
	echo "log -r /*; run -all" | $VSIM -c -coverage -voptargs='+acc +cover=sbecft' "$@" 2>&1 | tee vsim.log
	grep "Errors: 0," vsim.log
}

call_vsim snitch_tcdm_interconnect_tb
<reponame>bcgov/citz-imb-sp-vdr
import { RestCall } from '../RestCall/RestCall';
/**
 * Fetch the current SharePoint web's metadata via the REST API.
 * Optional OData options (expand/filter/select) are appended as query
 * parameters when provided. Resolves to the response's `d` payload.
 */
export const GetSite = async ({ expand, filter, select }) => {
	// Collect only the OData parameters that were actually supplied.
	const params = [];
	if (expand) params.push(`$expand=${expand}`);
	if (filter) params.push(`$filter=${filter}`);
	if (select) params.push(`$select=${select}`);

	const query = params.length ? `?${params.join('&')}` : '';
	const response = await RestCall({ endPoint: `/_api/web${query}` });
	return response.d;
};
|
<filename>spec/rubocop/cop/lint/suppressed_exception_spec.rb<gh_stars>10-100
# frozen_string_literal: true
# Specs for the Lint/SuppressedException cop, which flags `rescue` clauses
# whose body does nothing and therefore silently swallows exceptions.
# The AllowComments option controls whether a comment-only rescue body
# counts as deliberate (and therefore allowed).
RSpec.describe RuboCop::Cop::Lint::SuppressedException, :config do
  # With AllowComments disabled, even a commented-but-empty rescue offends.
  context 'with AllowComments set to false' do
    let(:cop_config) { { 'AllowComments' => false } }
    it 'registers an offense for empty rescue block' do
      expect_offense(<<~RUBY)
        begin
          something
        rescue
        ^^^^^^ Do not suppress exceptions.
          #do nothing
        end
      RUBY
    end
    it 'does not register an offense for rescue with body' do
      expect_no_offenses(<<~RUBY)
        begin
          something
          return
        rescue
          file.close
        end
      RUBY
    end
    context 'when empty rescue for `def`' do
      it 'registers an offense for empty rescue without comment' do
        expect_offense(<<~RUBY)
          def foo
            do_something
          rescue
          ^^^^^^ Do not suppress exceptions.
          end
        RUBY
      end
      it 'registers an offense for empty rescue with comment' do
        expect_offense(<<~RUBY)
          def foo
            do_something
          rescue
          ^^^^^^ Do not suppress exceptions.
            # do nothing
          end
        RUBY
      end
    end
    context 'when empty rescue for defs' do
      it 'registers an offense for empty rescue without comment' do
        expect_offense(<<~RUBY)
          def self.foo
            do_something
          rescue
          ^^^^^^ Do not suppress exceptions.
          end
        RUBY
      end
      it 'registers an offense for empty rescue with comment' do
        expect_offense(<<~RUBY)
          def self.foo
            do_something
          rescue
          ^^^^^^ Do not suppress exceptions.
            # do nothing
          end
        RUBY
      end
    end
    # `rescue` directly inside a `do` block is only valid from Ruby 2.5 on.
    context 'Ruby 2.5 or higher', :ruby25 do
      context 'when empty rescue for `do` block' do
        it 'registers an offense for empty rescue without comment' do
          expect_offense(<<~RUBY)
            foo do
              do_something
            rescue
            ^^^^^^ Do not suppress exceptions.
            end
          RUBY
        end
        it 'registers an offense for empty rescue with comment' do
          expect_offense(<<~RUBY)
            foo do
            rescue
            ^^^^^^ Do not suppress exceptions.
              # do nothing
            end
          RUBY
        end
      end
    end
  end
  # With AllowComments enabled, a commented rescue body is treated as a
  # deliberate, documented no-op and is not an offense.
  context 'with AllowComments set to true' do
    let(:cop_config) { { 'AllowComments' => true } }
    it 'does not register an offense for empty rescue with comment' do
      expect_no_offenses(<<~RUBY)
        begin
          something
          return
        rescue
          # do nothing
        end
      RUBY
    end
    context 'when empty rescue for `def`' do
      it 'registers an offense for empty rescue without comment' do
        expect_offense(<<~RUBY)
          def foo
            do_something
          rescue
          ^^^^^^ Do not suppress exceptions.
          end
        RUBY
      end
      it 'does not register an offense for empty rescue with comment' do
        expect_no_offenses(<<~RUBY)
          def foo
            do_something
          rescue
            # do nothing
          end
        RUBY
      end
    end
    context 'when empty rescue for `defs`' do
      it 'registers an offense for empty rescue without comment' do
        expect_offense(<<~RUBY)
          def self.foo
            do_something
          rescue
          ^^^^^^ Do not suppress exceptions.
          end
        RUBY
      end
      it 'does not register an offense for empty rescue with comment' do
        expect_no_offenses(<<~RUBY)
          def self.foo
            do_something
          rescue
            # do nothing
          end
        RUBY
      end
    end
    context 'Ruby 2.5 or higher', :ruby25 do
      context 'when empty rescue for `do` block' do
        it 'registers an offense for empty rescue without comment' do
          expect_offense(<<~RUBY)
            foo do
              do_something
            rescue
            ^^^^^^ Do not suppress exceptions.
            end
          RUBY
        end
        it 'does not register an offense for empty rescue with comment' do
          expect_no_offenses(<<~RUBY)
            foo do
            rescue
              # do nothing
            end
          RUBY
        end
      end
    end
    # Comments before/after a one-line begin/rescue/end do not excuse it:
    # the comment must be inside the rescue body itself.
    it 'registers an offense for empty rescue on single line with a comment after it' do
      expect_offense(<<~RUBY)
        RSpec.describe Dummy do
          it 'dummy spec' do
            # This rescue is here to ensure the test does not fail because of the `raise`
            expect { begin subject; rescue ActiveRecord::Rollback; end }.not_to(change(Post, :count))
                                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not suppress exceptions.
            # Done
          end
        end
      RUBY
    end
  end
end
|
<filename>common/utils/mobile_url_hash_util.py
from hashids import Hashids
from common.constants import MOBILE_SALT
class MobileUrlHashUtil(object):
    """Thin wrapper around Hashids for encoding/decoding mobile URL ids."""
    def __init__(self):
        # Salted hashids instance; min_length pads hashes to at least 8 chars.
        self.instance = Hashids(MOBILE_SALT, min_length=8)
    def encode(self, str):
        # NOTE(review): the parameter name shadows the `str` builtin, and
        # hashids encode() expects integer id(s) — confirm callers pass ints.
        return self.instance.encode(str)
    def decode(self, str):
        # hashids decode() returns a tuple; taking [0] raises IndexError for
        # an invalid/foreign hash — presumably callers guarantee validity.
        return self.instance.decode(str)[0]
|
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.server.encryption.vault;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.cloud.config.environment.Environment;
import org.springframework.cloud.config.environment.PropertySource;
import org.springframework.cloud.config.server.encryption.CipherEnvironmentEncryptor;
import org.springframework.cloud.config.server.encryption.EnvironmentEncryptor;
import org.springframework.util.StringUtils;
import org.springframework.vault.core.VaultKeyValueOperations;
import org.springframework.vault.support.VaultResponse;
/**
* VaultEnvironmentEncryptor that can decrypt property values prefixed with {vault}
* marker.
*
* @author <NAME>
*/
public class VaultEnvironmentEncryptor implements EnvironmentEncryptor {

	// Bug fix: the logger was created for CipherEnvironmentEncryptor.class
	// (copy-paste), so messages from this class were attributed to the wrong
	// logger category.
	private static final Log logger = LogFactory.getLog(VaultEnvironmentEncryptor.class);

	private final VaultKeyValueOperations keyValueTemplate;

	public VaultEnvironmentEncryptor(VaultKeyValueOperations keyValueTemplate) {
		this.keyValueTemplate = keyValueTemplate;
	}

	/**
	 * Replaces every property value of the form {@code {vault}:<key>#<param>}
	 * with the corresponding secret read from Vault. Malformed or unresolvable
	 * values are renamed under an {@code invalid.} prefix with the placeholder
	 * {@code <n/a>} instead of failing the whole environment.
	 */
	@Override
	public Environment decrypt(Environment environment) {
		// Cache of Vault responses so each key path is fetched at most once.
		Map<String, VaultResponse> loadedVaultKeys = new HashMap<>();
		Environment result = new Environment(environment);
		for (PropertySource source : environment.getPropertySources()) {
			Map<Object, Object> map = new LinkedHashMap<>(source.getSource());
			// Iterate over a snapshot so 'map' can be mutated inside the loop.
			for (Map.Entry<Object, Object> entry : new LinkedHashSet<>(map.entrySet())) {
				Object key = entry.getKey();
				String name = key.toString();
				// Only values starting with the {vault} marker are processed.
				if (entry.getValue() != null && entry.getValue().toString().startsWith("{vault}")) {
					String value = entry.getValue().toString();
					map.remove(key);
					try {
						// Strip the marker, then validate ":<key>#<param>".
						value = value.substring("{vault}".length());
						if (!value.startsWith(":")) {
							throw new RuntimeException("Wrong format");
						}
						value = value.substring(1);
						if (!value.contains("#")) {
							throw new RuntimeException("Wrong format");
						}
						String[] parts = value.split("#");
						if (parts.length == 1) {
							throw new RuntimeException("Wrong format");
						}
						if (StringUtils.isEmpty(parts[0]) || StringUtils.isEmpty(parts[1])) {
							throw new RuntimeException("Wrong format");
						}
						String vaultKey = parts[0];
						String vaultParamName = parts[1];
						// Fetch (and cache) the secret stored at the key path.
						if (!loadedVaultKeys.containsKey(vaultKey)) {
							loadedVaultKeys.put(vaultKey, this.keyValueTemplate.get(vaultKey));
						}
						VaultResponse vaultResponse = loadedVaultKeys.get(vaultKey);
						// Null result when the path or the parameter is absent.
						if (vaultResponse == null || (vaultResponse.getData() == null
								|| !vaultResponse.getData().containsKey(vaultParamName))) {
							value = null;
						}
						else {
							value = vaultResponse.getData().get(vaultParamName).toString();
						}
					}
					catch (Exception e) {
						// Keep the property visible but clearly marked invalid.
						value = "<n/a>";
						name = "invalid." + name;
						String message = "Cannot resolve key: " + key + " (" + e.getClass() + ": " + e.getMessage()
								+ ")";
						if (logger.isDebugEnabled()) {
							logger.debug(message, e);
						}
						else if (logger.isWarnEnabled()) {
							logger.warn(message);
						}
					}
					map.put(name, value);
				}
			}
			result.add(new PropertySource(source.getName(), map));
		}
		return result;
	}

}
|
<gh_stars>1-10
package net.dodogang.plume.client.gui.item_group;
import net.dodogang.plume.Plume;
import net.dodogang.plume.item.item_group.TabbedItemGroup;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraft.client.gui.screen.ingame.CreativeInventoryScreen;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tag.Tag;
import net.minecraft.text.TranslatableText;
import net.minecraft.util.Identifier;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@Environment(EnvType.CLIENT)
public record ItemGroupTab(@NotNull Identifier id, @NotNull ItemStack icon, @Nullable Tag<Item> tag) {

    private static final Identifier TAB_WIDGET_TEXTURE = new Identifier(Plume.MOD_ID, "textures/gui/creative_inventory/item_group/tab_widget.png");

    // NOTE: the previous explicit canonical constructor only assigned each
    // component to itself -- exactly what the compiler-generated canonical
    // constructor does -- so it has been removed.

    /** Texture used as the background of this tab's widget. */
    public Identifier getWidgetBackgroundTexture() {
        return ItemGroupTab.TAB_WIDGET_TEXTURE;
    }

    /** Translatable label for this tab ("itemGroup.tab.<id>"). */
    public TranslatableText getTranslationKey() {
        return new TranslatableText("itemGroup.tab." + id);
    }

    /** Builds the clickable widget for this tab at the given screen slot. */
    public ItemGroupTabWidget createWidget(int x, int y, int index, TabbedItemGroup tab, CreativeInventoryScreen screen) {
        return new ItemGroupTabWidget(x, y, index, tab, screen, this.getWidgetBackgroundTexture());
    }

    /** True when this tab's item tag exists and contains the given item. */
    public boolean contains(Item item) {
        return tag != null && tag.contains(item);
    }
}
|
python ./src/models/scripts/main.py --logdir='logs' --params_file='models_params.json' --epochs=100 -debug |
#include <unordered_map>
#include <string>
#include <string_view>
// Parses "key=value" pairs separated by `fieldSeparator` into a map.
// Segments without '=' are skipped; a later duplicate key overwrites an
// earlier one (same as the original operator[] behavior).
// Improvement: the original duplicated the parsing logic for the segment
// after the last separator; a single loop now handles every segment.
std::unordered_map<std::string, std::string> parseKeyValuePairs(const std::string& input, char fieldSeparator) {
    std::unordered_map<std::string, std::string> keyValuePairs;
    std::string_view remaining(input);
    while (true) {
        const size_t sepPos = remaining.find(fieldSeparator);
        // substr with npos yields the whole remainder, so the final segment
        // needs no special-case code path.
        const std::string_view pair = remaining.substr(0, sepPos);
        const size_t equalsPos = pair.find('=');
        if (equalsPos != std::string_view::npos) {
            keyValuePairs[std::string(pair.substr(0, equalsPos))] =
                std::string(pair.substr(equalsPos + 1));
        }
        if (sepPos == std::string_view::npos) {
            break;
        }
        remaining.remove_prefix(sepPos + 1);
    }
    return keyValuePairs;
}
<style>
  /* Global reset: predictable box sizing everywhere. */
  * { box-sizing: border-box; }

  body {
    margin: 0;
    padding: 0;
    font-family: sans-serif;
  }

  /* Two-column layout: 1fr sidebar, 2fr content. */
  #grid {
    display: grid;
    grid-template-columns: 1fr 2fr;
    grid-gap: 5px;
    padding: 1rem;
  }

  #nav {
    padding: 5px;
    margin-bottom: 1rem;
  }

  /* Horizontal, bullet-free navigation list. */
  #nav ul {
    list-style-type: none;
    padding: 0;
    margin: 0;
  }

  #nav li { display: inline; }

  #main { padding: 1rem; }

  #footer {
    padding: 1rem;
    text-align: center;
  }
</style>
// run this when the meteor app is started
// Module-level handle for the two-way DDP event emitter created on startup.
var em;
Meteor.startup(function() {
    em = new EventDDP('twoway');
    //load test
    // NOTE(review): the asset path is spelled "staic.json"; presumably the
    // file in /private really has that name -- confirm before renaming it.
    Assets.getText("tests/staic.json",
        function(err, result){
            if(err){
                console.log("Assets err: " + err);
            }
            if(result){
                var loadedTest = JSON.parse(result);
                if(loadedTest.name){
                    console.log("Asset log: " + "Importing Test");
                    console.log(loadedTest);
                    // Insert-or-update the test document, keyed by its name.
                    Tests.upsert({
                        // Selector
                        name: loadedTest.name,
                    }, {
                        // Modifier
                        $set: loadedTest
                    });
                } else {
                    console.log("Assets err: " + "Invalid test" + loadedTest['name']);
                    console.log(loadedTest);
                }
            }
        }
    );
    // Same import flow for the second bundled test definition.
    Assets.getText("tests/messy.json",
        function(err, result){
            if(err){
                console.log("Assets err: " + err);
            }
            if(result){
                var loadedTest = JSON.parse(result);
                if(loadedTest.name){
                    console.log("Asset log: " + "Importing Test");
                    console.log(loadedTest);
                    Tests.upsert({
                        // Selector
                        name: loadedTest.name,
                    }, {
                        // Modifier
                        $set: loadedTest
                    });
                } else {
                    console.log("Assets err: " + "Invalid test" + loadedTest['name']);
                    console.log(loadedTest);
                }
            }
        }
    );
    // DDP methods callable from clients.
    Meteor.methods({
        // Registers (or re-registers) the participant bound to this DDP
        // connection. Returns 0 on success, or an error string (Spanish:
        // "name already in use") when the requested name is taken.
        join: function(participant) {
            console.log('Hello', participant, this.connection.id);
            //checking for new user, avoiding collision
            if(participant.isNew == true){
                // if(Participants.find({'name' : { $regex : /^ sad asd$/i } , 'connection_id': {$not: {$size: 0}} } ).fetch().length)
                // Reject when a participant whose name starts with the same
                // (case-insensitive) text still has live connections.
                if(Participants.find({'name' : { $regex : new RegExp("^" + participant.name.trim(), "i") } , 'connection_id': {$not: {$size: 0}} } ).fetch().length)
                    return "Nombre ya utilizado";
            }
            //checking for name changes
            if(Participants.findOne( {connection_id : this.connection.id } )) {
                Participants.upsert({
                    // Selector
                    connection_id: this.connection.id
                }, {
                    // Modifier
                    $set: {
                        name: participant.name.trim(),
                        age: participant.age,
                        last_activity : new Date()
                    }
                }, removeOldConnectionsFromParticipants);
            } else {
                // Handling multiple sessions on same browser, different session IDs but same name
                Participants.upsert({
                    // Selector
                    name: participant.name.trim()
                }, {
                    // Modifier
                    $set: {
                        name: participant.name.trim(),
                        age: participant.age,
                        online: true,
                        last_activity : new Date()
                    },
                    '$addToSet' : { "connection_id" : this.connection.id }
                }, removeOldConnectionsFromParticipants);
            }
            return 0;
        },
        // Marks every active test session inactive.
        endAllSessions: function() {
            TestSessions.update({active: true}, {$set: {active: false}});
        },
        // Broadcasts a forced name change to clients via the DDP emitter.
        forceNameChange: function(payload) {
            em.emit('forceNameChange', payload);
        },
        // Activates one session (deactivating all others) and rewinds it to
        // the first question.
        // NOTE(review): the first update matches {'id': ...} while the next
        // two match {'_id': ...} -- confirm which field the schema uses.
        beginSession: function(session_id) {
            TestSessions.update({ 'id' : {'$ne' : session_id} }, {$set: {active: false}}, {multi: true});
            TestSessions.update({'_id' : session_id}, {$set: {active: true}});
            TestSessions.update({'_id' : session_id}, {$set: {current_question_idx: 0}});
        },
        // Advances the active session to the next question; locks answers
        // until the UI re-enables them.
        IncreaseCurrentTestQuestionIndex: function(){
            console.log('INCREASING ac');
            TestSessions.update( {'active': true}, { $inc : { "current_question_idx" : 1 }, $set: {'answers_allowed' : false} });
        },
        // Moves the active session back one question; also locks answers.
        DecreaseCurrentTestQuestionIndex: function(){
            console.log('DECREASING ac');
            TestSessions.update( {'active': true}, { $inc : { "current_question_idx" : -1 }, $set: {'answers_allowed' : false} });
        }
    });
    /* HELPERS */
    // Snapshot of the server's DDP session ids, used to detect any
    // connect/disconnect between polls.
    var _oldsessions = JSON.stringify([]);
    Meteor.setInterval(function() {
        if(_oldsessions != JSON.stringify(Object.keys(Meteor.server.sessions))){
            removeOldConnectionsFromParticipants();
            _oldsessions = JSON.stringify(Object.keys(Meteor.server.sessions));
        }
    }, 1000);
    //cleanup of expired sessions
    // Removes connection ids that no longer exist on the server from every
    // participant document; bumps last_activity only when something changed.
    var removeOldConnectionsFromParticipants = function(){
        Participants.find().fetch().forEach( function(Participant) {
            var old_ids = Participant.connection_id;
            // Legacy documents stored a single id as a string; normalize it.
            if(typeof old_ids == 'string') old_ids = [old_ids];
            var old_ids_snapshot = JSON.stringify(old_ids);
            if(!old_ids) return;
            // Keep only ids that correspond to live server sessions.
            var new_ids = old_ids.filter(function(n) {
                return Object.keys(Meteor.server.sessions).indexOf(n) != -1;
            });
            var new_ids_snapshot = JSON.stringify(new_ids);
            var newData = { $set : { connection_id : new_ids } };
            if(old_ids_snapshot != new_ids_snapshot) {
                newData['$set']['last_activity'] = new Date();
            }
            Participants.update({ _id : Participant._id }, newData);
        });
    };
    // Connection lifecycle logging.
    Meteor.onConnection(function (conn) {
        var connId = conn.id;
        console.log('welcome, ', connId);
        conn.onClose(function () {
            console.log('bye ', connId);
        });
    });
    // Publishes all answers grouped by participant name.
    Meteor.publish("SessionAnswersRaw", function () {
        ReactiveAggregate(this, Answers, [{
            $group: {
                _id: "$name",
                answers: { $push: "$$ROOT" }
            }
        }], { clientCollection: "SessionAnswersLive" });
    });
    // Publishes per-participant answers for one session, newest first.
    Meteor.publish("Results", function(options) {
        // This does this: Run aggregation of the fields below on the Answers
        // Collection...
        console.log('SESSION_ID', options, options.session_id);
        ReactiveAggregate(this, Answers,
            //PIPELINE
            [
                { $match : {session_id: options.session_id} },
                { $sort : {created_at: -1 } },
                { $group: {
                    _id: "$name",
                    answers: { $push: "$$ROOT" },
                    age: { $first: "$age" },
                }
                },
                { $project: {
                    "insensitive_name": { "$toLower": "$_id" },
                    "answers": "$answers",
                    "age": "$age"
                }}
                // { $sort : {_id: 1 } },
            ], {
                //OPTIONS
                // and send the results to another collection called below
                clientCollection: "SessionAnswersLive"
            });
    });
});
# Create a bzip2-compressed archive ("c" create, "j" bzip2, "f" file name).
tar -cjf ~/lab0/tars/mrozowskik.tar.bz2 ~/lab0/prog1.c # tar
# List the archive contents without extracting ("t" list).
tar -tjf ~/lab0/tars/mrozowskik.tar.bz2 # see files
# Extract into the current directory ("x" extract, "v" verbose;
# compression is auto-detected).
tar -xvf mrozowskik.tar.bz2 # untar
#!/usr/bin/env bash
############################# PARAMETERS
# Installation parameters; each path/URI may be overridden via environment
# variables before running the installer.
app_name='oh_my_vim'
[ -z "$APP_PATH" ] && APP_PATH="$HOME/.vim_runenv"
[ -z "$REPO_URI" ] && REPO_URI="https://github.com/smartcub/oh_my_vim.git"
[ -z "$PWLF_URI" ] && PWLF_URI="https://github.com/powerline/fonts.git"
[ -z "$FICD_URI" ] && FICD_URI="https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Light.ttf"
[ -z "$VIMPLUG_URI" ] && VIMPLUG_URI="https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim"
############################# BASIC TOOLS
# Print a message to stderr; %b makes printf interpret backslash escapes
# (used for the ANSI color sequences below).
msg()
{
    printf '%b\n' "$1" >&2
}
# Print a green check-marked message, but only when the global $ret is 0
# (i.e. the previously tracked step succeeded).
success()
{
    if [ "$ret" -eq '0' ];then
        # Insert mode, <C-v>u2714
        msg "\33[32m[✔]\33[0m ${1}${2}"
    fi
}
# Print a red cross-marked message and abort the installer with status 1.
error()
{
    # Insert mode, <C-v>u2718
    msg "\33[31m[✘]\33[0m ${1}${2}"
    exit 1
}
# Return 0 when the command named by $1 is available on PATH, 1 otherwise.
# Fixes: "$1" is now quoted (unquoted it would word-split), and the
# two-local-variables dance is replaced by a direct status check.
program_exists()
{
    if command -v "$1" >/dev/null 2>&1; then
        return 0
    fi
    return 1
}
# Abort with an error message unless the command named by $1 is installed.
# Fixes: "$1" quoted; the status is tested directly instead of via the
# fragile separate "$?" read.
program_should_exist()
{
    if ! program_exists "$1"; then
        error "You should have '$1' installed to continue."
    fi
}
# Abort unless the given value is non-empty (used to validate $HOME).
variable_set()
{
    [ -n "$1" ] || error "You must setup your \$HOME environmental variable first!"
}
# Force-symlink $1 to $2, but only when the source exists.
# Note: $ret is assigned unconditionally -- when the source is missing it
# reflects the [ -e ] test rather than the ln command.
lnif()
{
    if [ -e "$1" ]; then
        ln -sf "$1" "$2"
    fi
    ret="$?"
}
############################# FUNCTIONS
# Move up to three existing vim config paths ($1 $2 $3) into ~/.vim_backup,
# renamed with a date+epoch suffix. Symlinks are skipped, since links are
# assumed to be managed by this installer.
old_backup()
{
    local bk=$HOME/.vim_backup
    if [ -e "$1" ] || [ -e "$2" ] || [ -e "$3" ]; then
        msg "Attempt to backup your original vim configuration."
        today=`date +%Y%m%d_%s`
        mkdir -p "$bk"
        for i in "$1" "$2" "$3"; do
            # File or directory exists and NOT a link, rename it
            [ -e "$i" ] && [ ! -L "$i" ] && mv -v "$i" "$bk/${i##*/}.$today";
        done
        # $ret reflects only the last loop iteration's command status.
        ret="$?"
        success "Your original vim configuration has been backed up."
    fi
}
# Download vim-plug into the runtime dir, then run vim headlessly to install
# and clean all configured plug-ins.
setup_vimplug()
{
    local system_shell="$SHELL"
    # vim-plug expects a POSIX shell while installing plug-ins.
    export SHELL='/bin/sh'
    # Install vim-plug
    curl -fLo $APP_PATH/autoload/plug.vim --create-dirs \
        "$VIMPLUG_URI"
    #https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
    # Setup all plug-ins
    vim \
        "+PlugInstall!" \
        "+PlugClean" \
        "+qall"
    export SHELL="$system_shell"
    ret="$?"
    success "Oh_my_vim Done! Enjoy!"
}
# Create the runtime directory layout, copy the vimrc fragments into it, and
# link ~/.vimrc to the managed copy.
# Fixes: paths quoted (unquoted $APP_PATH/$HOME break when they contain
# spaces); 'rm -rf' replaced by 'rm -f' since the [ -f ] guard only matches
# a regular file.
setup_dirs()
{
    mkdir -p "$APP_PATH"/{autoload,colors,plugged,vimrcs}
    cp {./basic_settings.vim,./vim_global_env.vim,./plugins.vim,./vimrc} \
        "$APP_PATH/vimrcs"
    # Replace any pre-existing ~/.vimrc with a symlink to our copy.
    if [ -f "$HOME/.vimrc" ]; then
        rm -f "$HOME/.vimrc"
    fi
    ln -s "$APP_PATH/vimrcs/vimrc" "$HOME/.vimrc"
}
# Copy fonts from ./fonts (next to this script) into the user's font
# directory: $HOME/Library/Fonts on macOS, $HOME/.fonts on Linux. An
# optional $1 filters which font files are installed by filename prefix.
install_fonts()
{
    # Set source and target directories
    powerline_fonts_dir="$( cd "$( dirname "$0" )" && pwd )/fonts"
    # if an argument is given it is used to select which fonts to install
    prefix="$1"
    if test "$(uname)" = "Darwin" ; then
        # MacOS
        font_dir="$HOME/Library/Fonts"
    else
        # Linux
        font_dir="$HOME/.fonts"
        mkdir -p $font_dir
    fi
    # Copy all fonts to user fonts directory
    echo "Copying fonts..."
    find "$powerline_fonts_dir" \( -name "$prefix*.[ot]tf" -or -name "$prefix*.pcf.gz" \) -type f -print0 | xargs -0 -n1 -I % cp "%" "$font_dir/"
    # Reset font cache on Linux
    if which fc-cache >/dev/null 2>&1 ; then
        echo "Resetting font cache, this may take a moment..."
        fc-cache -f "$font_dir"
    fi
    echo "Powerline fonts installed to $font_dir"
}
# Clone the powerline fonts repo plus FiraCode into ./fonts, install them
# via install_fonts, then delete the working copy.
setup_fonts()
{
    local curr_path=$(cd `dirname $0`; pwd)
    # Clone fonts
    #git clone https://github.com/powerline/fonts.git
    git clone "$PWLF_URI"
    #curl -fLo ./fonts/FiraCode-Light.ttf --create-dirs https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Light.ttf
    curl -fLo ./fonts/FiraCode-Light.ttf --create-dirs "$FICD_URI"
    # Install fonts
    install_fonts
    # Clean-up a bit
    # NOTE(review): nothing above changes directory, so this cd appears to be
    # defensive; the rm then removes the ./fonts clone -- confirm intent.
    cd $curr_path
    rm -rf fonts
    ret="$?"
    success "Fonts have been installed!"
}
# Install the bundled gnome-terminal color scheme: stamp %gconf.xml with the
# current time, then copy it into the Default profile directory.
setup_conf()
{
    # Record current time for modification time(sec based)
    local modtime=`date +%s`
    # Replace mtime in %gconf.xml
    # Use " instead of ' in sed because " is weak transfer, otherwise
    # variable modtime will not available!
    sed -i -e "s/mtime=\"[0-9]*\"/mtime=\"$modtime\"/g" %gconf.xml
    # Replace gnome scheme
    mkdir -p $HOME/.gconf/apps/gnome-terminal/profiles/Default/
    cp -f %gconf.xml $HOME/.gconf/apps/gnome-terminal/profiles/Default/%gconf.xml
    ret="$?"
    success "Terminal Scheme OK!"
}
############################# MAIN()
# Sanity checks, backup of any existing config, then: fonts, directory
# layout, terminal scheme, and finally vim-plug with all plug-ins.
variable_set "$HOME"
program_should_exist "vim"
program_should_exist "git"
old_backup "$HOME/.vim" \
    "$HOME/.vimrc" \
    "$HOME/.gvimrc"
setup_fonts
setup_dirs
setup_conf
setup_vimplug
msg "\nPlease \33[36mreboot\33[0m for greate VIM experience."
msg "\nThanks for installing $app_name."
msg "\33[36m© `date +%Y` https://github.com/smartcub/\33[0m"
|
#!/bin/bash
set -e
# Make sure, we run in the root of the repo and
# therefore run the tests on all packages
base_dir="$( cd "$(dirname "$0")/.." && pwd )"
cd "$base_dir" || {
    echo "Cannot cd to '$base_dir'. Aborting." >&2
    exit 1
}
# Overall exit status: stays 0 only if every runTest invocation succeeds.
rc=0
# Build the builtin 'executable' plugin as a Go shared object so the test
# suite can load it at runtime.
function buildPlugins {
    go build \
        -buildmode plugin \
        -tags=plugin \
        -o ./pkg/plugins/builtin/executable.so \
        ./pkg/plugins/builtin/executable.go
}
# Run the test function named by $1, fold its exit status into the global
# $rc, and print begin/end banners with the result.
# Fix: this script enables 'set -e', so a bare '$name' invocation would
# abort the whole script on the first failing test before the exit code was
# captured or reported. Running it on the left of '||' suppresses errexit
# for that command and lets us record the code.
function runTest {
  local name=$1
  local result="SUCCESS"
  printf "============== begin %s\n" "$name"
  local code=0
  $name || code=$?
  rc=$((rc || $code))
  if [ $code -ne 0 ]; then
    result="FAILURE"
  fi
  printf "============== end %s : %s code=%d\n\n\n" "$name" "$result" $code
}
# Static analysis over all packages.
function testGoLangCILint {
    golangci-lint run ./...
}
# Unit tests over all packages.
function testGoTest {
    go test -v ./...
}
# Run the labeled code blocks embedded in the docs as tests (via mdrip).
function testExamples {
    mdrip --mode test --label test README.md ./examples
}
# Use of GOPATH is optional if go modules are
# used. This script tries to work for people who
# don't have GOPATH set, and work for travis.
#
# Upon entry, travis has GOPATH set, and used it
# to install mdrip and the like.
#
# Use GOPATH to define XDG_CONFIG_HOME, then unset
# GOPATH so that go.mod is unambiguously honored.
echo "GOPATH=$GOPATH"
if [ -z ${GOPATH+x} ]; then
    echo GOPATH is unset
    # Probe the two conventional GOPATH locations.
    tmp=$HOME/gopath
    if [ -d "$tmp" ]; then
        oldGoPath=$tmp
    else
        tmp=$HOME/go
        if [ -d "$tmp" ]; then
            oldGoPath=$tmp
        fi
    fi
else
    oldGoPath=$GOPATH
    unset GOPATH
fi
echo "oldGoPath=$oldGoPath"
export XDG_CONFIG_HOME=$oldGoPath/src/sigs.k8s.io
echo "XDG_CONFIG_HOME=$XDG_CONFIG_HOME"
if [ ! -d "$XDG_CONFIG_HOME" ]; then
    echo "$XDG_CONFIG_HOME is not a directory."
    exit 1
fi
# Until go v1.13, set this explicitly.
export GO111MODULE=on
echo "HOME=$HOME"
echo "GOPATH=$GOPATH"
echo "GO111MODULE=$GO111MODULE"
echo pwd=`pwd`
echo " "
echo "Beginning tests..."
runTest buildPlugins
runTest testGoLangCILint
runTest testGoTest
# mdrip was installed into $HOME/go/bin; make sure it is on PATH.
PATH=$HOME/go/bin:$PATH
runTest testExamples
if [ $rc -eq 0 ]; then
    echo "SUCCESS!"
else
    echo "FAILURE; exit code $rc"
fi
exit $rc
|
// Maps each array item to a JSX element, or returns null when the array is
// missing/falsy. An empty array maps to an empty array, not null.
export var nullOrMap = (array: any[], mapFunc: (arrayItem: any) => JSX.Element) => {
    if (!array) {
        return null;
    }
    return array.map(mapFunc);
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.