text stringlengths 1 1.05M |
|---|
<reponame>Denis220795/Textokit
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.textocat.textokit.morph.lemmatizer;
import com.textocat.textokit.commons.util.PipelineDescriptorUtils;
import com.textocat.textokit.morph.dictionary.resource.MorphDictionaryHolder;
import com.textocat.textokit.morph.fs.Wordform;
import com.textocat.textokit.postagger.PosTaggerAPI;
import org.apache.uima.fit.factory.TypeSystemDescriptionFactory;
import org.apache.uima.resource.metadata.Import;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import org.apache.uima.resource.metadata.impl.Import_impl;
/**
* A class that provides constants and methods to use a lemmatizer.
* <p/>
* Lemmatizer implementation requires a CAS with word annotations, i.e., it
* should be pre-processed by a pos-tagger (see {@link PosTaggerAPI}).
* <p/>
* Lemmatizer should set 'lemma' feature of each {@link Wordform}s of an input
* CAS.
* <p/>
* If a lemmatizer implementation needs an external resource with
* {@link MorphDictionaryHolder} then this resource should be named '
* {@value #MORPH_DICTIONARY_RESOURCE_NAME}' and be available among resources
* managed by the comprising pipeline.
*
* @author <NAME>
*/
public class LemmatizerAPI {
    /**
     * A name of the type-system description that defines the types of
     * annotations that are affected by a lemmatizer.
     */
    public static final String TYPESYSTEM_LEMMATIZER = "com.textocat.textokit.morph.morphology-ts";
    /**
     * A name of the analysis-engine description that can be imported. An
     * implementation of a lemmatizer should provide its description at this
     * location, either in the classpath or the UIMA datapath.
     */
    public static final String AE_LEMMATIZER = "com.textocat.textokit.morph.lemmatizer.lemmatizer-ae";
    /**
     * The resource name under which a MorphDictionaryHolder implementation
     * must be declared. Reuses the pos-tagger's name so both components can
     * share one dictionary resource binding.
     */
    public static final String MORPH_DICTIONARY_RESOURCE_NAME =
            PosTaggerAPI.MORPH_DICTIONARY_RESOURCE_NAME;

    /**
     * @return the type-system description instance for {@link #TYPESYSTEM_LEMMATIZER}
     */
    public static TypeSystemDescription getTypeSystemDescription() {
        return TypeSystemDescriptionFactory.createTypeSystemDescription(TYPESYSTEM_LEMMATIZER);
    }

    /**
     * @return an import instance pointing at {@link #AE_LEMMATIZER}. This is
     *         the preferred way to include the AE into a pipeline, especially
     *         when the pipeline descriptor is expected to be serialized into
     *         XML.
     * @see PipelineDescriptorUtils#createAggregateDescription(java.util.List,
     *      java.util.List)
     */
    public static Import getAEImport() {
        Import result = new Import_impl();
        result.setName(AE_LEMMATIZER);
        return result;
    }

    // Static-constants holder: not meant to be instantiated.
    private LemmatizerAPI() {
    }
}
|
#!/bin/bash
## Bootstrap the installation of (Home)brew packages, by provisioning brew itself.
## By Stephen D. Rogers <inbox.c7r@steve-rogers.com>
##
## Installs Homebrew, and then a standard set of taps, extensions, formulas, and bundles.
##
## Arguments:
##
## None.
##
## Typical use:
##
## brew-bootstrap.sh
##
## Known bugs & limitations:
##
## Only installs Homebrew to `/usr/local`. This is a limitation of the standard Homebrew installation script.
##
## Only tested on mac OS, even though Homebrew now supports other platforms.
##
# Abort on any command failure; enable pipefail where the shell supports it
# (the 2>&- || : makes the option a no-op on shells that lack it).
set -e
set -o pipefail 2>&- || :

# Derived names for this script: path-no-suffix, file basename, basename stem,
# script directory, and the package directory one level above it.
this_script_pnp="${0%.*sh}"
this_script_fbn="$(basename "$0")"
this_script_stem="${this_script_fbn%.*sh}"
this_script_dpn="$(cd "$(dirname "$0")" && pwd -P)"
this_package_dpn="$(cd "$(dirname "${this_script_dpn}")" && pwd -P)"
##
## configuration:
##
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_DPN="/usr/local" # do not change; presumed by the bootstrapping installation script
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_BACKUP_FBN=".sb.brew-bootstrap.before.tar.gz" # relative to installation root
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_BACKUP_FPN="${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_DPN%/}/${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_BACKUP_FBN:?}"
#
# Empty by default; set non-empty to force re-download of cached install scripts.
BREW_BOOTSTRAP_POLICY_ALWAYS_DOWNLOAD_PACKAGE_BREW_INSTALLATION_SCRIPTS= #
# Cached copies of the official Homebrew install/uninstall scripts live next to
# this script, named <stem>.cached.installation.{redo,undo}.rb.
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_SCRIPT_PNP="${this_script_dpn%/}/${this_script_stem:?}.cached.installation"
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_REDO_SCRIPT_SUFFIX=".redo.rb"
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_UNDO_SCRIPT_SUFFIX=".undo.rb"
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_REDO_SCRIPT_URL="https://raw.githubusercontent.com/Homebrew/install/master/install"
BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_UNDO_SCRIPT_URL="https://raw.githubusercontent.com/Homebrew/install/master/uninstall"
##
# Taps, extensions, formulas, and bundle files installed by default.
BREW_BOOTSTRAP_STANDARD_BREW_TAP_LIST=(
    homebrew/bundle
    homebrew/cask
    homebrew/cask-versions
    homebrew/core
)
##
BREW_BOOTSTRAP_STANDARD_BREW_EXTENSION_LIST=(
    bundle
    cask
)
##
BREW_BOOTSTRAP_STANDARD_BREW_FORMULA_LIST=(
    mas
    pkg-config
)
##
BREW_BOOTSTRAP_STANDARD_BREW_BUNDLE_LIST=(
    "${this_package_dpn:?}/etc/${this_script_stem:?}".conf.initial-bundle
)
# Sanity check (aborts under set -e): at least one bundle file must be listed,
# because create_brew_bootstrap_bundle copies element [0].
[ "${#BREW_BOOTSTRAP_STANDARD_BREW_BUNDLE_LIST[@]}" -gt 0 ]
##
## from snippets library:
##
function xx() { # ...
    ## Trace a command to stderr ("+ cmd args…"), then execute it.
    printf 1>&2 '+'
    printf 1>&2 ' %s' "$@"
    printf 1>&2 '\n'
    "$@"
}
function without_output() { # ...
    ## Run a command with both stdout and stderr discarded; the command's
    ## exit status is passed through unchanged.
    "$@" 1>/dev/null 2>/dev/null
}
function without_interaction() { # ...
    ## Run a command with stdin redirected from /dev/null so it cannot
    ## block waiting for user input.
    "$@" 0</dev/null
}
function briefly_cache_sudo_authentication() { # ...
    # Prompt for (and cache) sudo credentials up front, so the later
    # backup/installation steps do not stall mid-run waiting for a password.
    sudo true # side effect: briefly caches sudo authentication
}
function install_brew_tap() { # [tap_name ...]
    ## Register each named Homebrew tap, tracing every command via xx.
    local tap
    for tap in "$@" ; do
        xx :
        xx brew tap "${tap}"
    done
}
function install_brew_extension() { # [extension_name ...]
    ## Trigger installation of each brew extension by invoking its --help
    ## (brew installs an extension on first use); output is discarded.
    local extension
    for extension in "$@" ; do
        xx :
        xx without_output brew "${extension}" --help
        xx : "^-- side effect: installs extension '${extension}'"
    done
}
function install_brew_formula() { # [formula_name ...]
    ## Install each named formula via `brew install`.
    local formula
    for formula in "$@" ; do
        xx :
        xx brew install "${formula}"
    done
}
function install_brew_bundle() { # [bundle_name ...]
    ## Install every package listed in each named Brewfile via `brew bundle`.
    local bundle
    for bundle in "$@" ; do
        xx :
        xx brew bundle install --file="${bundle}"
    done
}
##
## core logic:
##
function check_package_brew_installation_root() { # installation_root_dpn
    ## Validate that the installation root is an absolute path and an
    ## existing real directory (not a symlink). On failure, prints a message
    ## and fails; under the script's `set -e` the failing subshell aborts
    ## the whole run.
    local installation_root_dpn="${1:?}" ; shift
    [ $# -eq 0 ]
    ##
    case "${installation_root_dpn:?}" in
    /*)
        true
        ;;
    *)
        echo 1>&2 "Must be absolute (not relative) path: ${installation_root_dpn:?}"
        # (false ; return) fails inside a subshell; with `set -e` the false
        # aborts the subshell, propagating a non-zero status to the caller.
        (false ; return)
        ;;
    esac
    ! [ -L "${installation_root_dpn:?}" ] || {
        echo 1>&2 "Must be directory (not symbolic link): ${installation_root_dpn:?}"
        (false ; return)
    }
    [ -d "${installation_root_dpn:?}" ] || {
        echo 1>&2 "Must be directory (created already): ${installation_root_dpn:?}"
        (false ; return)
    }
}
function download_package_brew_installation_script_to() { # script_destination_fpn script_url
    ## Fetch script_url with curl and write the body to script_destination_fpn.
    local script_destination_fpn="${1:?}" ; shift
    local script_url="${1:?}" ; shift
    [ $# -eq 0 ]
    ##
    xx :
    # The redirection captures stdout of the xx-wrapped curl into the file;
    # xx's command trace goes to stderr, so it does not pollute the download.
    xx curl -fsSL "${script_url:?}" > "${script_destination_fpn:?}"
}
function ensure_download_of_package_brew_installation_script() { # script_destination_fpn script_url
    ## Download the installation script unless a non-empty cached copy exists.
    local script_destination_fpn="${1:?}" ; shift
    local script_url="${1:?}" ; shift
    [ $# -eq 0 ]
    ##
    # When the always-download policy is set, truncate the cached copy so the
    # -s test below fails and a fresh download is forced.
    ! [ -n "${BREW_BOOTSTRAP_POLICY_ALWAYS_DOWNLOAD_PACKAGE_BREW_INSTALLATION_SCRIPTS}" ] || {
        > "${script_destination_fpn:?}"
    }
    [ -s "${script_destination_fpn:?}" ] || {
        download_package_brew_installation_script_to "${script_destination_fpn:?}" "${script_url:?}"
        xx :
        xx chmod a+rx "${script_destination_fpn:?}"
    }
}
function backup_package_brew_installation_root_to() { # backup_destination_fpn installation_root_dpn
    ## Create a gzipped tar of the installation root (needs sudo for files
    ## not owned by the current user).
    local backup_destination_fpn="${1:?}" ; shift
    local installation_root_dpn="${1:?}" ; shift
    [ $# -eq 0 ]
    ##
    # The backup file lives inside the root being archived; strip the root
    # prefix to build a relative --exclude pattern so the archive does not
    # try to include itself.
    local backup_destination_fpn_exclusion_pattern="${backup_destination_fpn#${installation_root_dpn%/}/}"
    # cd in a subshell so the caller's working directory is unchanged.
    (cd "${installation_root_dpn:?}"
        xx :
        xx sudo tar czf "${backup_destination_fpn:?}" --exclude "${backup_destination_fpn_exclusion_pattern:?}" .
    )
}
function ensure_backup_of_package_brew_installation_root() { # installation_root_dpn backup_destination_fpn
    ## Take a backup of the installation root unless a non-empty backup
    ## archive already exists.
    local installation_root_dpn="${1:?}" ; shift
    local backup_destination_fpn="${1:?}" ; shift
    [ $# -eq 0 ]
    ##
    if ! [ -s "${backup_destination_fpn:?}" ] ; then
        backup_package_brew_installation_root_to "${backup_destination_fpn:?}" "${installation_root_dpn:?}"
    fi
}
function install_package_brew() { #
    ## Install Homebrew itself: validate the installation root, back it up,
    ## cache the official install/uninstall scripts, then run the installer
    ## non-interactively.
    local pnp="${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_SCRIPT_PNP:?}"
    # Plain assignments; the original wrapped these in pointless
    # "$(echo ...)" command substitutions that forked a subshell for nothing.
    local package_brew_installation_redo_script_fpn="${pnp:?}${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_REDO_SCRIPT_SUFFIX:?}"
    local package_brew_installation_undo_script_fpn="${pnp:?}${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_UNDO_SCRIPT_SUFFIX:?}"
    ##
    check_package_brew_installation_root \
        "${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_DPN:?}" #
    ##
    # Cache sudo first so the backup (sudo tar) does not prompt mid-run.
    briefly_cache_sudo_authentication
    ensure_backup_of_package_brew_installation_root \
        "${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_DPN:?}" \
        "${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_ROOT_BACKUP_FPN:?}" #
    # Cache both the installer and (for later manual use) the uninstaller.
    ensure_download_of_package_brew_installation_script \
        "${package_brew_installation_redo_script_fpn:?}" \
        "${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_REDO_SCRIPT_URL:?}" #
    ensure_download_of_package_brew_installation_script \
        "${package_brew_installation_undo_script_fpn:?}" \
        "${BREW_BOOTSTRAP_PACKAGE_BREW_INSTALLATION_UNDO_SCRIPT_URL:?}" #
    xx :
    xx without_interaction /usr/bin/ruby "${package_brew_installation_redo_script_fpn:?}"
}
# Thin wrappers applying the generic installers to the configured standard lists.
function install_standard_brew_taps() { #
    xx :
    xx install_brew_tap "${BREW_BOOTSTRAP_STANDARD_BREW_TAP_LIST[@]}"
}
function install_standard_brew_extensions() { #
    xx :
    xx install_brew_extension "${BREW_BOOTSTRAP_STANDARD_BREW_EXTENSION_LIST[@]}"
}
function install_standard_brew_formulas() { #
    xx :
    xx install_brew_formula "${BREW_BOOTSTRAP_STANDARD_BREW_FORMULA_LIST[@]}"
}
function install_standard_brew_bundles() { #
    xx :
    xx install_brew_bundle "${BREW_BOOTSTRAP_STANDARD_BREW_BUNDLE_LIST[@]}"
}
function create_brew_bootstrap_bundle { # [--provide-parting-advice]
    ## Copy the initial Brewfile to ~/.Brewfile.bootstrap; with
    ## --provide-parting-advice, also print guidance about promoting it to
    ## the global ~/.Brewfile.
    local brew_global_bundle_fpn="${HOME:?}"/.Brewfile
    local brew_bootstrap_bundle_fpn="${brew_global_bundle_fpn:?}".bootstrap
    local provide_parting_advice_p=
    while [ $# -gt 0 ] ; do
        case "${1}" in
        --provide-parting-advice)
            provide_parting_advice_p=t
            shift
            ;;
        --)
            shift
            break
            ;;
        *)
            break
            ;;
        esac
    done
    [ $# -eq 0 ]
    ##
    local brew_bundle_dump_action_performed="Created"
    if [ -e "${brew_bootstrap_bundle_fpn:?}" ] ; then
        brew_bundle_dump_action_performed="Updated existing"
    fi
    xx :
    xx cp "${BREW_BOOTSTRAP_STANDARD_BREW_BUNDLE_LIST[0]:?}" "${brew_bootstrap_bundle_fpn:?}" || return $?
    if [ -n "${provide_parting_advice_p}" ] ; then
        echo 1>&2
        echo 1>&2 "${brew_bundle_dump_action_performed:?} brew bundle file: ${brew_bootstrap_bundle_fpn:?}"
        if [ "${brew_bootstrap_bundle_fpn:?}" != "${brew_global_bundle_fpn:?}" ] ; then
            echo 1>&2
            echo 1>&2 "Use it as a starting point for your global bundle file: ${brew_global_bundle_fpn:?}"
        fi
        echo 1>&2
        # Bug fix: the original used "$(brew bundle --help)", which *executed*
        # `brew bundle --help` and spliced its whole output into the message.
        # The intent was to name the command for the reader.
        echo 1>&2 "For details, see: brew bundle --help"
    fi
}
function main() { # ...
    ## Full bootstrap sequence: brew itself, then standard taps, extensions,
    ## formulas, and bundles, finishing with the bootstrap Brewfile.
    install_package_brew "$@"
    install_standard_brew_taps
    install_standard_brew_extensions
    install_standard_brew_formulas
    install_standard_brew_bundles
    create_brew_bootstrap_bundle --provide-parting-advice
}
# Run main only when this file is executed directly (not when sourced).
! [ "$0" = "${BASH_SOURCE:?}" ] || main "$@"
|
def sortAscending(arr):
    """Sort ``arr`` in place in ascending order (bubble sort) and return it."""
    n = len(arr)
    # After each outer pass the largest remaining element has bubbled to the
    # end, so the unsorted prefix shrinks by one each time.
    for end in range(n - 1, 0, -1):
        for j in range(end):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
    return arr

sortAscending([3, 5, 2, 1, 4])  # [1, 2, 3, 4, 5]
<gh_stars>1-10
package org.egovframe.rte.psl.dataaccess.mybatis.mapper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.math.BigDecimal;
import java.util.List;
import org.egovframe.rte.psl.dataaccess.TestBase;
import org.egovframe.rte.psl.dataaccess.mapper.EmployerMapper;
import org.egovframe.rte.psl.dataaccess.vo.EmpVO;
import javax.annotation.Resource;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.datasource.init.ScriptUtils;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
/**
* == 개정이력(Modification Information) ==
*
* 수정일 수정자 수정내용
* ------- -------- ---------------------------
* 2014.01.22 권윤정 SimpleJdbcTestUtils -> JdbcTestUtils 변경
* 2014.01.22 권윤정 SimpleJdbcTemplate -> JdbcTemplate 변경
*
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath*:META-INF/spring/context-*.xml" })
@Transactional
public class EmployerMapperTest extends TestBase {

    @Resource(name = "employerMapper")
    EmployerMapper employerMapper;

    @Before
    public void onSetUp() throws Exception {
        // Initialize the DB from an external sql file (drop/recreate the
        // existing tables and load the seed data).
        // Uses Spring's script utilities (comment originally said
        // JdbcTestUtils; the code actually calls ScriptUtils).
        // The continueOnError flag is set to true - cf.) beware of rollback
        // behavior when the script contains DDL.
        ScriptUtils.executeSqlScript(dataSource.getConnection(), new ClassPathResource("META-INF/testdata/sample_schema_ddl_" + usingDBMS + ".sql"));
    }

    // Builds the fixture employee record used by all test cases.
    public EmpVO makeVO() {
        EmpVO vo = new EmpVO();
        vo.setEmpNo(new BigDecimal(100));
        vo.setEmpName("홍길동");
        vo.setJob("대리");
        return vo;
    }

    // Asserts that the selected record matches the expected one field by field.
    public void checkResult(EmpVO vo, EmpVO resultVO) {
        assertNotNull(resultVO);
        assertEquals(vo.getEmpNo(), resultVO.getEmpNo());
        assertEquals(vo.getEmpName(), resultVO.getEmpName());
        assertEquals(vo.getJob(), resultVO.getJob());
    }

    @Test
    public void testInsert() throws Exception {
        EmpVO vo = makeVO();
        // insert
        employerMapper.insertEmployer(vo);
        // select
        EmpVO resultVO = employerMapper.selectEmployer(vo.getEmpNo());
        // check
        checkResult(vo, resultVO);
    }

    @Test
    public void testUpdate() throws Exception {
        EmpVO vo = makeVO();
        // insert
        employerMapper.insertEmployer(vo);
        // data change
        vo.setEmpName("홍길서");
        vo.setJob("과장");
        // update
        int effectedRows = employerMapper.updateEmployer(vo);
        assertEquals(1, effectedRows);
        // select
        EmpVO resultVO = employerMapper.selectEmployer(vo.getEmpNo());
        // check
        checkResult(vo, resultVO);
    }

    @Test
    public void testDelete() throws Exception {
        EmpVO vo = makeVO();
        // insert
        employerMapper.insertEmployer(vo);
        // delete
        int effectedRows = employerMapper.deleteEmployer(vo.getEmpNo());
        assertEquals(1, effectedRows);
        // select
        EmpVO resultVO = employerMapper.selectEmployer(vo.getEmpNo());
        // must be null after the delete
        assertNull(resultVO);
    }

    @Test
    public void testSelectList() throws Exception {
        EmpVO vo = makeVO();
        // insert
        employerMapper.insertEmployer(vo);
        // set the key (name prefix) as the search condition
        EmpVO searchVO = new EmpVO();
        searchVO.setEmpName("홍길");
        // selectList
        List<EmpVO> resultList = employerMapper.selectEmployerList(searchVO);
        // the key condition should yield exactly one row
        assertNotNull(resultList);
        assertTrue(resultList.size() > 0);
        assertEquals(1, resultList.size());
        // assertTrue(resultList.get(0) instanceof DeptVO);
        checkResult(vo, resultList.get(0));
    }
}
|
// You should implement your task here.
module.exports =
function towelSort(matrix) {
let arr = [];
let arrNumb = [];
if (matrix == undefined) return [];
matrix.forEach((subArr, index) => {
if (index % 2 == 0) {
arr.push(subArr);
} else {
let subArrReverc = subArr.reverse();
arr.push(subArrReverc);
}
});
arr.forEach(subArr => {
if (subArr.length === 0) {
arrNumb.push([]);
}
subArr.forEach(el => {
arrNumb.push(el);
});
});
return arrNumb;
};
|
/**
* Author: <NAME>
* Date: 2/5/22
* File: SavingsAccount.java
* Description: This java file contains the getter and setters for the SavingsAccountRunner.java file
*/
import java.util.*;
public class SavingsAccount {
    private int id;
    private double balance;
    // Annual interest rate in percent (e.g. 4.5 means 4.5%).
    private double annualIntRate;
    // Last computed weekly interest amount (see getWeeklyIntRate).
    private double weeklyIntRate;
    // Pending deposit amount, applied to the balance by getDeposit().
    private double deposit;
    private Date accountCreationDate;

    /** Creates an account with all fields zeroed. */
    public SavingsAccount() {
    }

    /** Creates an account with the given id, starting balance, and annual rate (in percent). */
    public SavingsAccount(int id, double balance, double annualIntRate) {
        this.id = id;
        this.balance = balance;
        this.annualIntRate = annualIntRate;
    }

    // getters, accessors
    public int getId() {
        return id;
    }

    public double getBalance() {
        return balance;
    }

    /**
     * Returns the annual interest rate as a fraction (percent / 100).
     * Bug fix: the original mutated the field ({@code annualIntRate /= 100})
     * on every call, so each repeated call shrank the stored rate by another
     * factor of 100 and corrupted later calculations. Now computed without
     * modifying state; the first-call result is unchanged.
     */
    public double getAnnualIntRate() {
        return annualIntRate / 100;
    }

    /** Returns the weekly interest *amount*: (annual fraction / 52 weeks) * balance. */
    public double getWeeklyIntRate() {
        weeklyIntRate = (annualIntRate / 100) / 52;
        weeklyIntRate *= balance;
        return weeklyIntRate;
    }

    /**
     * Applies the pending deposit to the balance and returns the new balance.
     * NOTE(review): despite the getter-like name this mutates state; calling
     * it twice deposits twice — confirm the runner relies on this behavior.
     */
    public double getDeposit() {
        balance += deposit;
        return balance;
    }

    /** Returns the current time as the "creation" date (refreshed on every call). */
    public Date getDate() {
        // I couldn't figure out how to make it so that the date variable
        // wouldn't change every time that the 3 option was selected
        Date accountCreationDate = new Date();
        this.accountCreationDate = accountCreationDate;
        return this.accountCreationDate;
    }

    // setters, mutators
    public void setId(int id) {
        this.id = id;
    }

    public void setBalance(double balance) {
        this.balance = balance;
    }

    /** Sets the annual interest rate, in percent. */
    public void setAnnualIntRate(double annualIntRate) {
        this.annualIntRate = annualIntRate;
    }

    public void setDeposit(double deposit) {
        this.deposit = deposit;
    }
}
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from pipeline.contrib.external_plugins.models import GIT, S3
from gcloud.tests.external_plugins.mock import * # noqa
from gcloud.tests.external_plugins.mock_settings import * # noqa
from gcloud.external_plugins.tasks import sync_task
class TestSyncTask(TestCase):
    """Unit tests for gcloud.external_plugins.tasks.sync_task."""

    # NOTE(review): `MagicMock(return_val=[])` sets an arbitrary attribute
    # named `return_val`; it does NOT configure the mock's return value.
    # This looks like a typo for `return_value=[]` -- confirm and fix
    # separately, since changing it may alter what the tests exercise.
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_SYNC_TASK_GET, MockSyncTaskModel)
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_GIT_ORIGINAL_PACKAGE_SOURCE_ALL,
           MagicMock(return_value=[MockWriterAndReader(name='git', id=1, type=GIT)]))
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_S3_ORIGINAL_PACKAGE_SOURCE_ALL,
           MagicMock(return_value=[MockWriterAndReader(name='s3', id=1, type=S3)]))
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_FS_ORIGINAL_PACKAGE_SOURCE_ALL, MagicMock(return_val=[]))
    def test_sync_task__git_and_s3_normal(self):
        # Cache source behaves normally -> sync succeeds.
        with patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_CACHE_PACKAGE_SOURCE_ALL,
                   MagicMock(return_value=[MockWriterAndReader(name='cache', id=1, type=GIT)])):
            self.assertTrue(sync_task(1))
        # Cache source raises during write -> sync reports failure.
        with patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_CACHE_PACKAGE_SOURCE_ALL,
                   MagicMock(return_value=[MockWriterAndReader(name='cache', id=1, type=GIT, raise_exception=True)])):
            self.assertFalse(sync_task(1))

    # NOTE(review): the GIT source is patched twice here (once with a mock
    # reader, once below with MagicMock(return_val=[])); the innermost
    # (last-listed) decorator is applied first, so the effective patch is the
    # stacking order's outermost -- verify which one this test intends.
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_SYNC_TASK_GET, MockSyncTaskModel)
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_GIT_ORIGINAL_PACKAGE_SOURCE_ALL,
           MagicMock(return_value=[MockWriterAndReader(name='git', id=1, type=GIT)]))
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_S3_ORIGINAL_PACKAGE_SOURCE_ALL,
           MagicMock(return_value=[MockWriterAndReader(name='s3', id=1, type=S3, raise_exception=True)]))
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_FS_ORIGINAL_PACKAGE_SOURCE_ALL, MagicMock(return_val=[]))
    @patch(GCLOUD_EXTERNAL_PLUGINS_MODELS_GIT_ORIGINAL_PACKAGE_SOURCE_ALL, MagicMock(return_val=[]))
    def test_sync_task__git_normal_and_s3_abnormal(self):
        # The S3 source raises -> the task must report failure.
        self.assertFalse(sync_task(1))
|
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Release pipeline: build the Ubuntu CPU pip package for Python 3.10 and run
# the pip smoke tests via pip_new.sh.
set -e
set -x

source tensorflow/tools/ci_build/release/common.sh

# Update bazel
install_bazelisk

# Export required variables for running pip.sh
export OS_TYPE="UBUNTU"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.10'

# Setup virtual environment and install dependencies
setup_venv_ubuntu ${TF_PYTHON_VERSION}
export PYTHON_BIN_PATH="$(which ${TF_PYTHON_VERSION})"

# Get the default test targets for bazel.
source tensorflow/tools/ci_build/build_scripts/DEFAULT_TEST_TARGETS.sh

# Export optional variables for running pip.sh
export TF_BUILD_FLAGS="--config=release_cpu_linux"
export TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1"
# TF Lite targets are excluded from the pip test run.
export TF_TEST_TARGETS="${DEFAULT_BAZEL_TARGETS} -//tensorflow/lite/... "
export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean"
export TF_TEST_FILTER_TAGS='-no_oss,-oss_serial,-no_oss_py310,-v1only'
#export IS_NIGHTLY=0 # Not nightly; uncomment if building from tf repo.
export TF_PROJECT_NAME="tensorflow_cpu"
export TF_PIP_TEST_ROOT="pip_test"

./tensorflow/tools/ci_build/builds/pip_new.sh
|
<reponame>liangklfangl/structor-usage
import expect from "expect";
import modalReducerReducer from "../reducer";
describe("modalReducerReducer", () => {
  it("returns the initial state", () => {
    // An undefined state with an unknown action must yield the default state.
    const initialState = modalReducerReducer(undefined, {});
    expect(initialState).toEqual({ name: "<NAME>" });
  });
});
|
import Foundation
/// Integer codes for the supported weather conditions.
class WeatherConstants {
    static let sunny = 0
    static let cloudy = 1
    static let rainy = 2
    static let stormy = 3
    static let snowy = 4

    /// Maps a condition name (case-insensitive) to its numeric code,
    /// or returns nil when the name is not recognized.
    static func weatherCode(for condition: String) -> Int? {
        let codesByName: [String: Int] = [
            "sunny": sunny,
            "cloudy": cloudy,
            "rainy": rainy,
            "stormy": stormy,
            "snowy": snowy,
        ]
        return codesByName[condition.lowercased()]
    }
}
import aws, { S3 } from 'aws-sdk';
import { config } from 'dotenv';
// Load environment variables from .env before reading AWS credentials.
config();

// Configure the AWS SDK with credentials taken from the environment.
aws.config.update({
  accessKeyId: process.env.ID,
  secretAccessKey: process.env.SECRET,
});

// Shared S3 client instance for the rest of the app.
export const s3 = new S3();
// NOTE(review): process.env.BUCKET_NAME is typed `string` here but is
// `string | undefined` at runtime if the variable is missing -- confirm the
// deployment always sets BUCKET_NAME.
export const bucketName: string = process.env.BUCKET_NAME;
|
<reponame>gme/three20
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@interface UIColor (TTCategory)

/**
 * Returns a new color with the hue, saturation, and value each shifted by
 * the given deltas (hd, sd, vd).
 */
- (UIColor*)transformHue:(CGFloat)hd saturation:(CGFloat)sd value:(CGFloat)vd;

/**
 * Uses transformHue to create a lighter version of the color.
 */
- (UIColor*)highlight;

/**
 * Uses transformHue to create a darker version of the color.
 */
- (UIColor*)shadow;

// HSV components of the receiver.
- (CGFloat)hue;
- (CGFloat)saturation;
- (CGFloat)value;

@end
|
#!/bin/sh
# Container entrypoint: prepare the cache directories, expand the squid
# configuration template from environment variables, then run squid.

# Make sure our cache is setup
mkdir -p /cache/squid3
mkdir -p /cache/apt-cacher-ng
mkdir -p /cache/logs/squid3
mkdir -p /cache/logs/apt-cacher-ng
mkdir -p /cache/logs/supervisor
mkdir -p /cache/apt-cacher-ng/_import
chown -R squid:squid /cache /squid_config
#[ -e /cache/squid3/swap.state ] || /usr/sbin/squid -f /squid_config/squid.conf -z 2>/dev/null

# ALLOWED_NETWORKS and EXTRA_HTTPS_PORTS are comma-separated lists; IFS makes
# the unquoted expansions below split on commas (intentionally unquoted).
IFS=","

SQUID_ALLOWED_NETWORKS=
for ALLOWED_NETWORK in ${ALLOWED_NETWORKS}; do
SQUID_ALLOWED_NETWORKS="${SQUID_ALLOWED_NETWORKS}acl localnet src ${ALLOWED_NETWORK}
"
done

SQUID_EXTRA_HTTPS_PORTS=
for EXTRA_HTTPS_PORT in ${EXTRA_HTTPS_PORTS}; do
SQUID_EXTRA_HTTPS_PORTS="acl SSL_ports port ${EXTRA_HTTPS_PORT}
"
done

# Build the http_port directives from the ENABLE_* feature flags.
PROXY_ENABLED=""
if [ "${ENABLE_PROXY}" = "TRUE" ] ; then
    PROXY_ENABLED=${PROXY_ENABLED}"http_port 3128\n";
fi
if [ "${ENABLE_ACCEL}" = "TRUE" ] ; then
    PROXY_ENABLED=${PROXY_ENABLED}"http_port 3142 accel\n";
fi
if [ "${ENABLE_INTERCEPT}" = "TRUE" ] ; then
    PROXY_ENABLED=${PROXY_ENABLED}"http_port 3129 intercept\n";
fi

# Bug fix: the original used `[ ${PARENT_PROXY} == '""' ]` -- `==` is a
# bashism that is undefined for /bin/sh's [ builtin, and the unquoted
# expansion broke the test for empty or multi-word values. Use a quoted
# POSIX `=` comparison instead.
if [ -z "${PARENT_PROXY}" ] || [ "${PARENT_PROXY}" = '""' ] ; then
    ACNG_PARENT_PROXY=
    SQUID_PARENT_PROXY=
else
    ACNG_PARENT_PROXY="Proxy: ${PARENT_PROXY}"
    # Rewrite http(s)://[user:pass@]host:port into a squid cache_peer line;
    # the 888/999 marker pairs carry the optional login/ssl parts through sed.
    SQUID_PARENT_PROXY=`echo ${PARENT_PROXY} | sed -E \
        -e 's/(http(|s)):\/\/((.+)\:(.+)@|)(.+)\:([0-9]+)/cache_peer \6 parent \7 0 default 888\4:\5888 999\2999 name=parent/' \
        -e 's/999999//' \
        -e 's/999s999/ssl/' \
        -e 's/888:888//' \
        -e 's/888(.+)888/login=\1/'`
SQUID_PARENT_PROXY="${SQUID_PARENT_PROXY}
cache_peer_access parent deny aptget
cache_peer_access parent deny deburl
cache_peer_access parent deny to_ubuntu_mirrors
cache_peer_access parent allow all
never_direct allow all
"
fi

# Expand the template placeholders into the live squid.conf.
perl -pe "s|\Q#SQUID_ALLOWED_NETWORKS\E|$SQUID_ALLOWED_NETWORKS|g" /squid_config/squid.tmpl.conf > /squid_config/squid.conf
perl -pe "s|\Q#SQUID_EXTRA_HTTPS_PORTS\E|$SQUID_EXTRA_HTTPS_PORTS|g" -i /squid_config/squid.conf
perl -pe "s|\Q#SQUID_PARENT_PROXY\E|$SQUID_PARENT_PROXY|g" -i /squid_config/squid.conf
perl -pe "s|\Q#PROXY_ENABLED\E|$PROXY_ENABLED|g" -i /squid_config/squid.conf

ln -sf /cache/logs/squid3 /var/log/squid
# Initialize the cache directories (-z), give squid a moment, then run in
# the foreground (-N) as the container's main process.
/usr/sbin/squid -f /squid_config/squid.conf -z
sleep 5
/usr/sbin/squid -N -f /squid_config/squid.conf
|
#!/bin/bash -ex
# Load the common environment/configuration for the test applications.
. ./setup.sh

# Run each test application in its own (absolute-path) directory.
for testdir in `readlink -f */`
do
    cd ${testdir}
    APP=`basename ${testdir}`
    ARG=""
    # These solvers require a problem-size argument; 4 is a small smoke-test size.
    if [ "$APP" = "nine-pt-stencil-solver" -o "$APP" = "poisson-solver" -o "$APP" = "three-pt-stencil-solver" ]; then
        ARG="4"
    fi
    ./${APP} ${ARG}
done
|
import tensorflow as tf
# Loading the dataset
# NOTE(review): `features` and `labels` are not defined in this snippet --
# they must be supplied by the surrounding context before this code runs.
dataset = tf.data.Dataset.from_tensor_slices((features, labels))
# First 70 examples for training, the next 15 for validation, batches of 32.
train_dataset = dataset.take(70).batch(32)
val_dataset = dataset.skip(70).take(15).batch(32)

# Creating the model: two hidden ReLU layers over 6 input features,
# single linear output (regression).
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(64, activation='relu', input_shape=(6,)))
model.add(tf.keras.layers.Dense(32, activation='relu'))
model.add(tf.keras.layers.Dense(1))

# Compiling the model: mean-squared-error loss with MAE reported as a metric.
model.compile(optimizer='adam', loss='mse', metrics=['mae'])

# Training the model
model.fit(train_dataset, epochs=20, validation_data=val_dataset)
#!/bin/sh
set -e

# Root of the Serenity source tree, relative to this library's directory.
SERENITY_ROOT=../../

# Ensure the sysroot include/ and lib/ directories exist.
mkdir -p $SERENITY_ROOT/Root/usr/include/sys/
mkdir -p $SERENITY_ROOT/Root/usr/lib/

# Install the pthread headers and the static library into the sysroot.
cp ./*.h $SERENITY_ROOT/Root/usr/include/
cp libpthread.a $SERENITY_ROOT/Root/usr/lib/
|
#!/bin/sh
# Build everything in a throw-away tmp directory.
rm -rf tmp
mkdir tmp
cd tmp

##########################################################
## Clone, build, and `make install` one oatpp module from GitHub.
## The original repeated this sequence verbatim per module.
install_oatpp_module() {
    MODULE_NAME="$1"
    git clone --depth=1 https://github.com/oatpp/$MODULE_NAME
    cd $MODULE_NAME
    mkdir build
    cd build
    cmake ..
    make install
    cd ../../
}

## install oatpp core first, then the websocket extension (depends on core)
install_oatpp_module "oatpp"
install_oatpp_module "oatpp-websocket"

##########################################################
cd ../
rm -rf tmp
|
import express from 'express';
import ProducerController from '../controllers/controllers.producer';
// Routes for the producer API.
const router = express.Router();

// POST /produce -- submit a new item/job for production.
router.post('/produce', ProducerController.produce);
// GET /status/:id -- query the status of a previously submitted item.
router.get('/status/:id', ProducerController.status);

export default router;
|
#!/bin/bash
# Overflow payload for the Phoenix "stack-two" challenge: 64 bytes of filler
# to reach the overwritten variable, followed by the four bytes
# \x0a\x09\x0a\x0d -- presumably the value the binary checks for
# (little-endian 0x0d0a090a); confirm against the challenge source.
export ExploitEducation=$( python3 -c 'print(("A" * 64) + "\x0a\x09\x0a\x0d")' )
# Replace this shell with the target binary, which reads the variable.
exec /opt/phoenix/amd64/stack-two
|
#!/usr/bin/env bash
CDIR="$(cd "$(dirname "$0")" && pwd)"
build_dir="$CDIR/build"

# Options: -q quiet; -A arch and -K kernel are recorded for callers.
while getopts A:K:q option
do
    case "${option}"
    in
        q) QUIET=1;;
        A) ARCH=${OPTARG};;
        K) KERNEL=${OPTARG};;
    esac
done

# Bug-class fix: path expansions are now quoted so a checkout path containing
# spaces does not word-split (the original left $build_dir etc. unquoted).
rm -rf "$build_dir"
mkdir -p "$build_dir"

for f in pluginrc.zsh
do
    cp "$CDIR/$f" "$build_dir/"
done

cd "$build_dir"

# Derive per-tool quiet flags (left unquoted below on purpose: when empty
# they must disappear entirely rather than become an empty argument).
[ "$QUIET" ] && arg_q='-q' || arg_q=''
[ "$QUIET" ] && arg_s='-s' || arg_s=''
[ "$QUIET" ] && arg_progress='' || arg_progress='--show-progress'
# NOTE(review): arg_s and arg_progress are set but unused in this script --
# confirm whether a later step consumes them.

ohmyzsh_home="$build_dir/ohmyzsh"
if [ -x "$(command -v git)" ]; then
    git clone $arg_q --depth 1 https://github.com/robbyrussell/oh-my-zsh.git "$ohmyzsh_home"
    git clone $arg_q https://github.com/zsh-users/zsh-history-substring-search "$ohmyzsh_home/plugins/zsh-history-substring-search"
    git clone $arg_q https://github.com/zsh-users/zsh-autosuggestions "$ohmyzsh_home/plugins/zsh-autosuggestions"
    git clone $arg_q https://github.com/zsh-users/zsh-syntax-highlighting.git "$ohmyzsh_home/plugins/zsh-syntax-highlighting"
    # Replace the stock themes with the spaceship prompt.
    rm -rf "$ohmyzsh_home/themes"
    git clone $arg_q https://github.com/denysdovhan/spaceship-prompt.git "$ohmyzsh_home/themes"
else
    echo Install git
    exit 1
fi
|
#!/bin/sh
# Restore the executable bit on the bundled binary (lost when the .app is
# transferred without preserving file permissions).
chmod a+x "./Test App.app/Contents/MacOS/Test App"
echo "Made Test App.app executable"
|
<reponame>Nodlik/SWRating<filename>src/Config.ts
module SW {
export interface ConfigObject {
pointNumber: number;
starSize: number;
stretch: boolean;
isLocked: boolean;
starType: string;
}
export class Config {
private config: ConfigObject = {
pointNumber: 5,
starSize: 20,
stretch: false,
isLocked: false,
starType: 'svg'
};
constructor(config: Object = {}) {
this.set(config);
}
get(): ConfigObject {
return this.config;
}
public set(newConfig: Object = {}) {
for (var k in newConfig) {
if (newConfig.hasOwnProperty(k)) {
if (k in this.config) {
this.config[k] = newConfig[k];
}
}
}
}
}
} |
// https://github.com/michael-ciniawsky/postcss-load-config
module.exports = {
  // NOTE(review): postcss-load-config expects `plugins` to map plugin names
  // to options (or to be an array of plugin instances); nesting a `postcss:`
  // key that holds an array inside `plugins` looks like two config styles
  // mixed together -- confirm this is the shape the build actually consumes.
  "plugins": {
    postcss: [require('postcss-cssnext')()]
  }
}
|
import { Component, ContentChild } from '@angular/core';
import { CardTitleDirective } from './card-title.directive';
import { CardSubtitleDirective } from './card-subtitle.directive';
import { CardContentDirective } from './card-content.directive';
import { CardFooterDirective } from './card-footer.directive';
/**
 * Card container component that projects optional title, subtitle, content,
 * and footer sections, located via their marker directives.
 */
@Component({
  selector: 'sfeir-card',
  templateUrl: 'card.component.html',
  styleUrls: ['card.component.css']
})
export class CardComponent {
  // First matching projected child for each card section
  // (undefined when the consumer does not provide that section).
  @ContentChild(CardTitleDirective) cardTitle: CardTitleDirective;
  @ContentChild(CardSubtitleDirective) cardSubtitle: CardSubtitleDirective;
  @ContentChild(CardContentDirective) cardContent: CardContentDirective;
  @ContentChild(CardFooterDirective) cardFooter: CardFooterDirective;

  constructor() {}
}
|
// @flow
import type { GuiContact } from '../../types'
// Action creator: signals that loading of device contacts has begun.
export const loadContactsStart = () => ({
  type: 'CONTACTS/LOAD_CONTACTS_START'
})

// Action creator: delivers the successfully loaded contacts to the store.
export const loadContactsSuccess = (contacts: Array<GuiContact>) => ({
  type: 'CONTACTS/LOAD_CONTACTS_SUCCESS',
  data: { contacts }
})
|
<filename>test/unit/getFormat.test.ts
import getFormat from "../../src/helpers/getFormat";
it("should parse png format", () => {
  expect(getFormat("image.png")).toBe("png");
  expect(getFormat("image.PNG")).toBe("png");
});

it("should parse jpeg format", () => {
  expect(getFormat("image.JPG")).toBe("jpeg");
  expect(getFormat("image.JPEG")).toBe("jpeg");
  expect(getFormat("image.jpg")).toBe("jpeg");
  expect(getFormat("image.jpeg")).toBe("jpeg");
});

it("should parse webp format", () => {
  expect(getFormat("image.webp")).toBe("webp");
  expect(getFormat("image.WEBP")).toBe("webp");
});

it("should parse avif format", () => {
  expect(getFormat("image.avif")).toBe("avif");
  expect(getFormat("image.AVIF")).toBe("avif");
});

// Fixed: this case was mislabelled "should parse webp format", duplicating
// the webp test's name while actually covering tiff.
it("should parse tiff format", () => {
  expect(getFormat("image.tiff")).toBe("tiff");
  expect(getFormat("image.tif")).toBe("tiff");
  expect(getFormat("image.TIFF")).toBe("tiff");
  expect(getFormat("image.TIF")).toBe("tiff");
});

it("should throw when image is not recognized", () => {
  expect(() => getFormat("image.app")).toThrow();
});
|
#!/bin/sh
#
# $NetBSD: osrelease.sh,v 1.4.2.3 1997/12/30 00:14:21 perry Exp $
#
# Copyright (c) 1997 The NetBSD Foundation, Inc.
# All rights reserved.
#
# This code is derived from software contributed to The NetBSD Foundation
# by Luke Mewburn.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. All advertising materials mentioning features or use of this software
# must display the following acknowledgement:
# This product includes software developed by the NetBSD
# Foundation, Inc. and its contributors.
# 4. Neither the name of The NetBSD Foundation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Release number to use
# Release number to use
release=1.3

case $1 in
-s)
	# -s: print the release with the dots stripped (e.g. "13")
	echo $release | sed -e 's,\.,,g'
	;;
*)
	# default: print the release verbatim (e.g. "1.3")
	echo $release
	;;
esac
|
// Copyright ©2016 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package stat_test
import (
"fmt"
"log"
"github.com/ArkaGPL/gonum/floats"
"github.com/ArkaGPL/gonum/mat"
"github.com/ArkaGPL/gonum/stat"
)
// symView is a helper for getting a View of a SymDense.
// It exposes the r×c rectangle of sym whose upper-left corner is (i, j)
// as a read-only mat.Matrix (together with Dims, At and T below).
type symView struct {
	sym *mat.SymDense // backing symmetric matrix

	i, j, r, c int // view origin (i, j) and dimensions r×c
}
// Dims returns the number of rows and columns of the view.
func (s symView) Dims() (r, c int) {
	return s.r, s.c
}
// At returns the element at row i, column j of the view, translating the
// indices into the backing SymDense. It panics when either index lies
// outside the view's bounds.
func (s symView) At(i, j int) float64 {
	switch {
	case i < 0 || s.r <= i:
		panic("i out of bounds")
	case j < 0 || s.c <= j:
		panic("j out of bounds")
	}
	return s.sym.At(s.i+i, s.j+j)
}
// T returns the transpose of the view, completing the mat.Matrix interface.
func (s symView) T() mat.Matrix {
	return mat.Transpose{Matrix: s}
}
// ExampleCC demonstrates canonical correlation analysis on the Boston
// Housing data. The trailing // Output: block pins the exact printed
// matrices, so the statements below must not change what they print.
func ExampleCC() {
	// This example is directly analogous to Example 3.5 on page 87 of
	// Koch, Inge. Analysis of multivariate and high-dimensional data.
	// Vol. 32. Cambridge University Press, 2013. ISBN: 9780521887939

	// bostonData is the Boston Housing Data of Harrison and Rubinfeld (1978)
	n, _ := bostonData.Dims()
	var xd, yd = 7, 4

	// The variables (columns) of bostonData can be partitioned into two sets:
	// those that deal with environmental/social variables (xdata), and those
	// that contain information regarding the individual (ydata). Because the
	// variables can be naturally partitioned in this way, these data are
	// appropriate for canonical correlation analysis. The columns (variables)
	// of xdata are, in order:
	//  per capita crime rate by town,
	//  proportion of non-retail business acres per town,
	//  nitric oxide concentration (parts per 10 million),
	//  weighted distances to Boston employment centres,
	//  index of accessibility to radial highways,
	//  pupil-teacher ratio by town, and
	//  proportion of blacks by town.
	xdata := bostonData.Slice(0, n, 0, xd)

	// The columns (variables) of ydata are, in order:
	//  average number of rooms per dwelling,
	//  proportion of owner-occupied units built prior to 1940,
	//  full-value property-tax rate per $10000, and
	//  median value of owner-occupied homes in $1000s.
	ydata := bostonData.Slice(0, n, xd, xd+yd)

	// For comparison, calculate the correlation matrix for the original data.
	var cor mat.SymDense
	stat.CorrelationMatrix(&cor, bostonData, nil)

	// Extract just those correlations that are between xdata and ydata.
	var corRaw = symView{sym: &cor, i: 0, j: xd, r: xd, c: yd}

	// Note that the strongest correlation between individual variables is 0.91
	// between the 5th variable of xdata (index of accessibility to radial
	// highways) and the 3rd variable of ydata (full-value property-tax rate per
	// $10000).
	fmt.Printf("corRaw = %.4f", mat.Formatted(corRaw, mat.Prefix(" ")))

	// Calculate the canonical correlations.
	var cc stat.CC
	err := cc.CanonicalCorrelations(xdata, ydata, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Unpack cc.
	var pVecs, qVecs, phiVs, psiVs mat.Dense
	ccors := cc.CorrsTo(nil)
	// true → left/right singular vectors; false → the transform matrices.
	cc.LeftTo(&pVecs, true)
	cc.RightTo(&qVecs, true)
	cc.LeftTo(&phiVs, false)
	cc.RightTo(&psiVs, false)

	// Canonical Correlation Matrix, or the correlations between the sphered
	// data.
	var corSph mat.Dense
	corSph.CloneFrom(&pVecs)
	col := make([]float64, xd)
	// Scale each of the yd columns by its canonical correlation.
	for j := 0; j < yd; j++ {
		mat.Col(col, j, &corSph)
		floats.Scale(ccors[j], col)
		corSph.SetCol(j, col)
	}
	corSph.Product(&corSph, qVecs.T())
	fmt.Printf("\n\ncorSph = %.4f", mat.Formatted(&corSph, mat.Prefix(" ")))

	// Canonical Correlations. Note that the first canonical correlation is
	// 0.95, stronger than the greatest correlation in the original data, and
	// much stronger than the greatest correlation in the sphered data.
	fmt.Printf("\n\nccors = %.4f", ccors)

	// Left and right eigenvectors of the canonical correlation matrix.
	fmt.Printf("\n\npVecs = %.4f", mat.Formatted(&pVecs, mat.Prefix(" ")))
	fmt.Printf("\n\nqVecs = %.4f", mat.Formatted(&qVecs, mat.Prefix(" ")))

	// Canonical Correlation Transforms. These can be useful as they represent
	// the canonical variables as linear combinations of the original variables.
	fmt.Printf("\n\nphiVs = %.4f", mat.Formatted(&phiVs, mat.Prefix(" ")))
	fmt.Printf("\n\npsiVs = %.4f", mat.Formatted(&psiVs, mat.Prefix(" ")))

	// Output:
	// corRaw = ⎡-0.2192 0.3527 0.5828 -0.3883⎤
	// ⎢-0.3917 0.6448 0.7208 -0.4837⎥
	// ⎢-0.3022 0.7315 0.6680 -0.4273⎥
	// ⎢ 0.2052 -0.7479 -0.5344 0.2499⎥
	// ⎢-0.2098 0.4560 0.9102 -0.3816⎥
	// ⎢-0.3555 0.2615 0.4609 -0.5078⎥
	// ⎣ 0.1281 -0.2735 -0.4418 0.3335⎦
	//
	// corSph = ⎡ 0.0118 0.0525 0.2300 -0.1363⎤
	// ⎢-0.1810 0.3213 0.3814 -0.1412⎥
	// ⎢ 0.0166 0.2241 0.0104 -0.2235⎥
	// ⎢ 0.0346 -0.5481 -0.0034 -0.1994⎥
	// ⎢ 0.0303 -0.0956 0.7152 0.2039⎥
	// ⎢-0.0298 -0.0022 0.0739 -0.3703⎥
	// ⎣-0.1226 -0.0746 -0.3899 0.1541⎦
	//
	// ccors = [0.9451 0.6787 0.5714 0.2010]
	//
	// pVecs = ⎡-0.2574 0.0158 0.2122 -0.0946⎤
	// ⎢-0.4837 0.3837 0.1474 0.6597⎥
	// ⎢-0.0801 0.3494 0.3287 -0.2862⎥
	// ⎢ 0.1278 -0.7337 0.4851 0.2248⎥
	// ⎢-0.6969 -0.4342 -0.3603 0.0291⎥
	// ⎢-0.0991 0.0503 0.6384 0.1022⎥
	// ⎣ 0.4260 0.0323 -0.2290 0.6419⎦
	//
	// qVecs = ⎡ 0.0182 -0.1583 -0.0067 -0.9872⎤
	// ⎢-0.2348 0.9483 -0.1462 -0.1554⎥
	// ⎢-0.9701 -0.2406 -0.0252 0.0209⎥
	// ⎣ 0.0593 -0.1330 -0.9889 0.0291⎦
	//
	// phiVs = ⎡-0.0027 0.0093 0.0490 -0.0155⎤
	// ⎢-0.0429 -0.0242 0.0361 0.1839⎥
	// ⎢-1.2248 5.6031 5.8094 -4.7927⎥
	// ⎢-0.0044 -0.3424 0.4470 0.1150⎥
	// ⎢-0.0742 -0.1193 -0.1116 0.0022⎥
	// ⎢-0.0233 0.1046 0.3853 -0.0161⎥
	// ⎣ 0.0001 0.0005 -0.0030 0.0082⎦
	//
	// psiVs = ⎡ 0.0302 -0.3002 0.0878 -1.9583⎤
	// ⎢-0.0065 0.0392 -0.0118 -0.0061⎥
	// ⎢-0.0052 -0.0046 -0.0023 0.0008⎥
	// ⎣ 0.0020 0.0037 -0.1293 0.1038⎦
}
|
#!/bin/bash
# Build-and-run helper for the oneAPI DPC++ Essentials sample
# "Linear_inorder_queues" (Module 10, part 5 of 10).

# Load the Intel oneAPI environment (compiler, runtimes); output suppressed.
source /opt/intel/oneapi/setvars.sh > /dev/null 2>&1
/bin/echo "##" $(whoami) is compiling DPCPP_Essentials Module10 -- DPCPP Graphs and dependenices - 5 of 10 Linear_inorder_queues.cpp
dpcpp lab/Linear_inorder_queues.cpp -o bin/Linear_inorder_queues
# Run the freshly built binary only when compilation succeeded.
if [ $? -eq 0 ]; then bin/Linear_inorder_queues; fi
|
<filename>src/main/java/net/projektfriedhof/nanorm/NanormConnectionFactory.java
package net.projektfriedhof.nanorm;
import org.apache.tomcat.jdbc.pool.DataSource;
import org.apache.tomcat.jdbc.pool.PoolProperties;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.Statement;
/**
* Created by falko on 3/12/15.
*/
/**
 * Factory for pooled JDBC connections backed by the Tomcat JDBC pool.
 * <p>
 * {@link #init()} must be called once before {@link #getConnection()}.
 * Connections are handed out with auto-commit disabled, so callers are
 * responsible for committing or rolling back their transactions.
 */
public class NanormConnectionFactory {

    /** Shared pooled data source; created by {@link #init()}. */
    static DataSource ds;
    /** Whether {@link #init()} has been called. */
    static boolean initialized = false;

    /** Not instantiable; all members are static. */
    private NanormConnectionFactory(){
    }

    /**
     * Initializes the connection pool with default pool properties.
     * Connections returned to the pool are not committed automatically.
     */
    public static void init(){
        PoolProperties pp = new PoolProperties();
        ds = new DataSource(pp);
        ds.setCommitOnReturn(false);
        initialized = true;
    }

    /**
     * Borrows a connection from the pool with auto-commit disabled.
     *
     * @return a pooled {@link Connection} in manual-commit mode
     * @throws RuntimeException if {@link #init()} has not been called or
     *         obtaining the connection fails
     */
    public static Connection getConnection(){
        if(!initialized){
            throw new RuntimeException("Factory not initialized!");
        }
        try{
            Connection con = ds.getConnection();
            con.setAutoCommit(false);
            return con;
        } catch (Exception ex){
            // Fixed message: it previously read "Error creating Exception".
            throw new RuntimeException("Error creating connection", ex);
        }
    }
}
|
import numpy as np
import scipy.signal as signal
def softplus2(x, a=1):
    """Shifted softplus: log(1 + exp((x + 1) * a)) / a - 1.

    Behaves like max(x, -1) with a smooth knee around x = -1; ``a``
    controls the knee sharpness.

    Implemented with ``np.logaddexp`` for numerical stability: the naive
    ``np.exp`` form overflowed to inf for large positive ``x * a``.
    """
    return np.logaddexp(0, (x + 1) * a) / a - 1
def envelope(data):
    """Amplitude envelope of ``data`` via the analytic signal.

    Applies the Hilbert transform along axis 0 (presumably the time
    axis — confirm against callers) and returns the magnitude of the
    resulting analytic signal; output has the same shape as the input.
    """
    siga = signal.hilbert(data, axis=0)
    return np.abs(siga)
def softclip(x, a=1, b=1):
    """Smoothly clip ``x`` into the open interval (-1, 1).

    :param x: Data
    :param a: Knee sharpness
    :param b: Slope of linear region
    :return: soft-clipped data, same shape as ``x``
    """
    scaled = x * b
    # Lower knee: a softplus pins values below -1.
    lower = np.log(1 + np.exp((scaled + 1) * a)) / a - 1
    # Upper knee: the mirrored softplus pins values above +1.
    return 1 - np.log(1 + np.exp((1 - lower) * a)) / a
def norm_softclip(data, sigma=8, zeta=10, global_pow=None, norm_by_chan=False):
    """Normalize and soft-clip a signal to the range (-1, 1).

    :param data: (N, d) array
    :param sigma: number of std devs mapped to 1, e.g. sigma=6 places the
        +/-6-sigma line at +/-1
    :param zeta: soft-knee coefficient of the clip; higher zeta clips
        harder but distorts the linear region less
    :param global_pow: (Default: None) total power to normalize the
        signal by; when None the power is estimated with np.std
    :param norm_by_chan: normalize each channel (column) by its own std
    :return: soft-clipped array with the same shape as ``data``
    """
    centered = np.array(data - np.mean(data, axis=0))
    if global_pow is not None:
        scale = global_pow
    elif norm_by_chan:
        scale = np.std(centered, axis=0)
    else:
        scale = np.std(centered)
    centered /= sigma * scale
    return softclip(centered, zeta)
#!/usr/bin/env bash
# Aggregates per-platform cross-compile configuration and exposes one
# cross_compile_set_platform_<name> entry point per supported target.

# Pull in the shared settings plus each platform's *_init_env helper.
source globeConfig.sh
source AndroidConfig.sh
source iOSConfig.sh
source MinGWConfig.sh
source maccatalystConfig.sh

# Each wrapper forwards its first argument (presumably the target
# ABI/arch string — confirm against the *Config.sh scripts) to the
# corresponding platform initializer.
function cross_compile_set_platform_Android(){
    android_init_env "$1"
}
function cross_compile_set_platform_iOS(){
    ios_init_env "$1"
}
function cross_compile_set_platform_win32(){
    MinGW_init_env "$1"
}
function cross_compile_set_platform_maccatalyst() {
    maccatalyst_init_env "$1"
}
<gh_stars>1-10
package com.ibm.socialcrm.notesintegration.activitystream;
/**
 * Advisor that supplies the view-part id for the microblog activity-stream
 * perspective; all other factory behavior is inherited from
 * {@link AbstractActivityStreamPerspectiveFactoryAdvisor}.
 */
public class MicroblogPerspectiveFactoryAdvisor extends AbstractActivityStreamPerspectiveFactoryAdvisor {

    @Override
    public String getViewPartId() {
        // Must match the id the MicroblogView part is registered under.
        return "com.ibm.socialcrm.notesintegration.activitystream.MicroblogView"; //$NON-NLS-1$
    }
}
|
<reponame>MehfoozurRehman/Aida-Pro-Website
import { auth } from "./firebase";
// Creates a new Firebase account for the given credentials. Returns the
// promise from the Firebase auth SDK so callers can await/chain it.
export function signup(email, password) {
  return auth().createUserWithEmailAndPassword(email, password);
}
// Signs an existing user in with email/password. Returns the promise
// from the Firebase auth SDK so callers can await/chain it.
export function signin(email, password) {
  return auth().signInWithEmailAndPassword(email, password);
}
|
package com.ramusthastudio.myloader;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract;
import android.support.v4.widget.CursorAdapter;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import de.hdodenhof.circleimageview.CircleImageView;
/**
 * CursorAdapter that renders one contact row: the contact's display name
 * plus either their photo (when a photo URI is stored) or a placeholder
 * person icon.
 */
public class ContactAdapter extends CursorAdapter {

    public ContactAdapter(Context context, Cursor c, boolean autoRequery) {
        super(context, c, autoRequery);
    }

    @Override
    public View newView(Context context, Cursor cursor, ViewGroup parent) {
        return LayoutInflater.from(context).inflate(R.layout.item_row_contact, parent, false);
    }

    @Override
    public void bindView(View view, Context context, Cursor cursor) {
        if (cursor == null) {
            return;
        }
        TextView nameTv = view.findViewById(R.id.tv_item_name);
        CircleImageView userIv = view.findViewById(R.id.img_item_user);
        nameTv.setText(cursor.getString(cursor.getColumnIndexOrThrow(ContactsContract.Contacts.DISPLAY_NAME)));
        // Read the photo URI once instead of querying the cursor twice.
        // (The original also set the placeholder unconditionally before the
        // branch below immediately overwrote it — that call was redundant.)
        String photoUri = cursor.getString(cursor.getColumnIndexOrThrow(ContactsContract.Contacts.PHOTO_URI));
        if (photoUri != null) {
            userIv.setImageURI(Uri.parse(photoUri));
        } else {
            // No photo stored for this contact; fall back to the placeholder.
            userIv.setImageResource(R.drawable.ic_person_pin);
        }
    }
}
|
import recreateSheet from "./recreateSheet";

/**
 * Writes the given expenses to a fresh "budget" worksheet, sorts them by
 * cost (descending) in a table, and renders them as a treemap chart with
 * per-tile category/value labels. (Dead commented-out exploration code
 * from the original has been removed.)
 */
export default async function chartExpenses(expenses: { category: string; cost: number }[]) {
  await Excel.run(async context => {
    const sheet = recreateSheet(context, "budget");
    sheet.activate();

    // Write the expense data: a header row plus one row per expense.
    const expenseRowHeader = ["Expense Category", "Cost"];
    const expenseRows = [expenseRowHeader, ...expenses.map(({ category, cost }) => [category, cost])];
    const expenseRange = sheet.getRangeByIndexes(0, 0, expenseRows.length, expenseRowHeader.length);
    expenseRange.values = expenseRows;
    expenseRange.format.autofitColumns();

    // Put the data in a table so it can be sorted by the Cost column
    // (key 1), largest first.
    const table = sheet.tables.add(expenseRange, true);
    const sortFields = [
      {
        key: 1,
        ascending: false
      }
    ];
    table.sort.apply(sortFields);

    // Chart the expenses as a treemap over the written range.
    const chart = sheet.charts.add(Excel.ChartType.treemap, expenseRange, Excel.ChartSeriesBy.columns);
    chart.name = "Controllable Expenses"; // can't control taxes
    chart.title.text = "Controllable Expenses";
    chart.width = 650;
    chart.height = 400;

    // Label each tile of the (single) series with its category and value.
    const series = chart.series.getItemAt(0);
    const seriesDataLabels = series.dataLabels;
    seriesDataLabels.showValue = true;
    seriesDataLabels.showCategoryName = true;

    // The per-tile labels make a legend redundant.
    const legend = chart.legend;
    legend.visible = false;

    const dataLabels = chart.dataLabels;
    dataLabels.showCategoryName = true;
    dataLabels.showValue = true;

    const dataLabelsFont = dataLabels.format.font;
    dataLabelsFont.color = "white";
    dataLabelsFont.size = 16;
  });
}
|
exports.min = function min (array) {
if (array==undefined||array.length==0){
return 0;
} else {
let a=Math.min(...array);
return a;}
}
exports.max = function max (array) {
if (array==undefined||array.length==0){
return 0;
} else {
let b=Math.max(...array);
return b;}
}
exports.avg = function avg (array) {
if (array==undefined||array.length==0){
return 0;
} else {
let c=array.reduce((a, b) => (a + b)) / array.length;
return c;}
}
|
def rock_paper_scissors(player1_choice, player2_choice):
    """Decide a rock-paper-scissors round between two players.

    Choices are compared case-insensitively. Returns "It's a tie",
    "Player 1 wins", or "Player 2 wins". As in the original, any
    unrecognized, unequal pair falls through to "Player 2 wins".
    (The original also built an unused ``choices`` set; removed.)
    """
    player1_choice = player1_choice.lower()
    player2_choice = player2_choice.lower()
    if player1_choice == player2_choice:
        return "It's a tie"
    # Pairs (p1, p2) in which player 1's choice beats player 2's.
    if (player1_choice, player2_choice) in {("rock", "scissors"), ("scissors", "paper"), ("paper", "rock")}:
        return "Player 1 wins"
    return "Player 2 wins"
import pickle

def get_ordinal(number):
    """Return the ordinal word ("first", "second", ...) for 1 <= number <= 25.

    The words are loaded from a pickled collection on every call; numbers
    outside the supported range yield "Out of range".
    """
    # NOTE(review): pickle.load can execute arbitrary code from the data
    # file — acceptable only because the file ships with this project.
    # The original leaked the file handle (open() was never closed);
    # the with-block guarantees it is closed.
    with open("../data/constants/word_ordinals.p", "rb") as f:
        words = pickle.load(f)
    if 1 <= number <= 25:
        return list(words)[number - 1]
    else:
        return "Out of range"
class Result<T> {
readonly success: boolean;
message: string;
readonly data: T;
constructor(success: boolean, message: string, data: T) {
this.success = success;
this.message = message;
this.data = data;
}
isSuccess(): boolean {
return this.success;
}
} |
<gh_stars>0
import parseCosts
import filter
def _banner(text, fill):
    """Print `text` framed by 16-character rows of `fill` (4 above, 3 below)."""
    for _ in range(4):
        print(fill * 16)
    print(text)
    for _ in range(3):
        print(fill * 16)

def run():
    """Parse costs, filter them, print each item and the scaled total.

    The duplicated banner print statements of the original are factored
    into _banner(); the printed output is unchanged.
    """
    _banner('PROGRAM START***', '*')
    costsSum = 0
    # Presumably 19 selects a cost category — confirm against filter.filter.
    filtered = filter.filter(parseCosts.parse(), 19)
    for item in filtered:
        print(item)
        costsSum += float(item['spot'])
    # Scale factors 6 and 0.001 are inherited from the original;
    # their meaning is not documented — confirm before changing.
    print(costsSum*6*0.001)
    _banner('PROGRAM END/////', '/')

run()
|
import re

def extract_warranty_info(license_text):
    """Find the warranty disclaimer in ``license_text``.

    Searches case-insensitively for "WITHOUT ANY WARRANTY;" and returns
    that phrase plus the rest of its line (as it appears in the input),
    or a fallback message when no disclaimer is present.
    """
    match = re.search(r'WITHOUT ANY WARRANTY;.*', license_text, re.IGNORECASE)
    return match.group(0) if match else "No warranty information found."
<filename>LoginGoogle/app/src/main/java/com/example/curseudemy/MainActivity.java<gh_stars>0
package com.example.curseudemy;
import androidx.appcompat.app.AppCompatActivity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import com.google.android.gms.auth.api.signin.GoogleSignIn;
import com.google.android.gms.auth.api.signin.GoogleSignInAccount;
import com.google.android.gms.auth.api.signin.GoogleSignInClient;
import com.google.android.gms.auth.api.signin.GoogleSignInOptions;
import com.google.android.gms.common.SignInButton;
import com.google.android.gms.common.api.ApiException;
import com.google.android.gms.tasks.Task;
/**
 * Launcher activity that handles Google Sign-In. If an account is already
 * signed in, the user is forwarded straight to {@link PerfilActivity};
 * otherwise a sign-in button starts the Google auth flow.
 */
public class MainActivity extends AppCompatActivity {
    private static final String TAG = "AndroidClarified";
    /** Request code identifying the sign-in intent in onActivityResult. */
    private static final int RC_SIGN_IN = 101;

    private SignInButton googleSignInButton;
    private GoogleSignInClient googleSignInClient;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // NOTE: the original invoked onStart() manually here. The framework
        // calls onStart() itself right after onCreate(), so the manual call
        // ran the already-signed-in check twice and violated the Activity
        // lifecycle contract. It (and a duplicated findViewById) are removed.
        googleSignInButton = findViewById(R.id.sign_in_button);

        GoogleSignInOptions gso = new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestEmail()
                .build();
        googleSignInClient = GoogleSignIn.getClient(this, gso);

        googleSignInButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent signInIntent = googleSignInClient.getSignInIntent();
                startActivityForResult(signInIntent, RC_SIGN_IN);
            }
        });
    }

    @Override
    public void onStart() {
        super.onStart();
        // Skip the sign-in UI entirely when a Google account is already
        // signed in on this device.
        GoogleSignInAccount alreadyloggedAccount = GoogleSignIn.getLastSignedInAccount(this);
        if (alreadyloggedAccount != null) {
            Toast.makeText(this, "Você já está logado", Toast.LENGTH_SHORT).show();
            onLoggedIn(alreadyloggedAccount);
        } else {
            Log.d(TAG, "Not logged in");
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (resultCode == Activity.RESULT_OK)
            switch (requestCode) {
                case RC_SIGN_IN:
                    try {
                        // The Task returned from this call is always completed,
                        // no need to attach a listener.
                        Task<GoogleSignInAccount> task = GoogleSignIn.getSignedInAccountFromIntent(data);
                        GoogleSignInAccount account = task.getResult(ApiException.class);
                        onLoggedIn(account);
                    } catch (ApiException e) {
                        // The ApiException status code indicates the detailed failure reason.
                        Toast.makeText(getApplicationContext(), "Deu merda", Toast.LENGTH_SHORT).show();
                    }
                    break;
            }
    }

    /** Hands the signed-in account to the profile screen and finishes. */
    private void onLoggedIn(GoogleSignInAccount googleSignInAccount) {
        Intent intent = new Intent(this, PerfilActivity.class);
        intent.putExtra(PerfilActivity.GOOGLE_ACCOUNT, googleSignInAccount);
        startActivity(intent);
        finish();
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-1818-1
#
# Security announcement date: 2013-05-07 00:00:00 UTC
# Script generation date: 2017-01-01 21:03:16 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fix on version:
# - libgl1-mesa-dri:8.0.4-0ubuntu0.5
# - libgl1-mesa-swx11:8.0.4-0ubuntu0.5
# - libxatracker1:8.0.4-0ubuntu0.5
# - libgbm1:8.0.4-0ubuntu0.5
# - libegl1-mesa:8.0.4-0ubuntu0.5
# - libopenvg1-mesa:8.0.4-0ubuntu0.5
# - libgles1-mesa:8.0.4-0ubuntu0.5
# - libgles2-mesa:8.0.4-0ubuntu0.5
# - libglapi-mesa:8.0.4-0ubuntu0.5
# - libgl1-mesa-glx:8.0.4-0ubuntu0.5
# - libosmesa6:8.0.4-0ubuntu0.5
# - libglu1-mesa:8.0.4-0ubuntu0.5
# - libglu1-mesa:8.0.4-0ubuntu0.5
# - libgl1-mesa-glx:8.0.4-0ubuntu0.5
# - libosmesa6:8.0.4-0ubuntu0.5
# - libglapi-mesa:8.0.4-0ubuntu0.5
# - libopenvg1-mesa:8.0.4-0ubuntu0.5
# - libgl1-mesa-swx11:8.0.4-0ubuntu0.5
# - libgles2-mesa:8.0.4-0ubuntu0.5
# - libegl1-mesa:8.0.4-0ubuntu0.5
# - libxatracker1:8.0.4-0ubuntu0.5
# - libgles1-mesa:8.0.4-0ubuntu0.5
# - libgbm1:8.0.4-0ubuntu0.5
#
# Last versions recommended by security team:
# - libgl1-mesa-dri:8.0.4-0ubuntu0.6
# - libgl1-mesa-swx11:8.0.4-0ubuntu0.6
# - libxatracker1:8.0.4-0ubuntu0.6
# - libgbm1:8.0.4-0ubuntu0.6
# - libegl1-mesa:8.0.4-0ubuntu0.6
# - libopenvg1-mesa:8.0.4-0ubuntu0.6
# - libgles1-mesa:8.0.4-0ubuntu0.6
# - libgles2-mesa:8.0.4-0ubuntu0.6
# - libglapi-mesa:8.0.4-0ubuntu0.6
# - libgl1-mesa-glx:8.0.4-0ubuntu0.6
# - libosmesa6:8.0.4-0ubuntu0.6
# - libglu1-mesa:8.0.4-0ubuntu0.6
# - libglu1-mesa:8.0.4-0ubuntu0.6
# - libgl1-mesa-glx:8.0.4-0ubuntu0.6
# - libosmesa6:8.0.4-0ubuntu0.6
# - libglapi-mesa:8.0.4-0ubuntu0.6
# - libopenvg1-mesa:8.0.4-0ubuntu0.6
# - libgl1-mesa-swx11:8.0.4-0ubuntu0.6
# - libgles2-mesa:8.0.4-0ubuntu0.6
# - libegl1-mesa:8.0.4-0ubuntu0.6
# - libxatracker1:8.0.4-0ubuntu0.6
# - libgles1-mesa:8.0.4-0ubuntu0.6
# - libgbm1:8.0.4-0ubuntu0.6
#
# CVE List:
# - CVE-2012-5129
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade every affected Mesa package to the fixed version. The original
# script listed several packages twice; the list below is deduplicated —
# re-installing the same pinned version a second time was a no-op.
fixed_version="8.0.4-0ubuntu0.6"
for pkg in \
    libgl1-mesa-dri \
    libgl1-mesa-swx11 \
    libxatracker1 \
    libgbm1 \
    libegl1-mesa \
    libopenvg1-mesa \
    libgles1-mesa \
    libgles2-mesa \
    libglapi-mesa \
    libgl1-mesa-glx \
    libosmesa6 \
    libglu1-mesa
do
    sudo apt-get install --only-upgrade "${pkg}=${fixed_version}" -y
done
|
<filename>src/main/java/com/alipay/api/domain/BusinessHoursInfo.java<gh_stars>0
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
* 门店营业时间
*
* @author auto create
* @since 1.0, 2021-07-08 19:46:00
*/
/**
 * Store business-hours record: the days a store is open plus its daily
 * opening and closing times. (Field docs translated from Chinese.)
 */
public class BusinessHoursInfo extends AlipayObject {

    private static final long serialVersionUID = 1734773433675659329L;

    /**
     * Business days; values are the Chinese weekday names
     * 周一 (Mon) through 周日 (Sun).
     */
    @ApiListField("business_date")
    @ApiField("string")
    private List<String> businessDate;

    /**
     * Closing time.
     */
    @ApiField("end_time")
    private String endTime;

    /**
     * Additional free-form description.
     */
    @ApiField("extra_desc")
    private String extraDesc;

    /**
     * Opening time.
     */
    @ApiField("start_time")
    private String startTime;

    public List<String> getBusinessDate() {
        return this.businessDate;
    }
    public void setBusinessDate(List<String> businessDate) {
        this.businessDate = businessDate;
    }
    public String getEndTime() {
        return this.endTime;
    }
    public void setEndTime(String endTime) {
        this.endTime = endTime;
    }
    public String getExtraDesc() {
        return this.extraDesc;
    }
    public void setExtraDesc(String extraDesc) {
        this.extraDesc = extraDesc;
    }
    public String getStartTime() {
        return this.startTime;
    }
    public void setStartTime(String startTime) {
        this.startTime = startTime;
    }
}
|
<filename>databrowse/plugins/db_specimen_database/db_specimen_database.py
#!/usr/bin/env python
###############################################################################
## Databrowse: An Extensible Data Management Platform ##
## Copyright (C) 2012-2016 Iowa State University Research Foundation, Inc. ##
## All rights reserved. ##
## ##
## Redistribution and use in source and binary forms, with or without ##
## modification, are permitted provided that the following conditions are ##
## met: ##
## 1. Redistributions of source code must retain the above copyright ##
## notice, this list of conditions and the following disclaimer. ##
## 2. Redistributions in binary form must reproduce the above copyright ##
## notice, this list of conditions and the following disclaimer in the ##
## documentation and/or other materials provided with the distribution. ##
## 3. Neither the name of the copyright holder nor the names of its ##
## contributors may be used to endorse or promote products derived from ##
## this software without specific prior written permission. ##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ##
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED ##
## TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A ##
## PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER ##
## OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ##
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ##
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR ##
## PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF ##
## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING ##
## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ##
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ##
## ##
## This material is based on work supported by the Air Force Research ##
## Laboratory under Contract #FA8650-10-D-5210, Task Order #023 and ##
## performed at Iowa State University. ##
## ##
## DISTRIBUTION A. Approved for public release: distribution unlimited; ##
## 19 Aug 2016; 88ABW-2016-4051. ##
###############################################################################
""" plugins/renderers/db_specimen_database.py - Basic Output for Any Folder """
import databrowse.plugins.db_directory.db_directory as db_directory_module
from lxml import etree
import os
from errno import EEXIST
import databrowse.support.specimen_support as ss
try:
import databrowse.plugins.db_mercurial_repository.db_mercurial_repository as hgmodule
hgrepo = hgmodule.db_mercurial_repository
hgavailable = True
except:
hgavailable = False
class db_specimen_database(db_directory_module.db_directory):
    """Specimen Database Directory Renderer.

    Renders a directory of specimen (.sdb) and specimen-group (.sdg)
    files. In the "add_specimen"/"add_specimen_group" style modes it also
    accepts AJAX uploads of new specimen documents and serves specimen-id
    checkdigit computations.

    (The original docstring said "Image Directory Renderer", a copy-paste
    leftover; it also leaked file handles by calling ``f.close`` without
    parentheses and built its sort call with exec() — all fixed here.)
    """

    _default_content_mode = "title"
    _default_style_mode = "specimen_list"
    _default_recursion_depth = 1

    def __init__(self, relpath, fullpath, web_support, handler_support, caller, handlers, content_mode=_default_content_mode, style_mode=_default_style_mode, recursion_depth=_default_recursion_depth):
        if caller == "databrowse":
            self._namespace_uri = "http://thermal.cnde.iastate.edu/databrowse/specimendb"
            self._namespace_local = "specimendb"
        else:
            self._namespace_uri = "http://thermal.cnde.iastate.edu/databrowse/dir"
            self._namespace_local = "dir"
        self._disable_load_style = True
        if style_mode not in ["add_specimen", "add_specimen_group"]:
            # Temporarily swap in the specimen-aware listing so the base
            # class only sees .sdb/.sdg entries, then restore the original.
            tmpref = self.getDirectoryList
            self.getDirectoryList = self.getSpecimenDatabaseDirectoryList
            super(db_specimen_database, self).__init__(relpath, fullpath, web_support, handler_support, caller, handlers, content_mode, style_mode)
            self.getDirectoryList = tmpref
            if hgavailable:
                # Surface the number of uncommitted files in the Mercurial
                # repository, when repository support is importable.
                uncommitted = hgrepo.uncommittedlist(fullpath)
                if len(uncommitted) > 0:
                    self._xml.set('uncommitted', str(len(uncommitted)))
        else:
            # The add modes skip db_directory's own setup and initialize
            # its base class directly.
            super(db_directory_module.db_directory, self).__init__(relpath, fullpath, web_support, handler_support, caller, handlers, content_mode, style_mode)

    def getSpecimenDatabaseDirectoryList(self, fullpath, sort=None, order="asc"):
        """Build a sorted list of specimen files, hiding everything else.

        Only .sdb (specimen) and .sdg (specimen group) entries are kept.
        ``sort`` historically arrives as a Python expression string naming
        the key function; a callable is now accepted as well.
        """
        reallist = os.listdir(fullpath)
        returnlist = [n for n in reallist if n.endswith('.sdb') or n.endswith('.sdg')]
        if sort is None:
            key = str.lower
        elif isinstance(sort, str):
            # Previously this interpolated `sort` into an exec() statement;
            # evaluating just the key expression is equivalent while
            # avoiding arbitrary-statement execution.
            key = eval(sort)
        else:
            key = sort
        returnlist.sort(reverse=(order == "desc"), key=key)
        return returnlist

    def getContent(self):
        """Generate the renderer's content.

        Normal style modes defer to db_directory. The add modes handle
        three AJAX requests — saving a new specimen (.sdb), saving a new
        specimen group (.sdg), and computing a checkdigit — and otherwise
        emit the XML root pointing at the appropriate entry template.
        """
        if self._style_mode not in ["add_specimen", "add_specimen_group"]:
            return super(db_specimen_database, self).getContent()
        else:
            if "ajax" in self._web_support.req.form and "save" in self._web_support.req.form:
                if "file" in self._web_support.req.form and self._style_mode == 'add_specimen':
                    filestring = self._web_support.req.form["file"].value
                    xml = etree.XML(filestring)
                    specimentag = xml.xpath("/specimen:specimen/specimen:specimenid/.", namespaces={"specimen": "http://thermal.cnde.iastate.edu/specimen"})
                    dirtags = xml.xpath("/specimen:specimen/specimen:reldests/specimen:reldest", namespaces={"specimen": "http://thermal.cnde.iastate.edu/specimen"})
                    specimenid = specimentag[0].text
                    fullfilename = os.path.join(self._fullpath, specimenid + ".sdb")
                    # Let's check on the directory and make sure its writable and it exists
                    if not os.access(os.path.dirname(fullfilename), os.W_OK):
                        self._web_support.req.output = "Error Saving File: Save Directory Not Writable " + os.path.dirname(fullfilename)
                        self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                        return [self._web_support.req.return_page()]
                    else:
                        # Let's check on the file and make sure its writable and doesn't exist
                        if os.path.exists(fullfilename):
                            self._web_support.req.output = "Error Saving File: Specimen " + specimenid + " already exists"
                            self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                            return [self._web_support.req.return_page()]
                        else:
                            try:
                                # Create any relative destination directories
                                # named in the document before writing the file.
                                for dirtag in dirtags:
                                    newdirname = None
                                    if dirtag.get('{http://www.w3.org/1999/xlink}href') is not None:
                                        newdirname = dirtag.get('{http://www.w3.org/1999/xlink}href')
                                    else:
                                        newdirname = dirtag.text
                                    if newdirname is not None:
                                        os.makedirs(os.path.join(self._fullpath, newdirname))
                            except OSError as err:
                                if err.errno == EEXIST:  # Handle the Race Condition
                                    pass
                                else:
                                    self._web_support.req.output = "Error Creating Files Directory: " + str(err)
                                    self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                                    return [self._web_support.req.return_page()]
                            # with-block guarantees the handle is closed (the
                            # original called `f.close` without parentheses,
                            # never actually closing the file).
                            with open(fullfilename, "wb") as f:
                                f.write(filestring)
                            self._web_support.req.output = "File Saved Successfully"
                            self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                            return [self._web_support.req.return_page()]
                elif "file" in self._web_support.req.form and self._style_mode == 'add_specimen_group':
                    filestring = self._web_support.req.form["file"].value
                    xml = etree.XML(filestring)
                    specimentag = xml.xpath("/specimen:specimengroup/specimen:groupid/.", namespaces={"specimen": "http://thermal.cnde.iastate.edu/specimen"})
                    specimenid = specimentag[0].text
                    fullfilename = os.path.join(self._fullpath, specimenid + ".sdg")
                    # Let's check on the directory and make sure its writable and it exists
                    if not os.access(os.path.dirname(fullfilename), os.W_OK):
                        self._web_support.req.output = "Error Saving File: Save Directory Not Writable " + os.path.dirname(fullfilename)
                        self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                        return [self._web_support.req.return_page()]
                    else:
                        # Let's check on the file and make sure its writable and doesn't exist
                        if os.path.exists(fullfilename):
                            self._web_support.req.output = "Error Saving File: Specimen Group " + specimenid + " already exists"
                            self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                            return [self._web_support.req.return_page()]
                        else:
                            try:
                                # Each group gets a companion files directory.
                                os.makedirs(os.path.join(self._fullpath, specimenid + "_files"))
                            except OSError as err:
                                if err.errno == EEXIST:  # Handle the Race Condition
                                    pass
                                else:
                                    self._web_support.req.output = "Error Creating Files Directory: " + str(err)
                                    self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                                    return [self._web_support.req.return_page()]
                            # with-block guarantees the handle is closed (same
                            # `f.close` bug as above).
                            with open(fullfilename, "wb") as f:
                                f.write(filestring)
                            self._web_support.req.output = "File Saved Successfully"
                            self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                            return [self._web_support.req.return_page()]
                else:
                    self._web_support.req.output = "Error Saving File: Incomplete Request"
                    self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                    return [self._web_support.req.return_page()]
            elif self._content_mode != "raw" and "ajax" in self._web_support.req.form and "checkdigit" in self._web_support.req.form:
                # AJAX helper: append the computed checkdigit to the
                # submitted specimen-id string.
                if "specimen" in self._web_support.req.form:
                    spcstr = self._web_support.req.form['specimen'].value
                    chkdgt = ss.GenerateCheckdigit(spcstr)
                    self._web_support.req.output = spcstr + chkdgt
                    self._web_support.req.response_headers['Content-Type'] = 'text/plain'
                    return [self._web_support.req.return_page()]
                else:
                    raise self.RendererException('Incomplete Request')
            else:
                # No AJAX request: emit the XML root pointing the style
                # layer at the appropriate entry template.
                if self._style_mode == "add_specimen":
                    xmlroot = etree.Element("{%s}specimendb" % self._namespace_uri, nsmap=self.nsmap, templatefile=self.getURL("/specimens/src/specimen.xhtml", handler="db_default", content_mode="raw", ContentType="application/xml"))
                elif self._style_mode == "add_specimen_group":
                    xmlroot = etree.Element("{%s}specimendb" % self._namespace_uri, nsmap=self.nsmap, templatefile=self.getURL("/specimens/src/specimengroup.xhtml", handler="db_default", content_mode="raw", ContentType="application/xml"))
                return xmlroot
|
<gh_stars>10-100
#include "Config.h"
#include "BNetAuthLogonProof.h"
namespace Packets
{
namespace BNet
{
// Serializes the auth-logon-proof response into the outgoing packet buffer:
// a status value, the 20-byte proof, and an optional trailing message.
bool BNetAuthLogonProof::Pack()
{
_packet << status;
// proof is appended as a fixed 20-byte blob -- presumably an SRP proof digest; TODO confirm
_packet.append(proof, 20);
// The message is optional: an empty string is omitted from the packet entirely.
if(!msg.empty())
_packet << msg.c_str();
// Serialization cannot fail here, so always report success.
return true;
}
}
}
|
$(document).ready(function () {
    // Cache the container lookup once instead of re-querying the DOM on
    // every statement ($('.popout-box') was evaluated four times before).
    var $box = $('.popout-box');

    // Create a popout box with the given text.
    $box.append('<div class="popout-text">This is some text</div>');
    // Add a close button.
    $box.append('<div class="close-button">X</div>');
    // Animate the box into view ("slide" effect requires jQuery UI).
    $box.show("slide", { direction: "up" }, 1000);

    // Close the box when the close button is clicked.
    $('.close-button').on('click', function () {
        $box.hide("slide", { direction: "up" }, 1000);
    });
});
#!/bin/sh
#
# Copyright (c) 2014-2015 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# chkconfig: 345 25 30
### BEGIN INIT INFO
# Provides: sw-patch-controller-daemon
# Required-Start: $syslog
# Required-Stop: $syslog
# Default-Start: 2 3 5
# Default-Stop: 0 1 6
# Short-Description: sw-patch-controller-daemon
# Description: Provides the CGCS Patch Controller Daemon
### END INIT INFO
DESC="sw-patch-controller-daemon"
DAEMON="/usr/sbin/sw-patch-controller-daemon"
PIDFILE="/var/run/sw-patch-controller-daemon.pid"
# Start the daemon unless an instance is already running.
# Exits the script with status 1 when a live process owns the PID file.
start()
{
# A PID file may be left over from a crash; only treat it as "running"
# when /proc/<pid> actually exists.
if [ -e $PIDFILE ]; then
PIDDIR=/proc/$(cat $PIDFILE)
if [ -d ${PIDDIR} ]; then
echo "$DESC already running."
exit 1
else
echo "Removing stale PID file $PIDFILE"
rm -f $PIDFILE
fi
fi
echo -n "Starting $DESC..."
# Daemonize and let start-stop-daemon create the PID file for us.
start-stop-daemon --start --quiet --background \
--pidfile ${PIDFILE} --make-pidfile --exec ${DAEMON}
if [ $? -eq 0 ]; then
echo "done."
else
echo "failed."
fi
}
# Stop the daemon identified by the PID file and remove the PID file.
# The PID file is removed even when stopping fails, so a dead process
# does not block the next start.
stop()
{
echo -n "Stopping $DESC..."
start-stop-daemon --stop --quiet --pidfile $PIDFILE
if [ $? -eq 0 ]; then
echo "done."
else
echo "failed."
fi
rm -f $PIDFILE
}
case "$1" in
start)
start
;;
stop)
stop
;;
restart|force-reload)
stop
start
;;
*)
echo "Usage: $0 {start|stop|force-reload|restart}"
exit 1
;;
esac
exit 0
|
package impl
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/domonda/go-sqldb"
)
// transaction wraps a *sql.Tx together with the non-transactional parent
// connection it was started from, implementing the same sqldb.Connection
// interface so transactional and plain connections are interchangeable.
type transaction struct {
// The parent non-transaction connection is needed
// for its ctx, Ping(), Stats(), and Config()
parent *connection
tx *sql.Tx
opts *sql.TxOptions
structFieldNamer sqldb.StructFieldNamer
}
// newTransaction wires a started *sql.Tx to its parent connection,
// inheriting the parent's struct-field naming strategy.
func newTransaction(parent *connection, tx *sql.Tx, opts *sql.TxOptions) *transaction {
return &transaction{
parent: parent,
tx: tx,
opts: opts,
structFieldNamer: parent.structFieldNamer,
}
}
// clone returns a shallow copy so With* methods can derive a modified
// connection without mutating the original.
func (conn *transaction) clone() *transaction {
c := *conn
return &c
}
// Context returns the context of the parent connection.
func (conn *transaction) Context() context.Context { return conn.parent.ctx }
// WithContext returns a connection bound to ctx; the same transaction
// handle is reused, only the parent is cloned with the new context.
func (conn *transaction) WithContext(ctx context.Context) sqldb.Connection {
if ctx == conn.parent.ctx {
return conn
}
parent := conn.parent.clone()
parent.ctx = ctx
return newTransaction(parent, conn.tx, conn.opts)
}
// WithStructFieldNamer returns a copy of the connection using namer
// for struct-field-to-column mapping.
func (conn *transaction) WithStructFieldNamer(namer sqldb.StructFieldNamer) sqldb.Connection {
c := conn.clone()
c.structFieldNamer = namer
return c
}
// StructFieldNamer returns the current struct-field naming strategy.
func (conn *transaction) StructFieldNamer() sqldb.StructFieldNamer {
return conn.structFieldNamer
}
// Ping, Stats and Config are delegated to the parent connection because
// they describe the underlying database handle, not this transaction.
func (conn *transaction) Ping(timeout time.Duration) error { return conn.parent.Ping(timeout) }
func (conn *transaction) Stats() sql.DBStats { return conn.parent.Stats() }
func (conn *transaction) Config() *sqldb.Config { return conn.parent.Config() }
// Exec runs a statement inside the transaction, wrapping any error with
// the query and formatted arguments for diagnostics.
func (conn *transaction) Exec(query string, args ...interface{}) error {
_, err := conn.tx.Exec(query, args...)
return WrapNonNilErrorWithQuery(err, query, conn.parent.argFmt, args)
}
// The following methods delegate to the package-level helpers, passing
// this transaction as the executing connection so all statements run
// inside the transaction.
func (conn *transaction) Insert(table string, columValues sqldb.Values) error {
return Insert(conn, table, conn.parent.argFmt, columValues)
}
func (conn *transaction) InsertUnique(table string, values sqldb.Values, onConflict string) (inserted bool, err error) {
return InsertUnique(conn, table, conn.parent.argFmt, values, onConflict)
}
func (conn *transaction) InsertReturning(table string, values sqldb.Values, returning string) sqldb.RowScanner {
return InsertReturning(conn, table, conn.parent.argFmt, values, returning)
}
func (conn *transaction) InsertStruct(table string, rowStruct interface{}, restrictToColumns ...string) error {
return InsertStruct(conn, table, rowStruct, conn.structFieldNamer, conn.parent.argFmt, nil, restrictToColumns)
}
func (conn *transaction) InsertStructIgnoreColumns(table string, rowStruct interface{}, ignoreColumns ...string) error {
return InsertStruct(conn, table, rowStruct, conn.structFieldNamer, conn.parent.argFmt, ignoreColumns, nil)
}
func (conn *transaction) InsertUniqueStruct(table string, rowStruct interface{}, onConflict string, restrictToColumns ...string) (inserted bool, err error) {
return InsertUniqueStruct(conn, table, rowStruct, onConflict, conn.structFieldNamer, conn.parent.argFmt, nil, restrictToColumns)
}
func (conn *transaction) InsertUniqueStructIgnoreColumns(table string, rowStruct interface{}, onConflict string, ignoreColumns ...string) (inserted bool, err error) {
return InsertUniqueStruct(conn, table, rowStruct, onConflict, conn.structFieldNamer, conn.parent.argFmt, ignoreColumns, nil)
}
func (conn *transaction) Update(table string, values sqldb.Values, where string, args ...interface{}) error {
return Update(conn, table, values, where, conn.parent.argFmt, args)
}
// NOTE(review): unlike Update above, the UpdateReturning* helpers are not
// passed conn.parent.argFmt here -- confirm the helper signatures agree.
func (conn *transaction) UpdateReturningRow(table string, values sqldb.Values, returning, where string, args ...interface{}) sqldb.RowScanner {
return UpdateReturningRow(conn, table, values, returning, where, args)
}
func (conn *transaction) UpdateReturningRows(table string, values sqldb.Values, returning, where string, args ...interface{}) sqldb.RowsScanner {
return UpdateReturningRows(conn, table, values, returning, where, args)
}
func (conn *transaction) UpdateStruct(table string, rowStruct interface{}, restrictToColumns ...string) error {
return UpdateStruct(conn, table, rowStruct, conn.structFieldNamer, conn.parent.argFmt, nil, restrictToColumns)
}
func (conn *transaction) UpdateStructIgnoreColumns(table string, rowStruct interface{}, ignoreColumns ...string) error {
return UpdateStruct(conn, table, rowStruct, conn.structFieldNamer, conn.parent.argFmt, ignoreColumns, nil)
}
func (conn *transaction) UpsertStruct(table string, rowStruct interface{}, restrictToColumns ...string) error {
return UpsertStruct(conn, table, rowStruct, conn.structFieldNamer, conn.parent.argFmt, nil, restrictToColumns)
}
func (conn *transaction) UpsertStructIgnoreColumns(table string, rowStruct interface{}, ignoreColumns ...string) error {
return UpsertStruct(conn, table, rowStruct, conn.structFieldNamer, conn.parent.argFmt, ignoreColumns, nil)
}
// QueryRow executes a query inside the transaction and returns a scanner
// for a single row. Query errors are not returned directly but wrapped
// into the returned RowScanner.
func (conn *transaction) QueryRow(query string, args ...interface{}) sqldb.RowScanner {
rows, err := conn.tx.QueryContext(conn.parent.ctx, query, args...)
if err != nil {
err = WrapNonNilErrorWithQuery(err, query, conn.parent.argFmt, args)
return sqldb.RowScannerWithError(err)
}
return NewRowScanner(rows, conn.structFieldNamer, query, conn.parent.argFmt, args)
}
// QueryRows executes a query inside the transaction and returns a scanner
// over all result rows; errors are likewise deferred into the scanner.
func (conn *transaction) QueryRows(query string, args ...interface{}) sqldb.RowsScanner {
rows, err := conn.tx.QueryContext(conn.parent.ctx, query, args...)
if err != nil {
err = WrapNonNilErrorWithQuery(err, query, conn.parent.argFmt, args)
return sqldb.RowsScannerWithError(err)
}
return NewRowsScanner(conn.parent.ctx, rows, conn.structFieldNamer, query, conn.parent.argFmt, args)
}
// IsTransaction reports that this connection is transactional.
func (conn *transaction) IsTransaction() bool {
return true
}
// TransactionOptions returns the options this transaction was begun with.
func (conn *transaction) TransactionOptions() (*sql.TxOptions, bool) {
return conn.opts, true
}
// Begin starts a transaction on the parent's database handle.
// NOTE(review): this opens a new independent transaction rather than a
// nested one (no savepoint) -- confirm that callers expect this.
func (conn *transaction) Begin(opts *sql.TxOptions) (sqldb.Connection, error) {
tx, err := conn.parent.db.BeginTx(conn.parent.ctx, opts)
if err != nil {
return nil, err
}
return newTransaction(conn.parent, tx, opts), nil
}
// Commit commits the underlying *sql.Tx.
func (conn *transaction) Commit() error {
return conn.tx.Commit()
}
// Rollback aborts the underlying *sql.Tx.
func (conn *transaction) Rollback() error {
return conn.tx.Rollback()
}
// Channel listening is a connection-level feature and is not available
// on a transaction.
func (conn *transaction) ListenOnChannel(channel string, onNotify sqldb.OnNotifyFunc, onUnlisten sqldb.OnUnlistenFunc) (err error) {
return fmt.Errorf("notifications %w", sqldb.ErrNotSupported)
}
func (conn *transaction) UnlistenChannel(channel string) (err error) {
return fmt.Errorf("notifications %w", sqldb.ErrNotSupported)
}
func (conn *transaction) IsListeningOnChannel(channel string) bool {
return false
}
// Close rolls back: an unfinished transaction must not be committed
// implicitly when its connection is closed.
func (conn *transaction) Close() error {
return conn.Rollback()
}
|
<filename>enhanced-data-cache/src/main/java/com/yoloho/enhanced/cache/annotation/EnableCacheConfig.java
package com.yoloho.enhanced.cache.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Cache configuration; annotate this on a class.
 *
 * @author jason<<EMAIL>> Jun 5, 2018
 *
 */
@Documented
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Inherited
public @interface EnableCacheConfig {
/**
 * Cache group name.
 *
 * @return
 */
String group() default "";
/**
 * Whether to enable the remote (redis) cache (L1).
 *
 * @return
 */
EnableCacheBoolean remote() default EnableCacheBoolean.UNSET;
/**
 * Whether to enable the local cache (L2).
 * <p>
 * Note: the local cache has no distributed consistency.
 *
 * @return
 */
EnableCacheBoolean local() default EnableCacheBoolean.UNSET;
/**
 * Expiration time in seconds; defaults to 300 seconds when unset.
 *
 * @return
 */
int expire() default 0;
/**
 * Local cache expiration in seconds; defaults to the remote expiration
 * when unset. Applies per group and only one setting per group takes
 * effect. The effective value may depend on execution order, so avoid
 * relying on repeated settings overriding each other.
 *
 * @return
 */
int expireLocal() default 0;
/**
 * Maximum number of objects held in the local cache; entries beyond the
 * limit are evicted LRU. Applies per group and only one setting per
 * group takes effect. The effective value may depend on execution
 * order, so avoid relying on repeated settings overriding each other.
 *
 * @return
 */
int maxSizeLocal() default 0;
}
|
#!/bin/bash
# Fetch all users from the local API and convert the JSON response to CSV.
# -sS: silent progress but still print errors; -w "\n": append a trailing
# newline to the body. The original passed -w "\n" twice; curl only honors
# the last occurrence, so the duplicate was dead and has been removed.
curl -sS -X GET localhost:3000/api/users -w "\n" | json2csv
|
#!/usr/bin/env bash
#
# Check out the SVN trunk and sync freshly built release files into it.
# Requires SVN_DIR, SVN_URL, SVN_USER and SVN_PASS from ../variables.sh.
#
set -e

# Absolute path of the directory containing this script.
current=$(
cd $(dirname $0)
pwd
)
source ${current}/../variables.sh

if [[ -z "${SVN_DIR}" ]]; then
echo "<SVN_DIR> is required."
exit 1
fi

# Remove any previous working copy so the checkout starts clean.
if [[ -d ${SVN_DIR} ]]; then
chmod -R +w ${SVN_DIR}
rm -rdf ${SVN_DIR}
fi

if [[ -z "${SVN_URL}" ]]; then
echo "<SVN_URL> is required."
# fix: fail with a non-zero status (bare "exit" returned 0 on this error path)
exit 1
fi

if [[ -z "${SVN_USER}" ]] || [[ -z "${SVN_PASS}" ]]; then
echo "<SVN_USER>, <SVN_PASS> are required."
exit 1
fi

# "svn ls" exits non-zero for a missing repository; tolerate that here.
set +e
if [[ -z $(svn ls ${SVN_URL}) ]]; then
echo "repository [${SVN_URL}] is not exists."
exit 1
fi
set -e

echo ""
echo ">> Run svn checkout."
mkdir -p ${SVN_DIR}
svn co -q ${SVN_URL}/trunk ${SVN_DIR}

echo ""
echo ">> Prepare release files."
bash ${SCRIPT_DIR}/deploy/prepare_release_files.sh

echo ""
echo ">> Sync build files to svn."
rsync -a --exclude=".svn" --checksum --delete ${PACKAGE_DIR}/ ${SVN_DIR}/

pushd ${SVN_DIR}

echo ""
echo ">> Run svn del."
svn st | grep '^!' | sed -e 's/^\!\s*/svn del -q /g' | sh

echo ""
echo ">> Run svn add."
svn st | grep '^?' | sed -e 's/^\?\s*/svn add -q /g' | sh

# fix: was "pushd" with no arguments, which swaps the top two directory-stack
# entries; "popd" is what returns us to the directory entered from.
popd
|
import '../theme/index.css' //主题色
// import './common/css/index.css' //本地色
import Vue from 'vue'
import App from './App'
import ElementUI from 'element-ui'
import VueRouter from 'vue-router'
import routes from './routes'
import './common/css/common.css'
import common from './common/js/common'
import axios from 'axios'
import BaiduMap from 'vue-baidu-map'
import './styles/index.scss' // global css
import './common/js/const.js'
// Register global plugins: Element UI widgets, the router, and Baidu Maps
// (ak is the Baidu Maps application key).
Vue.use(ElementUI)
Vue.use(VueRouter)
Vue.use(BaiduMap,{
ak:'Ona5elzlpSrwOeeAI1k0EE2yHtpLoxlN'
})
// Expose shared helpers on every component instance.
Vue.prototype.$axios=axios
Vue.prototype.$qs=require('qs')
Vue.prototype.common=common
const router = new VueRouter({
routes
})
// Global auth guard: any navigation without a logged-in user in
// sessionStorage is redirected to the login page.
router.beforeEach((to, from, next) => {
//NProgress.start();
// Visiting the login page clears any stored session.
if (to.path == '/loginCloud') {
sessionStorage.removeItem('user');
}
// NOTE(review): JSON.parse throws on a malformed stored value -- confirm
// 'user' is always written as valid JSON.
let user = JSON.parse(sessionStorage.getItem('user'));
if (!user && to.path != '/loginCloud') {
next({ path: '/loginCloud' })
} else {
next()
}
})
// Mount the root component.
new Vue({
router,
render: h => h(App)
}).$mount('#app')
|
<reponame>SJTU-IPADS/dst
/*
Copyright 2010 Sun Microsystems, Inc.
All rights reserved. Use is subject to license terms.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package testsuite.clusterj.model;
import com.mysql.clusterj.annotation.Column;
import com.mysql.clusterj.annotation.Index;
import com.mysql.clusterj.annotation.Indices;
import com.mysql.clusterj.annotation.PersistenceCapable;
import com.mysql.clusterj.annotation.PrimaryKey;
import java.math.BigDecimal;
/** Schema
*
drop table if exists decimaltypes;
create table decimaltypes (
id int not null primary key,
decimal_null_hash decimal(10,5),
decimal_null_btree decimal(10,5),
decimal_null_both decimal(10,5),
decimal_null_none decimal(10,5)
) ENGINE=ndbcluster DEFAULT CHARSET=latin1;
create unique index idx_decimal_null_hash using hash on decimaltypes(decimal_null_hash);
create index idx_decimal_null_btree on decimaltypes(decimal_null_btree);
create unique index idx_decimal_null_both on decimaltypes(decimal_null_both);
*/
// Maps the "decimaltypes" test table (see schema comment above); each
// decimal column exercises a different index configuration.
@Indices({
@Index(name="idx_decimal_null_both", columns=@Column(name="decimal_null_both"))
})
@PersistenceCapable(table="decimaltypes")
@PrimaryKey(column="id")
public interface DecimalTypes extends IdBase {
int getId();
void setId(int id);
// Decimal
// Column with a unique hash index.
@Column(name="decimal_null_hash")
@Index(name="idx_decimal_null_hash")
BigDecimal getDecimal_null_hash();
void setDecimal_null_hash(BigDecimal value);
// Column with a btree index.
@Column(name="decimal_null_btree")
@Index(name="idx_decimal_null_btree")
BigDecimal getDecimal_null_btree();
void setDecimal_null_btree(BigDecimal value);
// Column indexed via the class-level @Indices declaration.
@Column(name="decimal_null_both")
BigDecimal getDecimal_null_both();
void setDecimal_null_both(BigDecimal value);
// Column with no index at all.
@Column(name="decimal_null_none")
BigDecimal getDecimal_null_none();
void setDecimal_null_none(BigDecimal value);
}
|
//*********************************************************************************************************************
// All Winner Tech, All Right Reserved. 2014-2015 Copyright (c)
//
// File name : de_bws.c
//
// Description : display engine 2.0 bws basic function definition
//
// History : 2014/04/01 <NAME> v0.1 Initial version
// 2014/04/25 <NAME> v0.11 Add block updated function
//*********************************************************************************************************************
#include "de_bws_type.h"
#include "de_rtmx.h"
#include "de_fce_type.h"
#include "de_enhance.h"
#define BWS_OFST 0xA2000 //BWS offset based on RTMX
static volatile __bws_reg_t *bws_dev[DE_NUM][CHN_NUM];
static de_reg_blocks bws_block[DE_NUM][CHN_NUM];
static de_reg_blocks bws_para_block[DE_NUM][CHN_NUM];
static uintptr_t bws_hw_base[DE_NUM][CHN_NUM] = {{0}};
//for bws
__bws_status_t *g_bws_status[DE_NUM][CHN_NUM];
//for hist
extern unsigned int *g_hist[DE_NUM][CHN_NUM];
extern unsigned int *g_hist_p[DE_NUM][CHN_NUM];
extern unsigned int g_sum[DE_NUM][CHN_NUM];
extern __hist_status_t *g_hist_status[DE_NUM][CHN_NUM];
//*********************************************************************************************************************
// function : de_bws_set_reg_base(unsigned int sel, unsigned int chno, unsigned int base)
// description : set bws reg base
// parameters :
// sel <rtmx select>
// chno <overlay select>
// base <reg base>
// return :
// success
//*********************************************************************************************************************
int de_bws_set_reg_base(unsigned int sel, unsigned int chno, void *base)
{
/* Point the register accessor at the (shadow) register block for this path/channel. */
bws_dev[sel][chno] = (__bws_reg_t *)base;
return 0;
}
#ifdef __VEP_DO_IT_IN_VBLANK
/* Flush the dirty control-register shadow to hardware. In vblank mode the
 * parameter block is written directly by de_bws_set_para(), so only the
 * control block is flushed here. */
int de_bws_update_regs(unsigned int sel, unsigned int chno)
{
if (bws_block[sel][chno].dirty == 0x1){
memcpy((void *)bws_block[sel][chno].off,bws_block[sel][chno].val,bws_block[sel][chno].size);
bws_block[sel][chno].dirty = 0x0;
}
return 0;
}
#else
/* Flush both the control block and the threshold/slope parameter block
 * shadows to hardware when they are marked dirty. */
int de_bws_update_regs(unsigned int sel, unsigned int chno)
{
if (bws_block[sel][chno].dirty == 0x1){
memcpy((void *)bws_block[sel][chno].off,bws_block[sel][chno].val,bws_block[sel][chno].size);
bws_block[sel][chno].dirty = 0x0;
}
if (bws_para_block[sel][chno].dirty == 0x1){
memcpy((void *)bws_para_block[sel][chno].off,bws_para_block[sel][chno].val,bws_para_block[sel][chno].size);
bws_para_block[sel][chno].dirty = 0x0;
}
return 0;
}
#endif
/* Initialize BWS for one display path / channel: compute the hardware
 * register base, allocate a zeroed register shadow plus the runtime status
 * struct, and describe the two register sub-blocks flushed by
 * de_bws_update_regs(). Returns 0 on success, -1 on allocation failure.
 * Fix: the register shadow is now freed when the status allocation fails
 * (it previously leaked on that error path). */
int de_bws_init(unsigned int sel, unsigned int chno, uintptr_t reg_base)
{
uintptr_t base;
void *memory;
/* Each display path occupies a 1 MiB register window; BWS sits at BWS_OFST inside it. */
base = reg_base + (sel+1)*0x00100000 + BWS_OFST; //FIXME display path offset should be defined
bws_hw_base[sel][chno] = base;
__inf("sel %d, bws_base[%d]=0x%p\n", sel, chno, (void*)base);
/* Zeroed shadow copy of the BWS registers, flushed by de_bws_update_regs(). */
memory = kmalloc(sizeof(__bws_reg_t), GFP_KERNEL | __GFP_ZERO);
if (NULL == memory) {
__wrn("malloc bws[%d][%d] memory fail! size=0x%x\n", sel, chno, sizeof(__bws_reg_t));
return -1;
}
/* Control/size/window block: first 0x10 bytes of the register file. */
bws_block[sel][chno].off = base;
bws_block[sel][chno].val = memory;
bws_block[sel][chno].size = 0x10;
bws_block[sel][chno].dirty = 0;
/* Threshold/slope parameter block at offset 0x20 (void* arithmetic is a GNU C extension). */
bws_para_block[sel][chno].off = base + 0x20;
bws_para_block[sel][chno].val = memory + 0x20;
bws_para_block[sel][chno].size = 0x10;
bws_para_block[sel][chno].dirty = 0;
de_bws_set_reg_base(sel, chno, memory);
//bws
g_bws_status[sel][chno] = kmalloc(sizeof(__bws_status_t), GFP_KERNEL | __GFP_ZERO); //FIXME where to FREE?
if (NULL == g_bws_status[sel][chno]) {
__wrn("malloc g_bws_status[%d][%d] memory fail! size=0x%x\n", sel, chno, sizeof(__bws_status_t));
/* Fix: release the register shadow so a failed init does not leak it,
 * and clear the stale pointers that referenced it. */
kfree(memory);
bws_block[sel][chno].val = NULL;
bws_para_block[sel][chno].val = NULL;
de_bws_set_reg_base(sel, chno, NULL);
return -1;
}
g_bws_status[sel][chno]->IsEnable = 0;
g_bws_status[sel][chno]->Runtime = 0;
g_bws_status[sel][chno]->PreSlopeReady = 0;
return 0;
}
//*********************************************************************************************************************
// function : de_bws_enable(unsigned int sel, unsigned int chno, unsigned int en)
// description : enable/disable bws
// parameters :
// sel <rtmx select>
// chno <overlay select>
// en <enable: 0-diable; 1-enable>
// return :
// success
//*********************************************************************************************************************
int de_bws_enable(unsigned int sel, unsigned int chno, unsigned int en)
{
/* Set the enable bit in the shadow; hardware is updated on the next flush. */
bws_dev[sel][chno]->ctrl.bits.en = en;
bws_block[sel][chno].dirty = 1;
return 0;
}
//*********************************************************************************************************************
// function : de_bws_set_size(unsigned int sel, unsigned int chno, unsigned int width, unsigned int height)
// description : set bws size
// parameters :
// sel <rtmx select>
// chno <overlay select>
// width <input width>
// height <input height>
// return :
// success
//*********************************************************************************************************************
int de_bws_set_size(unsigned int sel, unsigned int chno, unsigned int width, unsigned int height)
{
/* Hardware stores dimensions as (value - 1). */
bws_dev[sel][chno]->size.bits.width = width - 1;
bws_dev[sel][chno]->size.bits.height = height - 1;
bws_block[sel][chno].dirty = 1;
return 0;
}
//*********************************************************************************************************************
// function : de_bws_set_window(unsigned int sel, unsigned int chno, unsigned int win_enable, de_rect window)
// description : set bws window
// parameters :
// sel <rtmx select>
// chno <overlay select>
// win_enable <enable: 0-window mode diable; 1-window mode enable>
// window <window rectangle>
// return :
// success
//*********************************************************************************************************************
int de_bws_set_window(unsigned int sel, unsigned int chno, unsigned int win_enable, de_rect window)
{
bws_dev[sel][chno]->ctrl.bits.win_en = win_enable;
/* Window edges are programmed inclusive, hence the -1 on right/bottom.
 * When the window is disabled the previous coordinates are left as-is. */
if (win_enable)
{
bws_dev[sel][chno]->win0.bits.win_left = window.x;
bws_dev[sel][chno]->win0.bits.win_top = window.y;
bws_dev[sel][chno]->win1.bits.win_right = window.x + window.w - 1;
bws_dev[sel][chno]->win1.bits.win_bot = window.y + window.h - 1;
}
bws_block[sel][chno].dirty = 1;
return 0;
}
//*********************************************************************************************************************
// function : de_bws_set_para(unsigned int sel, unsigned int chno,
// unsigned int min, unsigned int black, unsigned int white, unsigned int max,
// unsigned int slope0, unsigned int slope1, unsigned int slope2, unsigned int slope3)
//
// description : set bws para
// parameters :
// sel <rtmx select>
// chno <overlay select>
// min/max/black/white <bws threshold>
// slope0/1/2/3 <bws slope>
// return :
// success
//*********************************************************************************************************************
#ifdef __VEP_DO_IT_IN_VBLANK
/* Vblank variant: write the four threshold/slope parameter registers
 * directly to hardware (two 16-bit fields packed per 32-bit register),
 * bypassing the shadow/dirty mechanism. */
int de_bws_set_para(unsigned int sel, unsigned int chno,
unsigned int min, unsigned int black, unsigned int white, unsigned int max,
unsigned int slope0, unsigned int slope1, unsigned int slope2, unsigned int slope3)
{
uintptr_t base;
base = bws_hw_base[sel][chno];
writel((black<<16) | min, (void __iomem *)(base + 0x20));
writel((max<<16) | white, (void __iomem *)(base + 0x24));
writel((slope1<<16) | slope0, (void __iomem *)(base + 0x28));
writel((slope3<<16) | slope2, (void __iomem *)(base + 0x2c));
return 0;
}
#else
/* Shadowed variant: update the parameter block shadow and mark it dirty
 * so de_bws_update_regs() flushes it to hardware later. */
int de_bws_set_para(unsigned int sel, unsigned int chno,
unsigned int min, unsigned int black, unsigned int white, unsigned int max,
unsigned int slope0, unsigned int slope1, unsigned int slope2, unsigned int slope3)
{
bws_dev[sel][chno]->blkthr.bits.min = min;
bws_dev[sel][chno]->blkthr.bits.black = black;
bws_dev[sel][chno]->whtthr.bits.max = max;
bws_dev[sel][chno]->whtthr.bits.white = white;
bws_dev[sel][chno]->blkslp.bits.slope0 = slope0;
bws_dev[sel][chno]->blkslp.bits.slope1 = slope1;
bws_dev[sel][chno]->whtslp.bits.slope2 = slope2;
bws_dev[sel][chno]->whtslp.bits.slope3 = slope3;
bws_para_block[sel][chno].dirty = 1;
return 0;
}
#endif
//*********************************************************************************************************************
// function : auto_bws_model(unsigned int width, unsigned int height, unsigned int hist[256], unsigned int hist_pre[256], unsigned int sum,
// unsigned int pre_slope_black, unsigned int pre_slope_white,
// unsigned int frame_bld_en, unsigned int bld_high_thr, unsigned int bld_low_thr, unsigned int bld_weight_lmt,
// unsigned int present_black, unsigned int present_white, unsigned int slope_black_lmt, unsigned int slope_white_lmt,
// unsigned int black_prec, unsigned int white_prec, unsigned int lowest_black, unsigned int highest_white,
// unsigned int *ymin, unsigned int *black, unsigned int *white, unsigned int *ymax,
// unsigned int *slope0, unsigned int *slope1, unsigned int *slope2, unsigned int *slope3)
// description : Auto-BWS Alg
// parameters :
// width <layer width>
// height <layer height>
// hist[256] <the latest frame histogram>
// hist_pre[256] <the frame before the latest frame histogram>
// sum <the latest frame pixel value sum>
// pre_slope_black/pre_slope_white <the frame before the latest frame auto-bws result>
// ymin/black/white/ymax/shope0/1/2/3 <auto-bws result>
// return :
//
//*********************************************************************************************************************
//R_ROPC_EN--frame_bld_en--1
//R_ROPC_TH_UPPER--bld_high_thr--90
//R_ROPC_TH_LOWER--bld_low_thr--74
//R_ROPC_WEIGHT_MIN--bld_weight_lmt--8
//R_PRESET_TILT_BLACK--present_black--53
//R_PRESET_TILT_WHITE--present_white--235
//R_SLOPE_LIMIT_BLACK--slope_black_lmt--512
//R_SLOPE_LIMIT_WHITE--slope_white_lmt--384
//R_BLACK_PERCENT--black_prec--5
//R_WHITE_PERCENT--white_prec--2
//R_LOWEST_BLACK--lowest_black--3
//R_HIGHEST_WHITE--highest_white--252
/* Auto black/white-stretch model: derives the piecewise-linear transfer
 * curve (ymin/black/white/ymax thresholds plus four 8.8 fixed-point slopes)
 * from the current and previous luminance histograms. See the parameter
 * table in the header comment above for the tuning constants. */
static void auto_bws_model(unsigned int width, unsigned int height, unsigned int hist[256], unsigned int hist_pre[256], unsigned int sum,
unsigned int pre_slope_black, unsigned int pre_slope_white,
unsigned int frame_bld_en, unsigned int bld_high_thr, unsigned int bld_low_thr, unsigned int bld_weight_lmt,
unsigned int present_black, unsigned int present_white, unsigned int slope_black_lmt, unsigned int slope_white_lmt,
unsigned int black_prec, unsigned int white_prec, unsigned int lowest_black, unsigned int highest_white,
unsigned int *ymin, unsigned int *black, unsigned int *white, unsigned int *ymax,
unsigned int *slope0, unsigned int *slope1, unsigned int *slope2, unsigned int *slope3)
{
int coeff, diff_hist, total, k;
int validcnt, validsum;
int ratio_b, ratio_w, cdf_b, cdf_w;
int weight;
int mean;
int pd_ymin = lowest_black, pd_ymax = highest_white;
int pd_black, pd_white;
int pd_ymin_fix, pd_ymax_fix;
int pd_s0, pd_s1, pd_s2, pd_s3;
int tmp;
//1.calculate frame different and frame blend weight
/* weight is 0..256: how much the new frame's result replaces the old one.
 * A large scene change (coeff high) gives full weight; a static scene is
 * damped down to bld_weight_lmt to avoid flicker. */
if (frame_bld_en)
{
diff_hist = 0;
total = 0;
for (k=0; k<256; k++)
{
diff_hist += abs(hist[k] - hist_pre[k]);
total += hist[k];
}
/* NOTE(review): divides by total -- a completely empty histogram (total==0)
 * would fault here; confirm callers guarantee a non-empty histogram. */
coeff = (100 * diff_hist) / total;
weight = (coeff >= bld_high_thr)? 256 :
(coeff < bld_low_thr)? bld_weight_lmt :
((256 - bld_weight_lmt)/(bld_high_thr - bld_low_thr) * (coeff - bld_low_thr) + bld_weight_lmt);
}
else
{
total = 0;
for (k=0; k<256; k++)
{
total += hist[k];
}
weight = 256;
}
//2.kick out the lowest black and the highest white in hist and sum
validcnt = total;
for (k=0;k<lowest_black;k++)
{
validcnt -= hist[k];
}
for (k=255;k>highest_white-1;k--)
{
validcnt -= hist[k];
}
validsum = sum;
for (k=0;k<lowest_black;k++)
{
validsum -= hist[k]*k;
}
for (k=255;k>highest_white-1;k--)
{
validsum -= hist[k]*k;
}
if (validcnt != 0)
{
mean = validsum / validcnt;
//3.find Ymin and Ymax
/* Ymin: luminance below which black_prec percent of valid pixels fall. */
ratio_b = validcnt * black_prec/100;
cdf_b = 0;
for (k = lowest_black; k < 255; k++)
{
cdf_b += hist[k];
if (cdf_b > ratio_b) {
pd_ymin = k;
break;
}
}
/* Ymax: luminance above which white_prec percent of valid pixels fall. */
ratio_w = validcnt * white_prec/100;
cdf_w = 0;
for (k = highest_white; k >=0; k--)
{
cdf_w += hist[k];
if (cdf_w > ratio_w) {
pd_ymax = k;
break;
}
}
//4.limit black and white don't cross mean
pd_black = (present_black<mean)?present_black:mean;
pd_white = (present_white>mean)?present_white:mean;
//5.calculate slope1/2 and limit to slope_black_lmt or slope_white_lmt
/* Slopes are 8.8 fixed point (256 == 1.0). */
pd_s1 = (pd_ymin < pd_black)?((pd_black<<8)/(pd_black - pd_ymin)):256;
// pd_s1 = (pd_ymin < pd_black)?((pd_black<<8)/(pd_black - pd_ymin)):(pd_black<<8);
pd_s1 = (pd_s1 > slope_black_lmt)?slope_black_lmt:pd_s1;
pd_s2 = (pd_ymax > pd_white)?(((255-pd_white)<<8)/(pd_ymax - pd_white)):256;
pd_s2 = (pd_s2 > slope_white_lmt)?slope_white_lmt:pd_s2;
//6.frame blend to slope1/2
pd_s1 = (pre_slope_black * (256 - weight) + weight * pd_s1)>>8;
pd_s2 = (pre_slope_white * (256 - weight) + weight * pd_s2)>>8;
tmp = pd_black + ((pd_s1 * (pd_ymin - pd_black)+128) >>8 );
//7.calculate slope0/3 and re-calculate ymin and ymax
if ( ( tmp>0) && (pd_ymin < pd_black) && (pd_ymin > 0))
{
pd_s0 = ((tmp<<8) + 128) / pd_ymin;
pd_ymin_fix = pd_ymin;
}
else if (pd_ymin >= pd_black) //do noting use s0
{
pd_s0 = 256;
pd_ymin_fix = 0;
}
else
{
pd_s0 = 0;
/* NOTE(review): divides by pd_s1 -- if both pre_slope_black and the new
 * slope blend to 0 this would fault; confirm the limits exclude that. */
pd_ymin_fix = -((pd_black<<8) - 128 ) / pd_s1 + pd_black;
}
tmp = pd_white + ((pd_s2 * (pd_ymax - pd_white) )>>8);
if ( (tmp < 255) && (pd_ymax > pd_white) && (pd_ymax < 255))
{
pd_s3 = (((255-tmp)<<8) + 128) / (255 - pd_ymax);
pd_ymax_fix = pd_ymax;
}
else if (pd_ymax <= pd_white) //do noting use s3
{
pd_s3 = 256;
pd_ymax_fix = 255;
}
else
{
pd_s3 = 0;
pd_ymax_fix = (((255-pd_white)<<8) - 128) / pd_s2 + pd_white;
}
}
else //no enough pixel for auto bws
{
/* Fall back to a mild fixed stretch when too few valid pixels remain. */
pd_ymin_fix = 16;
pd_black = 32;
pd_white = 224;
pd_ymax_fix = 240;
pd_s0 = 0x100;
pd_s1 = 0x100;
pd_s2 = 0x100;
pd_s3 = 0x100;
}
//
*ymin = pd_ymin_fix;
*black = pd_black;
*white = pd_white;
*ymax = pd_ymax_fix;
*slope0 = pd_s0;
*slope1 = pd_s1;
*slope2 = pd_s2;
*slope3 = pd_s3;
}
//*********************************************************************************************************************
// function : de_bws_info2para(unsigned int auto_contrast, de_rect window, __bws_config_data *para)
// description : info->para conversion
// parameters :
// auto_contrast <info from user>
// window <window info>
// para <bsp para>
// return :
// success
//*********************************************************************************************************************
/* Translate the user-facing auto_contrast setting into the BWS
 * configuration block. Only the enable flag is derived here; the window
 * parameter is currently unused (window programming presumably happens
 * elsewhere -- TODO confirm). Always returns 0. */
int de_bws_info2para(unsigned int auto_contrast, de_rect window, __bws_config_data *para)
{
/* BWS is active only for auto-contrast modes 2 and 3. */
if ((auto_contrast == 2) || (auto_contrast == 3))
para->bws_en = 1;
else
para->bws_en = 0;
return 0;
}
int de_bws_apply(unsigned int screen_id, unsigned int chno, unsigned int bws_en, unsigned int auto_contrast_dirty)
{
if (bws_en==1 && auto_contrast_dirty) //enable this time
{
g_bws_status[screen_id][chno]->IsEnable = 1;
g_bws_status[screen_id][chno]->Runtime = 0;
g_bws_status[screen_id][chno]->PreSlopeReady = 0;
}
else if (bws_en==0 && auto_contrast_dirty) //disable this time
{
g_bws_status[screen_id][chno]->IsEnable = 0;
g_bws_status[screen_id][chno]->Runtime = 0;
g_bws_status[screen_id][chno]->PreSlopeReady = 0;
}
return 0;
}
/* Per-frame BWS worker: when enabled and scheduled for this frame parity,
 * run the auto model on the collected histograms and program the
 * resulting thresholds/slopes, then track slope history for blending. */
int de_bws_tasklet(unsigned int screen_id, unsigned int chno, unsigned int frame_cnt)
{
unsigned int pre_slope_black, pre_slope_white;
unsigned int th0, th1, th2, th3, s0, s1, s2, s3;
/* BWS_FRAME_MASK selects odd/even frames, or every frame when it is 0x2. */
if (g_bws_status[screen_id][chno]->IsEnable && ((BWS_FRAME_MASK==(frame_cnt%2)) || (BWS_FRAME_MASK==0x2)))
{
if (g_hist_status[screen_id][chno]->TwoHistReady)
{
/* Use last frame's slopes for temporal blending once available. */
if (g_bws_status[screen_id][chno]->PreSlopeReady)
{
pre_slope_black = g_bws_status[screen_id][chno]->slope_black;
pre_slope_white = g_bws_status[screen_id][chno]->slope_white;
}
else
{
pre_slope_black = BWS_DEFAULT_SLOPE;
pre_slope_white = BWS_DEFAULT_SLOPE;
}
auto_bws_model(g_bws_status[screen_id][chno]->width, g_bws_status[screen_id][chno]->height,
g_hist[screen_id][chno], g_hist_p[screen_id][chno], g_sum[screen_id][chno],
pre_slope_black, pre_slope_white, 1, 90, 74, 8, 53, 235, 512, 384, 5, 2, 3, 252,
&th0, &th1, &th2, &th3, &s0, &s1, &s2, &s3);
/* NOTE(review): the values below overwrite every result of
 * auto_bws_model() with fixed constants -- looks like tuning/debug
 * overrides left in place; confirm whether this is intentional. */
th0=32;
th1=64;
th2=224;
th3=240;
s0=192 ; //384; //128
s1=320; //128; //384;
s2=384;
s3=128;
}
else //hist no ready for auto_bws
{
/* Identity-ish defaults until two histograms have been captured. */
th0 = 16;
th1 = 32;
th2 = 224;
th3 = 240;
s0 = BWS_DEFAULT_SLOPE;
s1 = BWS_DEFAULT_SLOPE;
s2 = BWS_DEFAULT_SLOPE;
s3 = BWS_DEFAULT_SLOPE;
}
de_bws_set_para(screen_id, chno, th0, th1, th2, th3, s0, s1, s2, s3);
g_bws_status[screen_id][chno]->slope_black = s1;
g_bws_status[screen_id][chno]->slope_white = s2;
/* PreSlopeReady becomes true from the second processed frame onward. */
if (g_bws_status[screen_id][chno]->Runtime < 1)
{
g_bws_status[screen_id][chno]->Runtime ++;
}
else
{
g_bws_status[screen_id][chno]->PreSlopeReady = 1;
}
}
return 0;
}
|
package com.github.peacetrue.signature.spring;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
/**
* @author peace
*/
// Minimal test endpoint used by signature tests: echoes the "input"
// request parameter back as the JSON response body.
@RestController
@RequestMapping
class SignatureTestController {
@ResponseBody
@RequestMapping(value = "/echo", produces = MediaType.APPLICATION_JSON_VALUE)
public String echo(String input) {
return input;
}
}
|
from sklearn.svm import OneClassSVM
class ClassifierRegistry:
    """Ordered registry of classifier objects, addressed by insertion index."""

    def __init__(self):
        # Each entry is a (classifier, classifier_type) pair.
        self.pkl_registry = []

    def add_classifier(self, classifier):
        """Append ``classifier`` (paired with its concrete type) to the registry.

        Args:
            classifier: The classifier object to store.
        """
        entry = (classifier, type(classifier))
        self.pkl_registry.append(entry)

    def get_classifier(self, index):
        """Return the classifier stored at position ``index``.

        Args:
            index: Zero-based position of the classifier.

        Returns:
            The classifier object at that position.
        """
        classifier, _ = self.pkl_registry[index]
        return classifier

    def fit_classifier(self, index, X_train):
        """Fit the classifier at ``index`` on ``X_train`` and mark it as fitted.

        Args:
            index: Zero-based position of the classifier.
            X_train: Training data passed to the classifier's ``fit``.
        """
        model = self.get_classifier(index)
        model.fit(X_train)
        # Record that fit() completed, mirroring the original flag.
        model.fit_status = True

    def predict_with_classifier(self, index, X_test):
        """Return the predictions of the classifier at ``index`` on ``X_test``.

        Args:
            index: Zero-based position of the classifier.
            X_test: Test data passed to the classifier's ``predict``.

        Returns:
            Whatever the stored classifier's ``predict`` returns.
        """
        return self.get_classifier(index).predict(X_test)
//-------- js/CASMorph.js --------
// Generated by CoffeeScript 1.12.2
(function () {
  var CASMorph, FourCC, console, cwaenv, document, log, setTimeout;
  cwaenv = this.getCWAEnv();
  console = this.console;
  document = this.document;
  setTimeout = this.setTimeout;
  log = console.log.bind(console);
  FourCC = cwaenv.get("FourCC");
  // CASMorph: one (FourCC name, amount) morph-target value, constructible
  // from JSON, XML elements, a binary stream, or raw values.
  CASMorph = (function () {
    function CASMorph() {
      this.fourCCName = 0;
      this.amount = 0;
    }

    // Shared sentinel for "no morphs".
    CASMorph.NO_MORPHS = [];

    CASMorph.fromJSON = function (jsnmorph) {
      var morph;
      morph = new CASMorph;
      morph.setFromJSON(jsnmorph);
      return morph;
    };

    CASMorph.fromXML = function (mrphel) {
      var morph;
      morph = new CASMorph;
      morph.setFromXML(mrphel);
      return morph;
    };

    CASMorph.fromBin = function (avdv) {
      var morph;
      morph = new CASMorph;
      morph.setFromBin(avdv);
      return morph;
    };

    CASMorph.create = function (name4cc, value) {
      var morph;
      morph = new CASMorph;
      morph.set(name4cc, value);
      return morph;
    };

    CASMorph.createFromStr = function (name4ccstr, value) {
      var morph;
      morph = new CASMorph;
      morph.setFromStr(name4ccstr, value);
      return morph;
    };

    CASMorph.prototype.setFromJSON = function (jsnmrph) {
      this.fourCCName = FourCC.fourCCInt(jsnmrph.id4cc);
      return this.amount = jsnmrph.amount;
    };

    CASMorph.prototype.setFromXML = function (mrphel) {
      return this.setFromStr(mrphel.getAttribute("name"), Number(mrphel.getAttribute("amount")));
    };

    CASMorph.prototype.setFromBin = function (avdv) {
      this.fourCCName = avdv.nextUint();
      return this.amount = avdv.nextFloat();
    };

    CASMorph.prototype.setFromStr = function (name, value) {
      return this.set(FourCC.fourCCInt(name), value);
    };

    CASMorph.prototype.set = function (name4cc, value) {
      this.fourCCName = name4cc;
      return this.amount = value;
    };

    CASMorph.prototype.setFromMorph = function (morph) {
      this.fourCCName = morph.getName();
      return this.amount = morph.getAmount();
    };

    CASMorph.prototype.hasName = function (nm4cc) {
      return this.fourCCName === nm4cc;
    };

    CASMorph.prototype.getName = function () {
      return this.fourCCName;
    };

    CASMorph.prototype.getFourCC = function () {
      return this.fourCCName;
    };

    CASMorph.prototype.getFourCCStr = function () {
      return FourCC.fourCCStr(this.fourCCName);
    };

    CASMorph.prototype.getAmount = function () {
      return this.amount;
    };

    // Formats "NAME amount", right-padding the FourCC name to 4 characters.
    CASMorph.prototype._makeText = function (amtstr) {
      var NX, nmstr, pad;
      // NOTE(review): getFourCCStr uses FourCC.fourCCStr while this method
      // uses fourCCString — confirm both exist on the FourCC API.
      nmstr = FourCC.fourCCString(this.fourCCName);
      NX = 4 - nmstr.length;
      // BUG FIX: the pad source was a single space, so names shorter than 3
      // characters were under-padded; use a 4-space source string.
      pad = 0 < NX ? "    ".slice(0, NX) : "";
      // BUG FIX: previously returned `this.amtstr` (always undefined) instead
      // of the `amtstr` parameter.
      return "" + nmstr + pad + " " + amtstr;
    };

    CASMorph.prototype.asText = function () {
      return this._makeText("" + this.amount);
    };

    CASMorph.prototype.asText4 = function () {
      return this._makeText("" + (this.amount.toFixed(4)));
    };

    return CASMorph;
  })();
  cwaenv.add(CASMorph, "CASMorph");
}).call(this);
import DruxtBlockRegion from 'druxt-blocks/dist/components/DruxtBlockRegion.vue'
// Storybook configuration for a Druxt block region. This file is an EJS
// template: every `<%= options.* %>` placeholder is substituted at module
// build time and must be preserved verbatim.
export default {
  title: '<%= options.title %>',
  component: DruxtBlockRegion,
  parameters: {
    docs: {
      description: {
        component: 'The <strong><%= options.region %></strong> Block region for the <strong><%= options.theme %></strong> theme.'
      },
    },
  },
}

// Story factory shared by all stories: forwards every Storybook arg as a prop.
const Template = (args, { argTypes }) => {
  return {
    props: Object.keys(argTypes),
    template: '<DruxtBlockRegion v-bind="$props" v-on="$props" />',
  }
}

// Default story: renders the configured region of the configured theme.
export const Default = Template.bind({})
Default.args = {
  name: '<%= options.region %>',
  theme: '<%= options.theme %>',
}
Default.storyName = 'DruxtBlockRegion'
Default.parameters = {
  docs: {
    source: {
      code: '<DruxtBlockRegion name="<%= options.region %>" theme="<%= options.theme %>" />'
    }
  }
}
|
<filename>index.js<gh_stars>0
var express = require("express");
var app = express();

// Redis client used as the shared store for the layout counters.
var redis = require('redis')
var client = redis.createClient()

// serve static files from public directory
app.use(express.static("public"));

// Initialize every layout region counter to 0.
client.mset('header', 0, 'left', 0, 'article', 0, 'right', 0, 'footer', 0)

// Get values for holy grail layout.
// Resolves with [header, left, article, right, footer] (in that order).
function data() {
  return new Promise((resolve, reject) => {
    // BUG FIX: the executor parameters were declared as (reject, resolve).
    // The first argument Promise passes is ALWAYS the resolver, so every
    // successful read rejected the promise and every error resolved it.
    client.mget(['header', 'left', 'article', 'right', 'footer'], (err, val) => {
      err ? reject(err) : resolve(val)
    })
  })
}

// Increment/overwrite one counter, e.g. GET /update/header/3
app.get("/update/:key/:value", function (req, res) {
  const key = req.params.key;
  let value = Number(req.params.value);
  client.set(key, value)
  // BUG FIX: no response was ever sent, leaving the request hanging.
  res.sendStatus(200);
});

// Return all counters.
app.get("/data", function (req, res) {
  data().then((values) => {
    console.log(values);
    res.send(values);
  }).catch((err) => {
    // Fail loudly instead of leaving the request hanging on a Redis error.
    res.status(500).send(String(err));
  });
});

app.listen(3000, () => {
  console.log("Running on 3000");
});

process.on("exit", function () {
  client.quit();
});
<reponame>princeliang/obagent
// Copyright (c) 2021 OceanBase
// obagent is licensed under Mulan PSL v2.
// You can use this software according to the terms and conditions of the Mulan PSL v2.
// You may obtain a copy of Mulan PSL v2 at:
//
// http://license.coscl.org.cn/MulanPSL2
//
// THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
// EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
// MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
// See the Mulan PSL v2 for more details.
package log
import (
"os"
"regexp"
"strconv"
"strings"
"sync"
"time"
)
// logAtLayout is the timestamp layout at the head of each log line,
// e.g. "[2020-08-07 05:55:44.377075]".
const logAtLayout = "2006-01-02 15:04:05.000000"

// logTimeInFileNameLayout is the compact timestamp embedded in rotated
// log file names.
const logTimeInFileNameLayout = "20060102150405"

// logFileInfo tracks one open log file being processed.
type logFileInfo struct {
	fileDesc *os.File
	// The file is full and has already been renamed (rotated away).
	isRenamed bool
}
// LogConfig locates the log file to monitor.
type LogConfig struct {
	LogDir      string `yaml:"logDir"`
	LogFileName string `yaml:"logFileName"`
}

// ILogAnalyzer extracts structured information from raw log lines.
type ILogAnalyzer interface {
	// isErrLog reports whether the line is an ERROR-level entry.
	isErrLog(logLine string) bool
	// getErrCode returns the error code carried by the line, or -1.
	getErrCode(logLine string) (int, error)
	// getLogAt parses the timestamp at the head of the line.
	getLogAt(logLine string) (time.Time, error)
}
// logAnalyzer is the default ILogAnalyzer implementation, driven by two
// precompiled regular expressions.
type logAnalyzer struct {
	logAtRegexp   *regexp.Regexp // leading "[YYYY-MM-DD hh:mm:ss.uuuuuu]" stamp
	errCodeRegexp *regexp.Regexp // "ret=-<digits>" error-code markers
}

// NewLogAnalyzer builds a logAnalyzer with both regexps compiled once up front.
func NewLogAnalyzer() *logAnalyzer {
	return &logAnalyzer{
		logAtRegexp:   regexp.MustCompile(`^\[\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d\d\d\d\]`),
		errCodeRegexp: regexp.MustCompile(`ret=-\d+`),
	}
}
// isErrLog reports whether logLine is an ERROR-level entry. The level field
// sits at a fixed offset (bytes 29..34), e.g.:
//
//	[2020-08-07 05:55:44.377075] ERROR [RS] ob_server_table_operator.cpp:376 ...
func (l *logAnalyzer) isErrLog(logLine string) bool {
	return len(logLine) > 34 && logLine[29:34] == "ERROR"
}
// getErrCode extracts the error code carried by logLine. It prefers the last
// "ret=-NNN" marker; for two well-known messages without a marker it returns
// fixed codes. When nothing matches, it returns -1 with a nil error.
func (l *logAnalyzer) getErrCode(logLine string) (int, error) {
	codes := l.errCodeRegexp.FindAllString(logLine, -1)
	if n := len(codes); n > 0 {
		last := codes[n-1]
		// Matches look like "ret=-123"; the digits start at offset 5.
		if len(last) >= 5 {
			code, err := strconv.Atoi(last[5:])
			if err != nil {
				return -1, err
			}
			return code, nil
		}
		return -1, nil
	}
	if strings.Contains(logLine, "clog disk is almost full") {
		return 4264, nil
	}
	if strings.Contains(logLine, "partition table update task cost too much time to execute") {
		return 4015, nil
	}
	return -1, nil
}
// getLogAt parses the bracket-wrapped timestamp at the head of logLine,
// interpreted in the local time zone.
func (l *logAnalyzer) getLogAt(logLine string) (time.Time, error) {
	stamp := strings.Trim(l.logAtRegexp.FindString(logLine), "[]")
	logAt, err := time.ParseInLocation(logAtLayout, stamp, time.Local)
	if err != nil {
		return time.Time{}, err
	}
	return logAt, nil
}
// matchString reports whether content contains reg as a plain substring.
// NOTE(review): despite the regexp-ish parameter name, this is a substring
// test, not a regular-expression match — presumably intentional.
func matchString(reg string, content string) (matched bool, err error) {
	return strings.Contains(content, reg), nil
}
// processQueue is a mutex-guarded FIFO of log files awaiting processing.
type processQueue struct {
	queue []*logFileInfo
	mutex sync.Mutex
}
// getQueueLen returns the current number of queued files.
func (q *processQueue) getQueueLen() int {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	return len(q.queue)
}
// getHead returns the oldest queued file, or nil when the queue is empty.
func (q *processQueue) getHead() *logFileInfo {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	if len(q.queue) > 0 {
		return q.queue[0]
	}
	return nil
}
// getTail returns the newest queued file, or nil when the queue is empty.
func (q *processQueue) getTail() *logFileInfo {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	if n := len(q.queue); n > 0 {
		return q.queue[n-1]
	}
	return nil
}
// popHead discards the oldest queued file; no-op on an empty queue.
func (q *processQueue) popHead() {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	if len(q.queue) > 0 {
		q.queue = q.queue[1:]
	}
}
// pushBack appends info to the end of the queue.
func (q *processQueue) pushBack(info *logFileInfo) {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	q.queue = append(q.queue, info)
}
// setRenameTrueExceptTail marks every queued file as renamed except the
// newest one (the tail).
func (q *processQueue) setRenameTrueExceptTail() {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	if len(q.queue) == 0 {
		return
	}
	for _, info := range q.queue[:len(q.queue)-1] {
		info.isRenamed = true
	}
}
// setHeadIsRenameTrue marks the oldest queued file as renamed; no-op when
// the queue is empty.
func (q *processQueue) setHeadIsRenameTrue() {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	if len(q.queue) > 0 {
		q.queue[0].isRenamed = true
	}
}
// getHeadIsRenamed reports whether the oldest queued file has been renamed;
// false when the queue is empty.
func (q *processQueue) getHeadIsRenamed() bool {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	return len(q.queue) > 0 && q.queue[0].isRenamed
}
|
package org.allenai.ml.sequences.crf;
import org.allenai.ml.linalg.Vector;
import org.allenai.ml.objective.ExampleObjectiveFn;
import org.allenai.ml.sequences.ForwardBackwards;
import org.allenai.ml.sequences.Transition;
import com.gs.collections.api.block.function.primitive.IntToIntFunction;
import lombok.RequiredArgsConstructor;
import lombok.val;
import java.util.List;
/**
 * Log-likelihood objective function for CRF training. The objective per-example boils down to
 * an actual and expected components:
 * - __actual__: The score is the sum of the transitions involved with the gold labels (the log potentials).
 *   The gradient contribution is the sum of predicates involved with those transitions
 * - __expected__: the log partition function (logZ) is subtracted from the score; its gradient
 *   contribution is the negated node/edge marginals computed by forward-backwards.
 * @param <S> state type of the underlying CRF state space
 */
@RequiredArgsConstructor
public class CRFLogLikelihoodObjective<S> implements ExampleObjectiveFn<CRFIndexedExample> {

    private final CRFWeightsEncoder<S> weightEncoder;

    /**
     * Computes the per-example log-likelihood (gold-path score minus logZ) and
     * accumulates its gradient into {@code outGrad}.
     *
     * @throws IllegalArgumentException if the example is unlabeled or a gold
     *         transition is absent from the state space
     */
    @Override
    public double evaluate(CRFIndexedExample example, Vector inParams, Vector outGrad) {
        if (!example.isLabeled()) {
            throw new IllegalArgumentException("Requires labeled example");
        }
        double[][] logPotentials = weightEncoder.fillPotentials(inParams, example);
        val fb = new ForwardBackwards<S>(weightEncoder.stateSpace);
        val fbResult = fb.compute(logPotentials);
        // Actual: score and positive gradient from the gold label sequence.
        int[] goldLabels = example.getGoldLabels();
        double logNumerator = 0.0;
        for (int idx = 0; idx + 1 < goldLabels.length; idx++) {
            int from = goldLabels[idx];
            int to = goldLabels[idx + 1];
            val transition = weightEncoder.stateSpace.transitionFor(from, to);
            if (!transition.isPresent()) {
                val states = weightEncoder.stateSpace.states();
                throw new IllegalArgumentException(String.format("Gold transition doesn't exist [%s, %s]",
                    states.get(from), states.get(to)));
            }
            // Objective contribution is the sum of gold transition scores
            logNumerator += logPotentials[idx][transition.get().selfIndex];
            // Gradient are the features on those transitions
            Vector.Iterator nodePredIt = example.getNodePredicateValues(idx);
            updateGrad(outGrad, nodePredIt, (predIdx) -> weightEncoder.nodeWeightIndex(predIdx, from), 1.0);
            Vector.Iterator edgePredIt = example.getEdgePredicateValues(idx);
            int transIdx = transition.get().selfIndex;
            updateGrad(outGrad, edgePredIt, (predIdx) -> weightEncoder.edgeWeightIndex(predIdx, transIdx), 1.0);
        }
        // Expected: subtract model expectations (predicate value * marginal)
        // for every state/transition at every position.
        double logDenominator = fbResult.getLogZ();
        double[][] nodeMarginals = fbResult.getNodeMarginals();
        double[][] edgeMarginals = fbResult.getEdgeMarginals();
        int numStates = weightEncoder.stateSpace.states().size();
        int numTransitions = weightEncoder.stateSpace.transitions().size();
        for (int idx = 0; idx+1 < example.getSequenceLength(); idx++) {
            Vector.Iterator nodePreds = example.getNodePredicateValues(idx);
            while (!nodePreds.isExhausted()) {
                int predIdx = (int) nodePreds.index();
                double predVal = nodePreds.value();
                for (int s = 0; s < numStates; s++) {
                    int weightIdx = weightEncoder.nodeWeightIndex(predIdx, s);
                    outGrad.inc(weightIdx, predVal * -nodeMarginals[idx][s]);
                }
                nodePreds.advance();
            }
            Vector.Iterator edgePreds = example.getEdgePredicateValues(idx);
            while (!edgePreds.isExhausted()) {
                int predIdx = (int) edgePreds.index();
                double predVal = edgePreds.value();
                for (int t=0; t < numTransitions; ++t) {
                    int weightIdx = weightEncoder.edgeWeightIndex(predIdx, t);
                    outGrad.inc(weightIdx, predVal * -edgeMarginals[idx][t]);
                }
                edgePreds.advance();
            }
        }
        // The partition function upper-bounds any single path's score.
        assert logNumerator <= logDenominator;
        return logNumerator - logDenominator;
    }

    // Adds scale * predicate-value into the gradient slot chosen by
    // weightIndexMap for every predicate in the iterator.
    private void updateGrad(Vector outGrad, Vector.Iterator predIt, IntToIntFunction weightIndexMap, double scale) {
        predIt.reset();
        while (!predIt.isExhausted()) {
            int predIdx = (int) predIt.index();
            double predVal = predIt.value();
            int weightIdx = weightIndexMap.valueOf(predIdx);
            outGrad.inc(weightIdx, scale * predVal);
            predIt.advance();
        }
    }
}
|
#!/usr/bin/env bats
# Integration tests for the deprecated `git team ls` / `git team list`
# commands; both should print a deprecation warning plus the assignments.
load '/bats-libs/bats-support/load.bash'
load '/bats-libs/bats-assert/load.bash'

# With no assignments configured, `ls` warns and reports 'No assignments'.
@test "git-team: ls should show 'No assignments'" {
  run /usr/local/bin/git-team ls
  assert_success
  assert_line --index 0 "warn: 'git team ls' has been deprecated and is going to be removed in a future major release, use 'git team assignments' instead"
  assert_line --index 1 'No assignments'
}

# NOTE(review): this test runs `list` but asserts the warning text that names
# 'git team ls' — presumably `list` shares the same deprecation message;
# confirm against the CLI implementation.
@test "git-team: list should show all alias -> coauthor assignments" {
  /usr/local/bin/git-team add a 'A <a@x.y>'
  /usr/local/bin/git-team add bb 'B <b@x.y>'
  /usr/local/bin/git-team add c 'C <c@x.y>'
  run /usr/local/bin/git-team list
  assert_success
  assert_line --index 0 "warn: 'git team ls' has been deprecated and is going to be removed in a future major release, use 'git team assignments' instead"
  assert_line --index 1 'assignments'
  assert_line --index 2 '─ a → A <a@x.y>'
  assert_line --index 3 '─ bb → B <b@x.y>'
  assert_line --index 4 '─ c → C <c@x.y>'
  # Clean up so later tests start from an empty assignment set.
  /usr/local/bin/git-team rm a
  /usr/local/bin/git-team rm bb
  /usr/local/bin/git-team rm c
}

@test "git-team: ls should show all alias -> coauthor assignments" {
  /usr/local/bin/git-team add a 'A <a@x.y>'
  /usr/local/bin/git-team add bb 'B <b@x.y>'
  /usr/local/bin/git-team add c 'C <c@x.y>'
  run /usr/local/bin/git-team ls
  assert_success
  assert_line --index 0 "warn: 'git team ls' has been deprecated and is going to be removed in a future major release, use 'git team assignments' instead"
  assert_line --index 1 'assignments'
  assert_line --index 2 '─ a → A <a@x.y>'
  assert_line --index 3 '─ bb → B <b@x.y>'
  assert_line --index 4 '─ c → C <c@x.y>'
  # Clean up so later tests start from an empty assignment set.
  /usr/local/bin/git-team rm a
  /usr/local/bin/git-team rm bb
  /usr/local/bin/git-team rm c
}
|
<reponame>RodrigoGuerra/Trab1IA<filename>docs/search/classes_8.js
// Doxygen-generated search index (classes): entries are
// [search key, [display text, [target page, flag, scope]]]. Do not edit by hand.
var searchData=
[
  ['layout',['Layout',['../classlayout_1_1_layout.html',1,'layout']]],
  ['leftturnagent',['LeftTurnAgent',['../classpacman_agents_1_1_left_turn_agent.html',1,'pacmanAgents']]]
];
|
<filename>fbcnms-packages/fbcnms-ui/components/layout/TopBarFormControlLabel.js
/**
* Copyright 2020 The Magma Authors.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @flow
* @format
*/
import FormControlLabel from '@material-ui/core/FormControlLabel';
import React from 'react';
import {makeStyles} from '@material-ui/styles';
// Styles pinned by this wrapper: primary text color on the label and the
// theme's standard toolbar sizing on the root element.
const useStyles = makeStyles(theme => ({
  label: {
    color: theme.palette.primaryText,
  },
  root: theme.mixins.toolbar,
}));

// Props forwarded to material-ui's FormControlLabel (exact subset this
// wrapper supports).
type Props = {|
  checked?: boolean | string,
  className?: string,
  control: React$Element<any>,
  disabled?: boolean,
  inputRef?: Function,
  label: React$Node,
  name?: string,
  onChange?: Function,
  value?: string,
|};

// Thin wrapper around FormControlLabel that applies top-bar styling while
// passing all other props through untouched.
export default function TopBarFormControlLabel(props: Props) {
  const classes = useStyles();
  return <FormControlLabel classes={classes} {...props} />;
}
|
#!/bin/bash -x
# Smoke test: obtain a certificate via letsencrypt-auto against a Boulder
# instance, then sanity-check the help output.
set -eo pipefail
# $PUBLIC_IP $PRIVATE_IP $PUBLIC_HOSTNAME $BOULDER_URL are dynamically set at execution
# with curl, instance metadata available from EC2 metadata service:
#public_host=$(curl -s http://169.254.169.254/2014-11-05/meta-data/public-hostname)
#public_ip=$(curl -s http://169.254.169.254/2014-11-05/meta-data/public-ipv4)
#private_ip=$(curl -s http://169.254.169.254/2014-11-05/meta-data/local-ipv4)
cd letsencrypt
export PATH="$PWD/letsencrypt-auto-source:$PATH"
letsencrypt-auto --os-packages-only --debug --version
# Quote the dynamic values so hostnames/URLs containing unusual characters
# cannot be word-split or glob-expanded by the shell.
letsencrypt-auto certonly --no-self-upgrade -v --standalone --debug \
  --text --agree-dev-preview --agree-tos \
  --renew-by-default --redirect \
  --register-unsafely-without-email \
  --domain "$PUBLIC_HOSTNAME" --server "$BOULDER_URL"
if ! letsencrypt-auto --help --no-self-upgrade | grep -F "letsencrypt-auto [SUBCOMMAND]"; then
  echo "letsencrypt-auto not included in help output!"
  exit 1
fi
|
<filename>src/service.js
"use strict";
module.exports = Service;

// extends Namespace
var Namespace = require("./namespace");
// Sets up the prototype chain (Service extends Namespace) and tags the
// constructor with its reflection className, all in one expression.
((Service.prototype = Object.create(Namespace.prototype)).constructor = Service).className = "Service";

var Method = require("./method"),
    util = require("./util"),
    rpc = require("./rpc");

/**
 * Constructs a new service instance.
 * @classdesc Reflected service.
 * @extends NamespaceBase
 * @constructor
 * @param {string} name Service name
 * @param {Object.<string,*>} [options] Service options
 * @throws {TypeError} If arguments are invalid
 */
function Service(name, options) {
    Namespace.call(this, name, options);

    /**
     * Service methods, keyed by method name.
     * @type {Object.<string,Method>}
     */
    this.methods = {}; // toJSON, marker

    /**
     * Cached methods as an array; rebuilt lazily via the methodsArray getter.
     * @type {Method[]|null}
     * @private
     */
    this._methodsArray = null;
}

/**
 * Service descriptor.
 * @interface IService
 * @extends INamespace
 * @property {Object.<string,IMethod>} methods Method descriptors
 */
*/
/**
 * Builds a service from a service descriptor.
 * @param {string} name Service name
 * @param {IService} json Service descriptor
 * @returns {Service} Created service
 * @throws {TypeError} If arguments are invalid
 */
Service.fromJSON = function fromJSON(name, json) {
    var service = new Service(name, json.options);
    /* istanbul ignore else */
    if (json.methods)
        Object.keys(json.methods).forEach(function(methodName) {
            service.add(Method.fromJSON(methodName, json.methods[methodName]));
        });
    if (json.nested)
        service.addJSON(json.nested);
    return service;
};
/**
 * Converts this service to a service descriptor.
 * @returns {IService} Service descriptor
 */
Service.prototype.toJSON = function toJSON() {
    var inherited = Namespace.prototype.toJSON.call(this);
    // util.toObject consumes a flat [key, value, key, value, ...] array;
    // undefined values are how optional fields are omitted.
    return util.toObject([
        "options" , inherited && inherited.options || undefined,
        "methods" , Namespace.arrayToJSON(this.methodsArray) || /* istanbul ignore next */ {},
        "nested"  , inherited && inherited.nested || undefined
    ]);
};

/**
 * Methods of this service as an array for iteration.
 * Built lazily from this.methods and invalidated by clearCache().
 * @name Service#methodsArray
 * @type {Method[]}
 * @readonly
 */
Object.defineProperty(Service.prototype, "methodsArray", {
    get: function() {
        return this._methodsArray || (this._methodsArray = util.toArray(this.methods));
    }
});
// Drops the memoized methods array; called whenever methods are added or
// removed so methodsArray is rebuilt on next access. Returns the service
// for chaining.
function clearCache(service) {
    service._methodsArray = null;
    return service;
}
/**
 * Looks up a method by name, falling back to regular namespace lookup.
 * @override
 */
Service.prototype.get = function get(name) {
    var method = this.methods[name];
    if (method)
        return method;
    return Namespace.prototype.get.call(this, name);
};
/**
 * Resolves every method of this service, then the namespace itself.
 * @override
 */
Service.prototype.resolveAll = function resolveAll() {
    this.methodsArray.forEach(function(method) {
        method.resolve();
    });
    return Namespace.prototype.resolve.call(this);
};
/**
 * Adds a method or nested object to this service.
 * @override
 */
Service.prototype.add = function add(object) {
    /* istanbul ignore if */
    if (this.get(object.name))
        throw Error("duplicate name '" + object.name + "' in " + this);
    // Anything other than a Method is handled by the namespace base class.
    if (!(object instanceof Method))
        return Namespace.prototype.add.call(this, object);
    this.methods[object.name] = object;
    object.parent = this;
    return clearCache(this);
};
/**
 * Removes a method or nested object from this service.
 * @override
 */
Service.prototype.remove = function remove(object) {
    // Anything other than a Method is handled by the namespace base class.
    if (!(object instanceof Method))
        return Namespace.prototype.remove.call(this, object);
    /* istanbul ignore if */
    if (this.methods[object.name] !== object)
        throw Error(object + " is not a member of " + this);
    delete this.methods[object.name];
    object.parent = null;
    return clearCache(this);
};
/**
 * Creates a runtime service using the specified rpc implementation.
 * @param {RPCImpl} rpcImpl RPC implementation
 * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
 * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
 * @returns {rpc.Service} RPC service. Useful where requests and/or responses are streamed.
 */
Service.prototype.create = function create(rpcImpl, requestDelimited, responseDelimited) {
    var rpcService = new rpc.Service(rpcImpl, requestDelimited, responseDelimited);
    // For each method, generate a lowerCamelCase-named wrapper on the rpc
    // service that forwards to rpcCall with the method's resolved
    // request/response constructors bound in.
    for (var i = 0, method; i < /* initializes */ this.methodsArray.length; ++i) {
        rpcService[util.lcFirst((method = this._methodsArray[i]).resolve().name)] = util.codegen(["r","c"], util.lcFirst(method.name))("return this.rpcCall(m,q,s,r,c)")({
            m: method,
            q: method.resolvedRequestType.ctor,
            s: method.resolvedResponseType.ctor
        });
    }
    return rpcService;
};
|
# For each experiment directory: pick its best checkpoint, decode the matching
# sigmorphon dev set and score the predictions.
# FIX: "$@" and path expansions are now quoted so directories containing
# spaces are not word-split; $dev is deliberately left unquoted because the
# *dev glob must expand.
for expdir in "$@" ; do
    pair=$( basename "$expdir" )
    dev=/mnt/data/bpop/sigmorphon/2019/task1/$pair/*dev
    echo "$pair"
    model=$( python best_config.py "$expdir" )
    echo "$model"
    rawout=$expdir/dev.pred
    final=$rawout.out
    python translate.py -model "$model" -corpora $dev -beam_size 5 -output "$rawout" -gpu 0 -n_best 2 -attn_path "$expdir/attn.pt" -probs_path "$expdir/probs.pt"
    python pred2sigmorphon.py $dev "$rawout" > "$final"
    python /mnt/data/bpop/sigmorphon/2019/evaluation/evaluate_2019_task1.py --reference $dev --output "$final"
done
|
const xml2js = require('xml2js-parser').parseStringSync;
const crypto = require('crypto');
const Base = require('../base.js');
// Handler for WeChat Pay refund-result notifications: decrypts the callback
// payload and acknowledges receipt so WeChat stops retrying.
module.exports = class extends Base {
    constructor(logger, config = undefined) {
        super(logger, config);
        // Optional async callback invoked with the decoded request.
        this._replier = null;
    }

    set replier(replier) {
        this._replier = replier;
    }

    // Collects the raw XML body, decrypts req_info, optionally hands the
    // decoded message to the replier, then answers WeChat.
    async handle(request, response) {
        try {
            request.rawBody = '';
            request.setEncoding('utf8');
            request.on('data', (chunk) => { request.rawBody += chunk;});
            // Wait for the full body before parsing.
            let incoming = await (new Promise( (resolve, reject)=>{
                request.on('end', () => {resolve(request.rawBody);});
            }));
            this.logger.package(`wechatRefundCallback:${incoming}`);
            let sourceJson = xml2js(incoming, { explicitArray : false, ignoreAttrs : true }).xml;
            // Merge decrypted fields with the plaintext appid/mch_id.
            request.body = Object.assign(await this._decryptMsg(sourceJson.req_info), {appId: sourceJson.appid, mchId: sourceJson.mch_id});
            if (this._replier != undefined) await this._replier(request);
            // Acknowledge success to WeChat.
            response.send('<xml><return_code><![CDATA[SUCCESS]]></return_code><return_msg><![CDATA[OK]]></return_msg></xml>');
        } catch (error) {
            this.logger.error(`err in responseToWechat:${error.stack}`);
            response.send('<xml><return_code><![CDATA[FAIL]]></return_code><return_msg><![CDATA[程序出错]]></return_msg></xml>');
        }
    }

    // Decrypts req_info: AES-256-ECB with key = lowercase hex MD5 of the
    // configured payment key, then camelCases the decoded XML field names.
    async _decryptMsg(encryptText) {
        let key = crypto.createHash('md5').update(this.config.payment.key).digest('hex').toLowerCase();
        let aesDecipher = crypto.createDecipheriv('aes-256-ecb', key, "");
        aesDecipher.setAutoPadding(true);
        let aesDecipherChunks = aesDecipher.update(encryptText, 'base64', 'utf8');
        aesDecipherChunks += aesDecipher.final('utf8');
        let data = xml2js(aesDecipherChunks, { explicitArray : false, ignoreAttrs : true }).root;
        let msg = {};
        // snake_case / PascalCase keys -> camelCase.
        Object.keys(data).forEach(key => {
            msg[
                key.replace(/^[A-Z]{1}/, (c) => c.toLowerCase())
                    .replace(/\_[a-z]{1}/g, (c) => c.substr(1).toUpperCase())
            ] = data[key];
        });
        return msg;
    }
}
package openweathermap
import (
"encoding/json"
"fmt"
"net/url"
)
var apiURL = "http://api.openweathermap.org/data/2.5"
// getForecast sends a constructed request to the OpenWeatherMap API and
// decodes the JSON response into a CurrentWeatherData value.
func getForecast(owm *OpenWeatherMap, url string) (*CurrentWeatherData, error) {
	forecast := &CurrentWeatherData{}
	resp, err := owm.client.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// Surface API-level errors before attempting to decode the body.
	err = CheckForErrors(resp)
	if err != nil {
		return nil, err
	}
	if err := json.NewDecoder(resp.Body).Decode(forecast); err != nil {
		// FIX: corrected typo in the error message ("occured" -> "occurred").
		return nil, &JSONDecodingError{ErrString: "An error occurred when decoding response to JSON"}
	}
	return forecast, nil
}
// GetCurrentByCityName retrieves the current weather data for the provided
// location (a city name, query-escaped before being placed in the URL).
func (owm *OpenWeatherMap) GetCurrentByCityName(location string) (*CurrentWeatherData, error) {
	if location == "" {
		return nil, &IllegalArgumentError{ErrString: "Location cannot be an empty string"}
	}
	// FIX: the local was previously named `url`, shadowing the imported
	// net/url package within this function.
	endpoint := fmt.Sprintf("%s/weather?q=%s&units=%s&appid=%s", apiURL, url.QueryEscape(location), owm.unit, owm.key)
	return getForecast(owm, endpoint)
}
// GetCurrentByCoords retrieves the current weather data for the provided
// latitude/longitude pair.
func (owm *OpenWeatherMap) GetCurrentByCoords(coords *Coords) (*CurrentWeatherData, error) {
	// FIX: renamed the local from `url` to avoid shadowing the net/url package.
	endpoint := fmt.Sprintf("%s/weather?lat=%f&lon=%f&units=%s&appid=%s", apiURL, coords.Lat, coords.Lon, owm.unit, owm.key)
	return getForecast(owm, endpoint)
}
// GetCurrentByID retrieves the current weather data for the provided city id.
func (owm *OpenWeatherMap) GetCurrentByID(id int) (*CurrentWeatherData, error) {
	// FIX: renamed the local from `url` to avoid shadowing the net/url package.
	endpoint := fmt.Sprintf("%s/weather?id=%d&units=%s&appid=%s", apiURL, id, owm.unit, owm.key)
	return getForecast(owm, endpoint)
}
// GetCurrentByZipCode retrieves the current weather data for the provided
// zip code and country code.
func (owm *OpenWeatherMap) GetCurrentByZipCode(zipCode int, countryCode string) (*CurrentWeatherData, error) {
	// FIX: renamed the local from `url` to avoid shadowing the net/url package.
	endpoint := fmt.Sprintf("%s/weather?zip=%d,%s&units=%s&appid=%s", apiURL, zipCode, countryCode, owm.unit, owm.key)
	return getForecast(owm, endpoint)
}
|
<filename>lib/currency_converter.rb
require "currency_converter/version"
module CurrencyConverter
  # Value object representing an amount of money in a given currency.
  #
  # Exchange rates are configured globally via Money.conversion_rates and are
  # expressed relative to a single base currency. Arithmetic and comparisons
  # convert the right-hand side into the receiver's currency first.
  class Money
    # Decimal places used when rounding converted amounts.
    @@rnd = 2
    @@base_currency = ''
    @@rates = {}

    attr_accessor :amount
    attr_accessor :currency

    # amount:: numeric amount (coerced to Float)
    # currency:: currency code, e.g. 'EUR' (coerced to String)
    def initialize(amount, currency)
      raise 'Amount argument required' if amount.nil?
      raise 'Currency argument required' if currency.nil?
      @amount = amount.to_f
      @currency = currency.to_s
    end

    # Configure the base currency and the rates hash (currency => rate,
    # relative to base_currency).
    def self.conversion_rates(base_currency, rates)
      @@base_currency = base_currency
      @@rates = rates
    end

    def self.base_currency
      @@base_currency
    end

    def self.rates
      @@rates
    end

    # "10.00 EUR"-style representation.
    def inspect
      "%.2f " % @amount + @currency
    end

    # Returns self when already in +currency+, otherwise a new Money holding
    # the converted, rounded amount.
    def convert_to currency
      if self.currency == currency
        self
      else
        rate = calculate_rate(self.currency, currency)
        converted_amount = (self.amount * rate).round(@@rnd)
        self.class.new(converted_amount, currency)
      end
    end

    def == money
      check_type(money)
      (self.amount).round(@@rnd) == (money.convert_to(self.currency).amount).round(@@rnd)
    end

    def > money
      check_type(money)
      (self.amount).round(@@rnd) > (money.convert_to(self.currency).amount).round(@@rnd)
    end

    def < money
      check_type(money)
      (self.amount).round(@@rnd) < (money.convert_to(self.currency).amount).round(@@rnd)
    end

    def + money
      check_type(money)
      self.class.new(
        (self.amount + money.convert_to(self.currency).amount).round(@@rnd),
        self.currency
      )
    end

    def - money
      check_type(money)
      self.class.new(
        (self.amount - money.convert_to(self.currency).amount).round(@@rnd),
        self.currency
      )
    end

    def * multiplier
      self.class.new(
        (self.amount * multiplier).round(@@rnd),
        self.currency
      )
    end

    def / divider
      self.class.new(
        (self.amount / divider).round(@@rnd),
        self.currency
      )
    end

    private

    # Rate converting +input_currency+ into +output_currency+.
    # BUG FIX: divisions now use float semantics (1.0 / rate, Numeric#fdiv)
    # so integer-valued rates (e.g. 2) no longer truncate — previously
    # 1 / 2 == 0 produced a zero rate and zero-amount conversions.
    def calculate_rate input_currency, output_currency
      if input_currency == @@base_currency
        @@rates[output_currency]
      elsif output_currency == @@base_currency
        1.0 / @@rates[input_currency]
      else
        @@rates[output_currency].fdiv(@@rates[input_currency])
      end
    end

    def check_type money
      raise 'Money instance expected' unless money.is_a?(CurrencyConverter::Money)
    end
  end
end
|
#!/bin/bash
# Serve the Jekyll site locally, including unpublished posts from _drafts/.
bundle exec jekyll serve --drafts
|
// Redux action creators for the todo list. Each creator keeps the persisted
// copy in localStorage (key: 'items', an array of {name} objects) in sync
// and returns the corresponding action.

// Read the persisted items array, or [] when nothing is stored yet.
// (Shared helper replacing the identical parse block previously duplicated
// in all four action creators.)
function readItems() {
    const raw = localStorage.getItem('items');
    return raw !== null ? JSON.parse(raw) : [];
}

// Persist the items array back to localStorage.
function writeItems(values) {
    localStorage.setItem('items', JSON.stringify(values));
}

// Append a new todo named `value` and return the SET_TODO action.
export function add(value){
    const values = readItems();
    values.push({name: value});
    writeItems(values);
    return {
        type: "SET_TODO",
        payload: {name: value}
    };
}

// Delete the todo named `value` and return the REMOVE_TODO action.
export function remove(value){
    writeItems(readItems().filter((element) => element.name !== value));
    return {
        type: "REMOVE_TODO",
        payload: {name: value}
    };
}

// Rename the todo `value` to `newName` and return the UPDATE_TODO action.
export function update(value, newName){
    const values = readItems().map( element => {
        if(element.name === value)
            element.name = newName;
        return element;
    });
    writeItems(values);
    return {
        type: "UPDATE_TODO",
        payload: {name: value, newName}
    };
}

// Return every persisted todo in a SET_ALL action.
export function getAll(){
    return {
        type: "SET_ALL",
        payload: readItems()
    };
}
package errors
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Behavioral spec for the ResultNotExpected error type: both the value and
// pointer forms must be recognised as mongoid errors and as
// ResultNotExpected, as must the shared ErrResultNotExpected sentinel.
var _ = Describe("ResultNotExpected", func() {
	It("behaves", func() {
		Expect(IsMongoidError(ResultNotExpected{})).To(BeTrue())
		Expect(IsMongoidError(&ResultNotExpected{})).To(BeTrue())
		Expect(IsResultNotExpected(ResultNotExpected{})).To(BeTrue())
		Expect(IsResultNotExpected(&ResultNotExpected{})).To(BeTrue())
		Expect(IsResultNotExpected(ErrResultNotExpected)).To(BeTrue())
	})
})
|
package malte0811.controlengineering.blockentity.bus;
import blusunrize.immersiveengineering.api.wires.ConnectionPoint;
import blusunrize.immersiveengineering.api.wires.LocalWireNetwork;
import blusunrize.immersiveengineering.api.wires.WireType;
import malte0811.controlengineering.blockentity.CEIICBlockEntity;
import malte0811.controlengineering.blocks.bus.BusInterfaceBlock;
import malte0811.controlengineering.bus.BusState;
import malte0811.controlengineering.bus.IBusConnector;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.phys.Vec3;
/**
 * Block entity for a bus relay: a passive connection point that joins wire
 * networks without reacting to, or contributing to, the bus state.
 */
public class BusRelayBlockEntity extends CEIICBlockEntity implements IBusConnector {
    public BusRelayBlockEntity(BlockEntityType<?> type, BlockPos pos, BlockState state) {
        super(type, pos, state);
    }

    // A relay does not react to bus state changes...
    @Override
    public void onBusUpdated(ConnectionPoint updatedPoint) {}

    // ...nor does it contribute any state of its own.
    @Override
    public BusState getEmittedState(ConnectionPoint checkedPoint) {
        return BusState.EMPTY;
    }

    // NOTE(review): delegates unchanged to the superclass — presumably needed
    // to expose the method for IBusConnector; confirm before removing.
    @Override
    public LocalWireNetwork getLocalNet(int cpIndex) {
        return super.getLocalNet(cpIndex);
    }

    // Wire attachment point: block centre pushed 1.5/16 of a block outward
    // along the block's facing direction.
    @Override
    public Vec3 getConnectionOffset(ConnectionPoint here, ConnectionPoint other, WireType type) {
        return new Vec3(0.5, 0.5, 0.5)
                .add(Vec3.atLowerCornerOf(getFacing().getNormal()).scale(1.5 / 16));
    }

    private Direction getFacing() {
        return getBlockState().getValue(BusInterfaceBlock.FACING);
    }
}
|
<filename>js_modules/greeting.js
/* ///////////////////////// LEGAL NOTICE ///////////////////////////////
This file is part of ZScripts,
a modular script framework for Pokemon Online server scripting.
Copyright (C) 2013 <NAME>, aka "ArchZombie" / "ArchZombie0x", <<EMAIL>>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
/////////////////////// END LEGAL NOTICE /////////////////////////////// */
({
    // Modules this script depends on; injected by the ZScripts framework.
    require: ["com", "theme", "help_command", "uptime"]
    ,
    loadModule: function ()
    {
        // Run afterLogIn() for every player that logs in.
        this.script.registerHandler("afterLogIn", this);
    }
    ,
    // Greets a newly logged-in player, shows script uptime and points them
    // at the help commands; Linux users get an extra broadcast.
    afterLogIn: function (src, msg, chan)
    {
        var os = sys.os(src);
        this.com.broadcast("Hello " + sys.name(src) +"! Script uptime is " + this.uptime.uptime() + "!", this.theme.INFO );
        this.com.message([src], "If you need help, type /help , also note it's \"/commands --all\" not \"/commands all\"", this.theme.INFO);
        if (os === "linux")
        {
            this.com.broadcast(sys.name(src) +", I see you are a linux user! You are enlightened!");
        }
    }
});
|
<reponame>KorAP/Kustvakt<filename>core/src/main/java/de/ids_mannheim/korap/exceptions/WrappedException.java
package de.ids_mannheim.korap.exceptions;
import de.ids_mannheim.korap.auditing.AuditRecord;
import java.util.Arrays;
/**
* @author hanl
* @date 08/04/2015
*/
// should be a http exception that responds to a service point
// is the extension of the notauthorized exception!
/**
 * Exception wrapper that, in addition to the usual {@link KustvaktException}
 * payload, records a matching service {@link AuditRecord} for each wrap.
 */
public class WrappedException extends KustvaktException {

    /**
     * Forwards the stringified user id, status, message and serialized
     * arguments to {@link KustvaktException}.
     */
    private WrappedException (Object userid, Integer status, String message,
            String args, Exception rootCause) {
        super(String.valueOf(userid), status, message, args, rootCause);
    }

    /**
     * Builds a wrapped exception from raw service data and appends a
     * service audit record for (userid, status, args).
     */
    public WrappedException (Object userid, Integer status, String ... args) {
        this(userid, status, "", Arrays.asList(args).toString(), null);
        AuditRecord record = AuditRecord.serviceRecord(userid, status, args);
        this.records.add(record);
    }

    /**
     * Wraps an existing {@link KustvaktException}, copying its audit records
     * and appending a new service record.
     * NOTE(review): the chained constructor call passes e.getStatusCode(),
     * not the {@code status} parameter — {@code status} only reaches the new
     * audit record. Confirm this asymmetry is intended.
     */
    public WrappedException (KustvaktException e, Integer status,
            String ... args) {
        this(e.getUserid(), e.getStatusCode(), e.getMessage(), e.getEntity(), e);
        AuditRecord record = AuditRecord.serviceRecord(e.getUserid(), status,
                args);
        record.setField_1(e.string());
        this.records.addAll(e.getRecords());
        this.records.add(record);
    }
}
|
/*
* admin.sql -- PostgreSQL commands for creating the RADIUS user.
*
* WARNING: You should change 'localhost' and 'radpass'
* to something else. Also update raddb/sql.conf
* with the new RADIUS password.
*
* WARNING: This example file is untested. Use at your own risk.
* Please send any bug fixes to the mailing list.
*
* $Id: 6b41aa1538b56713965e41d2b271a23c8e03bc68 $
*/
/*
 * Create default administrator for RADIUS
 */
CREATE USER radius WITH PASSWORD '<PASSWORD>';

/* The radius user needs to clean tables in the test environment. */
GRANT ALL ON ALL TABLES IN SCHEMA public TO radius;
GRANT SELECT, USAGE ON ALL SEQUENCES IN schema public TO radius;
|
#!/bin/bash
# Thin wrapper that forwards all arguments to the Maya IO 2017 batch
# renderer. `exec` replaces this shell with the renderer process, so the
# renderer's exit status and signals propagate directly to the caller
# (the original `Render "$@"; exit $?` kept an extra shell alive for no
# benefit).
exec /usr/autodesk/mayaIO2017/bin/Render "$@"
|
<reponame>tryconnect/react-native-kit
import { AppState, Platform, BackHandler, AppRegistry } from 'react-native';
import Container from '../Container/Container';
import Component from '../Components/Application';
import registerComponent from '../Helpers/registerComponent';
import fireAppCallbacks from '../Helpers/fireAppCallbacks';
import registerBaseBindings from '../Helpers/registerBaseBindings';
import registerCoreContainerAliases from '../Helpers/registerCoreContainerAliases';
import registerBaseServiceProviders from '../Helpers/registerBaseServiceProviders';
import registerConfiguredProviders from '../Helpers/registerConfiguredProviders';
import ServiceProvider from '../Support/ServiceProvider';
import StepProgress from '../Queue/StepProgress';
import AsyncBlockingQueue from '../Queue/AsyncBlockingQueue';
import abtractUniqueID from '../Utilities/abtractUniqueID';
class Application extends Container {
/**
 * Creates the application container, wires default bindings/providers and
 * registers the root component with the native AppRegistry.
 * @param {string} appName name registered with the native side
 * @param {object|function:object} configs app configuration (or factory)
 */
constructor(appName, configs = {}) {
    super();
    Container.setInstance(this);
    // registered service providers, keyed by unique id
    this._serviceProviders = new Map();
    // callbacks that register/boot dependent services once a named service
    // registers; removed when no longer needed
    this._whenRegisterServiceProviders = new Map();
    // callbacks fired while the app is booting; cleared after boot
    this._bootingCallbacks = [];
    // callbacks fired once the app has finished booting; cleared after boot
    this._bootedCallbacks = [];
    // flag: boot finished
    this._isBooted = false;
    // flag: boot in progress
    this._isBooting = false;
    // provider registration queue; cleared after boot
    this._registerQueue = new AsyncBlockingQueue();
    // registration queue for deferred providers (run after the others);
    // cleared after boot
    this._deferRegisterQueue = new AsyncBlockingQueue();
    // provider boot queue; cleared after boot
    this._bootQueue = new AsyncBlockingQueue();
    // step-by-step boot progress tracker
    this._bootProgess = new StepProgress();
    // callbacks fired when the app becomes active (foreground)
    this._startingCallbacks = [];
    // callbacks fired when the app goes to the background
    this._shuttingCallbacks = [];
    // bind the default services
    registerBaseBindings(this, configs);
    // bind the default aliases
    registerCoreContainerAliases(this, configs);
    // register the default service providers
    registerBaseServiceProviders(this, configs);
    // one-shot: register services/aliases declared in the config
    this.registerConfiguredProviders = async () => {
        delete this.registerConfiguredProviders;
        return await registerConfiguredProviders(this, configs);
    };
    // register the app component with the native side
    registerComponent(appName, () => Component);
    this.appState = AppState.currentState;
    // fan out foreground/background transitions to the callback lists
    AppState.addEventListener("change", (state) => {
        if (state == "background") {
            this._shuttingCallbacks.map((callback) => {
                return callback("APP_STATE");
            });
        } else if (state == "active") {
            this._startingCallbacks.map((callback) => {
                return callback("APP_STATE");
            });
        }
        this.appState = state;
    });
}
/**
 * @returns {StepProgress} the boot progress tracker
 */
getBootProgess() {
    return this._bootProgess;
}
/**
* @todo Hàm đăng ký service provider
* @param {class|object:ServiceProvider} provider
* @param {boolean} force
*/
register(provider, force = false) {
let provideID = abtractUniqueID(provider);
// hàm kiểm tra và khởi tạo service provider
const getProvider = (provide) => {
let provideID = abtractUniqueID(provide);
// kiểm tra service provider đã đăng ký chưa
// nếu đã đăng ký và không có ghi đè
if (this._serviceProviders && this._serviceProviders.has(provideID) && !force) {
// trả về service đã đăng ký
return this._serviceProviders.get(provideID);
}
let instance = provide;
// nếu service chưa khởi tạo thì khởi tạo
if (typeof provide === "function") {
instance = this.make(provide);
}
if (!(instance instanceof ServiceProvider)) {
throw new Error("service provider is not support");
}
// check các service khởi tạo theo service hiện tại
if (this._whenRegisterServiceProviders && this._whenRegisterServiceProviders.has(provideID)) {
const {
register: registerCallback,
boot: bootCallback
} = this._whenRegisterServiceProviders.get(provideID);
this._whenRegisterServiceProviders.delete(provideID);
if (!this._whenRegisterServiceProviders.size) {
delete this._whenRegisterServiceProviders;
}
const register = instance.register.bind(instance);
const boot = instance.boot.bind(instance);
instance.register = async () => {
await Promise.resolve(register());
await Promise.resolve(registerCallback());
};
instance.boot = async () => {
await Promise.resolve(boot());
await Promise.resolve(bootCallback());
};
}
return instance;
};
let instance = getProvider(provider);
// lấy các service đăng ký trong hàm provides
let provides = (instance.provides && instance.provides()) || [];
if (provides.length) {
// khởi tạo service
provides = provides.map(getProvider);
}
// hàm gọi register của service provider
const register = async () => {
await Promise.resolve(instance.register());
if (provides.length) {
for (let index = 0; index < provides.length; index++) {
let provide = provides[index];
if (provide && provide.register) {
await Promise.resolve(provide.register());
}
}
}
};
// hàm gọi boot của service provider
const boot = async () => {
await Promise.resolve(instance.boot());
if (provides.length) {
for (let index = 0; index < provides.length; index++) {
let provide = provides[index];
if (provide && provide.boot) {
await Promise.resolve(provide.boot());
}
}
}
};
// lấy danh sách các service khi đăng ký các service này thì mới đăng ký
let when = (instance.when && instance.when()) || [];
if (when.length) {
when.forEach((provide) => {
let provideID = abtractUniqueID(provide);
bootCallback = registerCallback = () => Promise.resolve();
if (!this._whenRegisterServiceProviders) {
this._whenRegisterServiceProviders = new Map();
}
if(this._whenRegisterServiceProviders.has(provideID)) {
let {
register: registerCallback,
boot: bootCallback
} = this._whenRegisterServiceProviders.get(provideID);
}
this._whenRegisterServiceProviders.set(provideID, {
register: async () => {
await registerCallback();
await register();
},
boot: async () => {
await bootCallback();
await boot();
}
});
});
return instance;
}
// nếu app đã boot
if (this.isBooted()) {
(async () => {
try {
await register();
await boot();
} catch (error) {
if (error && !error.message) {
error = new Error(error);
}
if (!error.message) {
error.message = "Register Provider error";
}
error.code = error.code || 5;
this.make("events").emit("app.js.exception", {
error,
isFatal: true
});
throw error;
}
})();
return instance;
}
if (this.isBooting()) {
// nếu app đang boot
(async () => {
try {
await register();
if (this.isBooted()) {
await boot();
return;
}
this.booted(boot);
} catch (error) {
if (error && !error.message) {
error = new Error(error);
}
if (!error.message) {
error.message = "Register Provider error";
}
error.code = error.code || 5;
error.isFatal = true;
this.make("events").emit("app.js.exception", {
error,
isFatal: true
});
throw error;
}
})();
return instance;
}
// đăng ký service
this._serviceProviders.set(provideID, instance);
// nếu service cần load sau
if( instance.isDeferred() ) {
// đăng ký vào hằng đợi load sau
this._deferRegisterQueue.enqueue( instance );
// đăng ký các service trong hàm provides
provides.forEach((provide) => {
this._deferRegisterQueue.enqueue(provide);
});
} else { // nếu service cần load ngay
// đăng ký vào hằng đợi load
this._registerQueue.enqueue( instance );
// đăng ký các service trong hàm provides
provides.forEach((provide) => {
this._registerQueue.enqueue(provide);
});
}
return instance;
}
/**
* @todo hàm boot service provider
*/
async boot() {
// nếu đã boot
if( this.isBooted() || this.isBooting() ) {
return await Promise.resolve();
}
this._isBooting = true;
// gọi callback booting
fireAppCallbacks(this, this._bootingCallbacks);
delete this._bootingCallbacks;
// hàm thực thi hằng đợi boot
let runBootQueue = async () => {
if (!this._bootQueue) {
return await Promise.resolve();
}
// số phần tử trong hằng đợi
let length = this._bootQueue.length;
if (!length) {
return await Promise.resolve();
}
for (let index = 0; index < length; index++) {
// lấy service provider từ hằng đợi
let provider = await this._bootQueue.dequeue();
// boot
if(provider.boot) {
this._bootProgess.createStep("app.provider.boot", Promise.resolve(provider), provider);
await Promise.resolve(provider.boot());
}
}
};
// thực thi hằng đợi boot
await runBootQueue();
delete this._serviceProviders;
delete this._bootQueue;
if(!this._whenRegisterServiceProviders.size) {
delete this._whenRegisterServiceProviders;
}
this._isBooting = false;
this._isBooted = true;
// gọi callback booted
fireAppCallbacks(this, this._bootedCallbacks);
delete this._bootedCallbacks;
}
/**
* @todo hàm đăng ký callback bắt đầu boot
* @param {function} callback
*/
booting(callback) {
if(typeof callback !== "function") {
throw new Error("Booting callback is not support");
}
if(this.isBooted()) {
return;
}
if (this.isBooting()) {
fireAppCallbacks(this, [callback]);
return;
}
this._bootingCallbacks && this._bootingCallbacks.push(callback);
return {
remove: () => {
if (this._bootingCallbacks) {
let indexOf = this._bootingCallbacks.indexOf(callback);
if (indexOf !== -1) {
this._bootingCallbacks.splice(indexOf, 1);
}
}
}
};
}
/**
* @todo hàm đăng ký callback đã boot xong
* @param {function} callback
*/
booted(callback) {
if (typeof callback !== "function") {
throw new Error("Booted callback is not support");
}
if (this.isBooted()) {
fireAppCallbacks(this, [callback]);
return;
}
this._bootedCallbacks && this._bootedCallbacks.push(callback);
return {
remove: () => {
if (this._bootedCallbacks) {
let indexOf = this._bootedCallbacks.indexOf(callback);
if (indexOf !== -1) {
this._bootedCallbacks.splice(indexOf, 1);
}
}
}
};
}
/**
* @todo hàm thêm callback khi app chuyển sang background hoặc tắt
* @param {function} callback
*/
shutting(callback) {
if (typeof callback !== "function") {
throw new Error("shutting callback is not support");
}
this._shuttingCallbacks.push(callback);
return {
remove: () => {
if (this._shuttingCallbacks) {
let indexOf = this._shuttingCallbacks.indexOf(callback);
if (indexOf !== -1) {
this._shuttingCallbacks.splice(indexOf, 1);
}
}
}
};
}
/**
* @todo Hàm thêm callback khi app khởi động
* @param {function} callback
*/
starting(callback) {
if (typeof callback !== "function") {
throw new Error("shutting callback is not support");
}
this._startingCallbacks.push(callback);
return {
remove: () => {
if (this._startingCallbacks) {
let indexOf = this._startingCallbacks.indexOf(callback);
if (indexOf !== -1) {
this._startingCallbacks.splice(indexOf, 1);
}
}
}
};
}
/**
 * @returns {boolean} true once boot() has completed
 */
isBooted() {
    return this._isBooted;
}
/**
 * @returns {boolean} true while boot() is in progress
 */
isBooting() {
    return this._isBooting;
}
/**
* @todo hàm thực thi hằng đợi đăng ký service provider cần load sau
*/
loadDeferredProviders = async () => {
delete this.loadDeferredProviders;
if ( !this._deferRegisterQueue ) {
return await Promise.resolve();
}
// số phần tử trong hằng đợi
let length = this._deferRegisterQueue.length;
if (!length) {
delete this._deferRegisterQueue;
return await Promise.resolve();
}
// hàm thực thi hằng đợi đăng ký service provider
const runRegisterQueue = async () => {
if (!this._deferRegisterQueue) {
return await Promise.resolve();
}
let length = this._deferRegisterQueue.length;
if (!length) {
return await Promise.resolve();
}
for (let index = 0; index < length; index++) {
// lấy service provider từ hằng đợi
let provider = await this._deferRegisterQueue.dequeue();
if (provider.register) {
await Promise.resolve(provider.register());
}
if (provider.boot) {
// nếu app đang boot
if (this.isBooting()) {
// đợi app boot xong thì gọi hàm boot
await new Promise((resolve) => {
this.booted(async () => {
// đợi hàm boot
await Promise.resolve(provider.boot());
resolve();
});
});
} else if (this.isBooted()) { // nếu app đã boot
// gọi hàm boot
await Promise.resolve(provider.boot());
} else { // nếu app chưa boot
// đăng ký hằng đợi boot
this._bootQueue.enqueue(provider);
}
}
}
if (this._deferRegisterQueue.length) {
await runRegisterQueue();
}
};
// thực thi hằng đợi
await runRegisterQueue();
delete this._deferRegisterQueue;
};
/**
* @todo Hàm bootstrap
* @param {array:Bootstrap} bootstrappers
*/
bootstrapWith = async (bootstrappers = []) => {
this._bootProgess.excute();
this._bootProgess.createStep("app.starting", Promise.resolve(), this);
try {
delete this.bootstrapWith;
// hàm thực thi hằng đợi đăng ký service provider
const runRegisterQueue = async () => {
if (!this._registerQueue) {
return await Promise.resolve();
}
let length = this._registerQueue.length;
if (!length) {
return await Promise.resolve();
}
for (let index = 0; index < length; index++) {
// lấy service provider từ hằng đợi
let provider = await this._registerQueue.dequeue();
if(provider.register) {
this._bootProgess.createStep("app.provider.register", Promise.resolve(provider), provider);
await Promise.resolve(provider.register());
}
if (provider.boot) {
// nếu app đang boot
if (this.isBooting()) {
// đợi app boot xong thì gọi hàm boot
await new Promise((resolve) => {
this.booted(async () => {
this._bootProgess.createStep("app.provider.boot", Promise.resolve(provider), provider);
// đợi hàm boot
await Promise.resolve(provider.boot());
resolve();
});
});
} else if (this.isBooted()) { // nếu app đã boot
this._bootProgess.createStep("app.provider.boot", Promise.resolve(provider), provider);
// gọi hàm boot
await Promise.resolve(provider.boot());
} else { // nếu app chưa boot
// đăng ký hằng đợi boot
this._bootQueue.enqueue(provider);
}
}
}
// giải quyết register trong register
if (this._registerQueue.length) {
await runRegisterQueue();
}
};
this._bootProgess.createStep("app.bootstrapping", Promise.resolve(bootstrappers), bootstrappers);
for (let key in bootstrappers) {
if (bootstrappers.hasOwnProperty(key)) {
let bootstrapper = bootstrappers[key];
// khởi tạo bootstrap
bootstrapper = this.make(bootstrapper);
this._bootProgess.createStep("app.bootstrap", Promise.resolve(bootstrapper), bootstrapper);
// đợi bootstrapped
await Promise.resolve(bootstrapper.bootstrap(this));
// thực thi hằng đợi đăng ký service provider
await runRegisterQueue();
}
}
// thực thi hằng đợi đăng ký service provider sau khi đã bootstrap xong
await runRegisterQueue();
this._bootProgess.createStep("app.bootstrapped", Promise.resolve(bootstrappers), bootstrappers);
this._bootProgess.createStep("app.started", Promise.resolve(this), this);
// xoá hằng đợi
delete this._registerQueue;
} catch (error) {
if (error && !error.message) {
error = new Error(error);
}
if (!error.message) {
error.message = "Bootstrap error";
}
error.code = error.code || 3;
error.isFatal = true;
this.make("events").emit("app.js.exception", {
error,
isFatal: true
});
throw error;
}
};
/**
 * Requests an app reload by emitting the "app.reload" event.
 * @param {boolean} force
 */
reload(force = true) {
    this.make("events").emit("app.reload", {force});
}
/**
 * Shuts the app down: notifies the shutting callbacks first (unless
 * forced), then exits. Exit is Android-only (BackHandler.exitApp); other
 * platforms have no programmatic exit here.
 * @param {boolean} force exit immediately without awaiting callbacks
 * @returns {Promise|undefined}
 */
exit(force = false) {
    try {
        let promises = this._shuttingCallbacks.map((callback) => {
            return callback("EXIT", force);
        });
        const exit = () => {
            Platform.OS == "android" && BackHandler.exitApp();
            // alert("Please restart application");
            // close();
        };
        if (force) {
            exit();
            return Promise.resolve();
        }
        // exit whether the callbacks succeed or fail
        return Promise.all(promises)
            .then(exit)
            .catch(exit)
            ;
    } catch (error) {
        if (error && !error.message) {
            error = new Error(error);
        }
        if (!error.message) {
            error.message = "Exit error";
        }
        error.code = error.code || 4;
        error.isFatal = true;
        this.make("events").emit("app.js.exception", {
            error,
            isFatal: true
        });
    }
}
/**
* @todo Hàm đăng ký background js
* @param {string} name
* @param {function: Provider} task
*/
registerHeadlessTask(name, task) {
if(typeof task !== "function") {
throw new Error("Headless js task is not support");
}
AppRegistry.registerHeadlessTask(name, task);
}
/**
 * Silences YellowBox warnings in development builds; no-op in production.
 * @param {string[]} warnings warning prefixes to ignore
 */
ignoreWarnings(warnings) {
    if (__DEV__) {
        const YellowBox = require('react-native').YellowBox;
        YellowBox.ignoreWarnings(warnings);
    }
}
}
export default Application; |
<gh_stars>0
"""
=========================
Tornado Analyses
=========================
This sample script illustrates how to retrieve and analyze the Storm Prediction Center (SPC) tornado database (1950-present), using both the tornado and tracks modules.
For documentation generation purposes, return_ax must be set True for plotting functions. You don't need to have this extra argument in every plotting function call (e.g., "storm.plot(return_ax=True)" will produce the same output as "storm.plot()").
"""
import tropycal.tracks as tracks
import tropycal.tornado as tornado
import datetime as dt
###########################################
# Using TornadoDataset
# --------------------
# Let's start by creating an instance of a TornadoDataset object. By default, this reads in the SPC tornado database from their website.
tor_data = tornado.TornadoDataset()
###########################################
# We can use a TornadoDataset object to analyze both tornadoes associated with tropical cyclones and non-TC tornadoes. As an example of the latter, we can make a plot of all tornadoes during the 27 April 2011 tornado outbreak, along with the Practically Perfect Forecast (PPF) in filled contours:
tor_ax,zoom,leg_tor = tor_data.plot_tors(dt.datetime(2011,4,27),plotPPF=True,return_ax=True)
tor_ax
###########################################
# Using TrackDataset
# ------------------
# We can also use TornadoDataset to assess tornadoes associated with tropical cyclones. First off let's get an instance of TrackDataset for the North Atlantic HURDAT2 basin:
hurdat_atl = tracks.TrackDataset(basin='north_atlantic',source='hurdat',include_btk=False)
###########################################
# This instance of Storm contains several methods that return the storm data back in different data types. The following examples will show how to retrieve 3 different data types.
#
# Now we want to attribute tornadoes from the SPC database to all tropical cyclones which produced tornadoes. We do so using the ``assign_storm_tornadoes()`` method of TrackDataset. The main input parameter is "dist_thresh", which controls the distance from the tropical cyclone center over which to attribute tornadoes to. For this example we'll use 750 kilometers as the threshold.
#
# This code block will take a while to run, as it will iterate over every storm in HURDAT2 and match tornadoes to those that produced them.
hurdat_atl.assign_storm_tornadoes(dist_thresh=750)
###########################################
# Once the above block is done running, we can now look at a climatology of tornadoes associated with North Atlantic tropical cyclones. The current method of analysis is via the ``plot_TCtors_rotated()`` method, which rotates tropical cyclones to a storm motion relative framework.
#
# Most tornadoes associated with tropical cyclones occur in the front right quadrant (i.e., forward and right of the storm track). We can visualize this by plotting all tornadoes associated with tropical cyclones in a motion relative framework:
hurdat_atl.plot_TCtors_rotated('all',return_ax=True)
###########################################
# We can also make the same plot for a composite subset of tropical cyclones, given either their IDs (e.g., "AL052004"), or a storm tuple. For instance, let's composite the four hurricanes that made landfall in Florida in 2004:
hurdat_atl.plot_TCtors_rotated(storms=[('charley',2004),('frances',2004),('ivan',2004),('jeanne',2004)],return_ax=True)
###########################################
# Using a Storm object
# --------------------
#
# Tropical cyclone tornado analyses can also be done via a Storm object. Let's get the data for <NAME> from 2004, which produced a major tornado outbreak:
#
# .. warning::
#
# If you retrieve an instance of a Storm object without first running ``TrackDataset.assign_storm_tornadoes()`` method, doing tornado analyses with a Storm object will require re-downloading the tornado database for each new instance of Storm. If you plan to analyze multiple storms with tornadoes, it is recommended to run ``assign_storm_tornadoes()`` first.
storm = hurdat_atl.get_storm(('ivan',2004))
###########################################
# Let's plot all the tornado tracks, and daily PPF, associated with Hurricane Ivan:
storm.plot_tors(plotPPF=True,return_ax=True)
###########################################
# Let's make a plot of the tornadoes in storm motion relative coordinates:
storm.plot_TCtors_rotated(return_ax=True)
|
<reponame>pch6828/AnimateJS<gh_stars>0
// Target canvas shared by the animation.
const canvas = document.getElementById('content_canvas');

// Typography animation: renders a big "D" followed by "esigner"/"eveloper",
// with the two word tails revealed/hidden like a rotating cylinder driven
// by vertical pointer movement.
export class Typo_Cylinder{
    constructor(){
        canvas.style.backgroundColor = '#333333';
        // font size (px) of the large leading 'D' glyph
        this.Dsize = 270;
        // font size (px) of the smaller word tails
        this.strsize = 100;
        // baseline offset where the reveal window starts
        this.start = this.strsize/4;
        // complementary reveal heights for the two words
        this.size1 = this.start;
        this.size2 = this.strsize+this.start;
    }
    // Recompute sizes from the new stage dimensions.
    resize(stageWidth, stageHeight){
        this.width = stageWidth;
        this.height = stageHeight;
        this.centerx = stageWidth/2;
        this.centery = stageHeight/2;
        this.Dsize = Math.min(this.height/3, this.width/3);
        this.strsize = this.Dsize/3;
        this.start = this.strsize/4;
        this.size1 = this.start;
        this.size2 = this.strsize+this.start;
    }
    // Draw one frame; moveY shifts the reveal split between the two words.
    animate(ctx, moveX, moveY, isDown){
        ctx.globalCompositeOperation='source-over';
        let textwidth = 0;
        let Dwidth = 0;
        // normalized vertical movement drives the cylinder rotation
        let movement = moveY/this.height;
        this.size1 += movement*500;
        this.size2 -= movement*500;
        // clamp the two reveal heights to their complementary range
        if(this.size1>this.strsize+this.start){
            this.size1 = this.strsize+this.start;
            this.size2 = this.start;
        }else if(this.size1<this.start){
            this.size1 = this.start;
            this.size2 = this.strsize+this.start;
        }
        // measure 'D' + 'eveloper' to center the whole word
        ctx.font=this.Dsize+'px consolas';
        Dwidth = ctx.measureText('D').width;
        textwidth+=Dwidth;
        ctx.font=this.strsize+'px consolas';
        textwidth+=ctx.measureText('eveloper').width;
        ctx.globalCompositeOperation='source-over'
        // mix red/green channels according to the current reveal balance
        // NOTE(review): the two channels use different denominators
        // (strsize vs strsize-start) — possibly intentional tuning; confirm.
        ctx.fillStyle = "rgba("+(this.size2-this.start)/(this.strsize)*150+","+(this.size1-this.start)/(this.strsize-this.start)*150+",0,1)";
        ctx.font=this.Dsize+'px consolas';
        ctx.fillText('D', this.centerx-textwidth/2, this.centery+this.strsize);
        // draw 'eveloper', then erase its hidden part with destination-out
        ctx.fillStyle = "rgba(0,150,0,0.7)";
        ctx.font=this.strsize+'px consolas';
        ctx.fillText('eveloper', this.centerx-textwidth/2+Dwidth, this.centery+this.strsize);
        ctx.fillStyle = "#000"
        ctx.globalCompositeOperation='destination-out';
        ctx.fillRect(this.centerx-textwidth/2+Dwidth, this.centery, textwidth, this.size2);
        // draw 'esigner', then erase its hidden part the same way
        ctx.globalCompositeOperation='source-over'
        ctx.fillStyle = "rgba(150,0,0,0.7)";
        ctx.fillText('esigner', this.centerx-textwidth/2+Dwidth, this.centery);
        ctx.fillStyle = "#000"
        ctx.globalCompositeOperation='destination-out';
        ctx.fillRect(this.centerx-textwidth/2+Dwidth, this.centery-this.strsize, textwidth, this.size1);
        // faint white overlay of the full text for a ghost effect
        ctx.globalCompositeOperation = 'source-over'
        ctx.fillStyle = "rgba(255,255,255,0.3)";
        ctx.font=this.Dsize+'px consolas';
        ctx.fillText('D', this.centerx-textwidth/2, this.centery+this.strsize);
        ctx.font=this.strsize+'px consolas';
        ctx.fillText('esigner', this.centerx-textwidth/2+Dwidth, this.centery);
        ctx.fillText('eveloper', this.centerx-textwidth/2+Dwidth, this.centery+this.strsize);
    }
}
# Termux package definition for st (suckless simple terminal).
TERMUX_PKG_HOMEPAGE=https://st.suckless.org/
TERMUX_PKG_DESCRIPTION="A simple virtual terminal emulator for X"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Tristan Ross <spaceboyross@yandex.com>"
TERMUX_PKG_VERSION=0.8.4
TERMUX_PKG_REVISION=2
TERMUX_PKG_SRCURL="http://dl.suckless.org/st/st-$TERMUX_PKG_VERSION.tar.gz"
TERMUX_PKG_SHA256=d42d3ceceb4d6a65e32e90a5336e3d446db612c3fbd9ebc1780bc6c9a03346a6
# FIXME: config.h specified a Liberation Mono font which is not available in Termux.
# Needs a patch for ttf-dejavu font package or liberation font package should be added.
TERMUX_PKG_DEPENDS="libxft, libxext"
TERMUX_PKG_BUILD_IN_SRC=true
TERMUX_PKG_EXTRA_MAKE_ARGS="TERMINFO=$TERMUX_PREFIX/share/terminfo"
# The terminfo entry is provided by the system package, so drop st's copy.
TERMUX_PKG_RM_AFTER_INSTALL="share/terminfo"

# st has no configure script; just drop in our customized config.h.
termux_step_configure() {
	cp "$TERMUX_PKG_BUILDER_DIR/config.h" "config.h"
}
|
# NOTE(review): Python 2 script (xrange, u'' literals, str/bytes mixing);
# it will not run unmodified under Python 3. Indentation below is
# reconstructed — confirm against the original file.
from __future__ import division
import codecs
import json
import nltk
from nltk import sent_tokenize
from nltk import word_tokenize

# In[3]:
# NOTE(review): nltk.download() with no arguments opens the interactive
# downloader and blocks; for unattended runs prefer nltk.download('punkt').
nltk.download()
with codecs.open("twitter.txt", encoding='utf-8') as f:
    # read the whole corpus as one unicode string
    raw = f.read()
sents = nltk.sent_tokenize(raw)
import csv
# question 1: one row per sentence — "index þ sentence þ average word length".
# known problems: output contains "" and each sentence is not in one cell
with codecs.open("twitter.txt", encoding='utf-8') as f:
    raw = f.read()
sents = nltk.sent_tokenize(raw)
with codecs.open("output1.csv", 'w',) as p:
    writer = csv.writer(p, delimiter=' ')
    for i in xrange(len(sents)):
        sents[i] = sents[i].replace('\n',' ')
        words = word_tokenize(sents[i])
        num_char = 0  # total number of characters over all words
        for j in words:
            num_char += len(j)
        ave_word_size = num_char/len(words)
        # index þ sentence þ average word size, thorn-delimited
        row = u'%s'%i + u'þ' + sents[i] + u'þ' + u'%s'%ave_word_size
        row = row.encode('utf-8')  # [s.encode('utf-8') for s in row]
        row = ''.join(row)
        writer.writerows([row])
# question 2: emit the same per-sentence data as hand-built XML
with codecs.open("twitter.txt", encoding='utf-8') as f:
    raw = f.read()
sents = nltk.sent_tokenize(raw)
p = open("output2.xml", 'w')
p.write('<document>\n')
p.write(' '+'<sentences>\n')
for i in xrange(len(sents)):
    p.write(' '+'<sentence id=\"{}\">\n'.format(i))
    sents[i] = sents[i].replace('\n',' ')
    row = [s.encode('utf-8') for s in sents[i]]
    row = ''.join(row)
    p.write(' '+'<text>{}</text>\n'.format(row))
    words = word_tokenize(sents[i])
    num_char = 0  # total number of characters over all words
    for j in words:
        num_char += len(j)
    ave_word_size = num_char/len(words)
    p.write(' '+'<avg>{}</avg>\n'.format(float(ave_word_size)))
    p.write(' '+'</sentence>\n')
p.write(' '+'</sentences>\n')
p.write('</document>\n')
p.close()
import xmltodict, json
# question 3: convert the XML from question 2 into JSON
import xml.etree.ElementTree as ET
tree = ET.parse('output2.xml')
root = tree.getroot()
sentences = root[0]
with codecs.open('data.json', 'w', encoding='utf-8') as f:
    feeds = {"documents":{"sentences":[]}}
    for i in xrange(len(sentences)):
        # child [0] is <text>, child [1] is <avg>
        entry = {'avg':sentences[i][1].text, 'id':sentences[i].attrib['id'], 'text':sentences[i][0].text}
        feeds["documents"]["sentences"].append(entry)
    json.dump(feeds,f)
|
<filename>__tests__/testCases/bugReport3Input.ts
import { E2eTestUserInput } from '../../src/types/E2eTestUserInput'
// E2e test fixture reproducing bug report #3.
// NOTE: the spread at the bottom intentionally overrides the anonymised
// placeholder fields above it ('anon') with concrete test values.
export const bugReport3Input: E2eTestUserInput = {
  t1r10_prijmy: '45000',
  priloha3_r11_socialne: '1000',
  priloha3_r13_zdravotne: '1000',
  r122: '100',
  r031_priezvisko_a_meno: 'anon',
  r031_rodne_cislo: 'anon',
  r032_uplatnujem_na_partnera: false,
  partner_step: 0,
  partner_podmienky: {},
  r032_partner_vlastne_prijmy: '',
  r032_partner_pocet_mesiacov: '',
  r001_dic: 'anon',
  r003_nace: 'anon',
  meno_priezvisko: 'anon',
  r005_meno: 'anon',
  r004_priezvisko: 'anon',
  r006_titul: '',
  r007_ulica: 'anon',
  r008_cislo: 'anon',
  r009_psc: 'anon',
  r010_obec: 'anon',
  r011_stat: 'anon',
  employed: true,
  r038: '2000',
  r039_socialne: '150',
  r039_zdravotne: '50',
  r108: '0',
  r120: '10',
  hasChildren: true,
  children: [
    {
      id: 1,
      priezviskoMeno: 'anon',
      rodneCislo: '9609226286',
      kupelnaStarostlivost: true,
      wholeYear: true,
      monthFrom: '0',
      monthTo: '11',
    },
    {
      id: 2,
      priezviskoMeno: 'anon',
      rodneCislo: '9510133853',
      kupelnaStarostlivost: true,
      wholeYear: true,
      monthFrom: '0',
      monthTo: '11',
    },
  ],
  platil_prispevky_na_dochodok: true,
  r075_zaplatene_prispevky_na_dochodok: '100',
  r037_pocetMesiacov: '',
  r037_zaplatene_uroky: '',
  expectNgoDonationPage: true,
  XIIoddiel_uplatnujem2percenta: false,
  r142_ico: '',
  r142_obchMeno: '',
  r142_ulica: '',
  r142_cislo: '',
  r142_psc: '',
  r142_obec: '',
  XIIoddiel_suhlasZaslUdaje: false,
  r033_partner_kupele: true,
  r033_partner_kupele_uhrady: '10',
  kupele: true,
  r036_deti_kupele: '20',
  r076a_kupele_danovnik: '10',
  childrenInSpa: true,
  danovnikInSpa: true,
  iban: 'anon',
  datum: '',
  email: 'anon',
  // concrete values for this bug report — override the placeholders above
  ...{
    r001_dic: '233123123',
    r003_nace: '62010 - Počítačové programovanie',
    r007_ulica: 'Mierova',
    r008_cislo: '4',
    r009_psc: '82105',
    r010_obec: 'Bratislava 3',
    r011_stat: 'Slovensko',
    datum: '22.02.2020',
    r031_rodne_cislo: '9609226286',
    meno_priezvisko: '<NAME>',
    r004_priezvisko: 'anon',
    r005_meno: 'anon',
  },
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# CI job configuration: build the fuzz targets and run them under Valgrind.
# Unit and functional tests are disabled for this job.
export LC_ALL=C.UTF-8
export DOCKER_NAME_TAG="ubuntu:20.04"
export CONTAINER_NAME=ci_native_fuzz_valgrind
export PACKAGES="clang llvm python3 libevent-dev bsdmainutils libboost-system-dev libboost-filesystem-dev libboost-test-dev libboost-thread-dev valgrind"
export NO_DEPENDS=1
export RUN_UNIT_TESTS=false
export RUN_FUNCTIONAL_TESTS=false
export RUN_FUZZ_TESTS=true
export FUZZ_TESTS_CONFIG="--valgrind"
export GOAL="install"
export SATCOIN_CONFIG="--enable-fuzz --with-sanitizers=fuzzer CC=clang CXX=clang++"
|
require "test_helper.rb"
# Exercises the String extensions: #is_int, #is_range and #to_range.
class TestString < MiniTest::Test
  NOT_RANGES = ["four", "4.4", "4", "4..", "4...", "4..8..15", "4...8...15"].freeze

  def test_is_int
    refute "four".is_int
    refute "4.4".is_int
    refute "4..4".is_int
    assert "4".is_int
  end

  def test_is_range
    NOT_RANGES.each do |candidate|
      refute candidate.is_range
    end
    assert "4..8".is_range
    assert "4...8".is_range
  end

  def test_to_range
    NOT_RANGES.each do |candidate|
      exception = assert_raises(TypeError) { candidate.to_range }
      assert_equal "Cannot convert #{candidate} to range.", exception.message
    end
    assert_equal "4..8".to_range, 4..8
    assert_equal "4...8".to_range, 4...8
  end
end
<reponame>nmonvisualizer/nmonvisualizer
package com.ibm.nmon.data.definition;
import java.util.Collection;
import java.util.List;
import java.text.SimpleDateFormat;
import com.ibm.nmon.data.DataSet;
import com.ibm.nmon.data.DataType;
import com.ibm.nmon.data.matcher.HostMatcher;
import com.ibm.nmon.data.matcher.TypeMatcher;
import com.ibm.nmon.data.matcher.FieldMatcher;
import com.ibm.nmon.analysis.Statistic;
import com.ibm.nmon.util.TimeFormatCache;
/**
 * Base class for programmatically defining a set of data. A definition can match any number of hosts (via
 * {@link DataSet DataSets}), {@link DataType DataTypes}, or fields. This class also supports renaming these values as
 * well. Finally, a {@link Statistic}, which defaults to <code>AVERAGE</code>, can be specified for clients that need
 * aggregated data.
 */
public abstract class DataDefinition {
    // statistic used when clients ask for aggregated data; never null
    private final Statistic stat;

    // format used when naming by date; never null (the setter applies the default)
    private SimpleDateFormat dateFormat = TimeFormatCache.DATETIME_FORMAT;

    private final boolean useSecondaryYAxis;

    /** A definition matching all hosts, all types and all fields. */
    // FIX: declared final -- a shared, public constant must not be reassignable
    public static final DataDefinition ALL_DATA = new DataDefinition() {
        public List<DataSet> getMatchingHosts(Collection<DataSet> toMatch) {
            return HostMatcher.ALL.getMatchingHosts(toMatch);
        }

        public List<DataType> getMatchingTypes(DataSet data) {
            return TypeMatcher.ALL.getMatchingTypes(data);
        }

        public List<String> getMatchingFields(DataType type) {
            return FieldMatcher.ALL.getMatchingFields(type);
        }
    };

    /** Creates a definition using the default statistic (AVERAGE) and the primary y-axis. */
    protected DataDefinition() {
        this(null, false);
    }

    /**
     * @param stat the statistic for aggregated data; <code>null</code> means {@link Statistic#AVERAGE}
     * @param useSecondaryYAxis <code>true</code> if this data should be plotted on the secondary y-axis
     */
    protected DataDefinition(Statistic stat, boolean useSecondaryYAxis) {
        if (stat == null) {
            stat = Statistic.AVERAGE;
        }

        this.stat = stat;
        this.useSecondaryYAxis = useSecondaryYAxis;
    }

    public final Statistic getStatistic() {
        return stat;
    }

    public final SimpleDateFormat getDateFormat() {
        return dateFormat;
    }

    /**
     * Set the date format for use when naming by date.
     *
     * @param dateFormat - the date format to use. Can include <code>HOST</code>, <code>TYPE</code> or
     *            <code>FIELD</code> to allow variable substitution of hostname, type or field, respectively. Note that
     *            these values <em>must</em> be enclosed by single quotes to be parsable by {@link SimpleDateFormat}
     *            (e.g. <code>'HOST'</code> or <code>'TYPE-FIELD'</code>). A <code>null</code> value resets the format
     *            to {@link TimeFormatCache#DATETIME_FORMAT}.
     *
     * @see NamingMode
     */
    public final void setDateFormat(SimpleDateFormat dateFormat) {
        if (dateFormat == null) {
            this.dateFormat = TimeFormatCache.DATETIME_FORMAT;
        }
        else {
            this.dateFormat = dateFormat;
        }
    }

    public boolean usesSecondaryYAxis() {
        return useSecondaryYAxis;
    }

    /**
     * Does the definition match the given host?
     *
     * @return <code>true</code>; by default matches all hosts
     */
    public boolean matchesHost(DataSet data) {
        return HostMatcher.ALL.matchesHost(data);
    }

    /**
     * Given a list of <code>DataSet</code>s, return a new list containing the ones that match this definition.
     */
    public abstract List<DataSet> getMatchingHosts(Collection<DataSet> toMatch);

    /**
     * Given a list of <code>DataType</code>s, return a new list containing the ones that match this definition.
     */
    public abstract List<DataType> getMatchingTypes(DataSet data);

    /**
     * Given a <code>DataType</code>, return a new list containing the fields that match this definition.
     */
    public abstract List<String> getMatchingFields(DataType type);

    /**
     * Get a new hostname for the given <code>DataSet</code>.
     *
     * @return {@link DataSet#getHostname()} by default
     */
    public String renameHost(DataSet data) {
        return data.getHostname();
    }

    /**
     * Get a new name for the given <code>DataType</code>.
     *
     * @return {@link DataType#toString()} by default
     */
    public String renameType(DataType type) {
        return type.toString();
    }

    /**
     * Get a new name for the given field.
     *
     * @return the same field name by default
     */
    public String renameField(String field) {
        return field;
    }
}
|
<reponame>fenderdigital/cpprestsdk<filename>Release/tests/functional/http/listener/stdafx.h
/***
* ==++==
*
* Copyright (c) Microsoft Corporation. All rights reserved.
*
* ==--==
* =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
*
* stdafx.h
*
* Pre-compiled headers
*
* =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
****/
#pragma once
#include "cpprest/asyncrt_utils.h"
#include "cpprest/filestream.h"
#include "cpprest/http_client.h"
#include "cpprest/http_listener.h"
#include "cpprest/producerconsumerstream.h"
#include "http_listener_tests.h"
#include "http_test_utilities.h"
#include "os_utilities.h"
#include "unittestpp.h"
#include <fstream>
|
<filename>TlsrSrc/tl_pvvx_ble_sdk/proj_lib/ble/service/hids.h
#pragma once
#include "../../../proj/tl_common.h"
/** @addtogroup TELINK_BLE_STACK TELINK BLE Stack
* @{
*/
/** @addtogroup SERVICE_MODULE Service
* @{
*/
/** @addtogroup HIDS_Module Hids
* @{
*/
/** @addtogroup Hids_Constant Hids Constants
* @{
*/
/**
* @brief Definition for Characteristics UUID
*/
/** @addtogroup hids_uuid Hids Charactersitc UUID
* @{
*/
#define CHARACTERISTIC_UUID_HID_BOOT_KEY_INPUT 0x2A22 //!< HID Boot Keyboard Input Report
#define CHARACTERISTIC_UUID_HID_BOOT_KEY_OUTPUT 0x2A32 //!< HID Boot Keyboard Output Report
#define CHARACTERISTIC_UUID_HID_BOOT_MOUSE_INPUT 0x2A33 //!< HID Boot Mouse Input Report
#define CHARACTERISTIC_UUID_HID_INFORMATION 0x2A4A //!< HID Information
#define CHARACTERISTIC_UUID_HID_REPORT_MAP 0x2A4B //!< HID Report Map
#define CHARACTERISTIC_UUID_HID_CONTROL_POINT 0x2A4C //!< HID Control Point
#define CHARACTERISTIC_UUID_HID_REPORT 0x2A4D //!< HID Report
#define CHARACTERISTIC_UUID_HID_PROTOCOL_MODE 0x2A4E //!< HID Protocol Mode
/** @} end of group hids_uuid */
/** @addtogroup HID_REPORT_ID Hid Report Id
* @{
*/
/**
* @brief HID Report ID
*/
#define HID_REPORT_ID_KEYBOARD_INPUT 1 //!< Keyboard input report ID
#define HID_REPORT_ID_CONSUME_CONTROL_INPUT 2 //!< Consumer Control input report ID
#define HID_REPORT_ID_MOUSE_INPUT 3 //!< Mouse input report ID
#define HID_REPORT_ID_GAMEPAD_INPUT 4 //!< Gamepad input report ID
#define HID_REPORT_ID_LED_OUT 0 //!< LED output report ID
#define HID_REPORT_ID_FEATURE 0 //!< Feature report ID
/** @} end of group HID_REPORT_ID */
/** @addtogroup HID_REPORT_TYPE Hid Report Type
* @{
*/
/**
* @brief HID Report type
*/
#define HID_REPORT_TYPE_INPUT 1
#define HID_REPORT_TYPE_OUTPUT 2
#define HID_REPORT_TYPE_FEATURE 3
/** @} end of group HID_REPORT_TYPE */
/** @addtogroup HID_PROTOCOL_MODE Hid Protocol Mode
* @{
*/
/**
* @brief Definition for HID protocol mode
*/
#define HID_PROTOCOL_MODE_BOOT 0
#define HID_PROTOCOL_MODE_REPORT 1
#define DFLT_HID_PROTOCOL_MODE HID_PROTOCOL_MODE_REPORT
/** @} end of group HID_PROTOCOL_MODE */
/** @addtogroup HID_INFOR_FLAGS Hid Information Flags
* @{
*/
/**
* @brief Definition for HID information flags
*/
#define HID_FLAGS_REMOTE_WAKE 0x01 // RemoteWake
#define HID_FLAGS_NORMALLY_CONNECTABLE 0x02 // NormallyConnectable
/** @} end of group HID_INFOR_FLAGS */
#define HID_KEYCODE_CC_RELEASE 0x00
#define HID_KEYCODE_CC_VOL_UP 0x01
#define HID_KEYCODE_CC_VOL_DN 0x02
/** @} end of group Hids_Constant */
/** @addtogroup Hids_Callbacks Hids Callbacks
* @{
*/
/**
 * @brief Argument type passed to the report-change callback function
 */
/* Describes one changed HID report: which report (id/type), how many bytes
 * are valid (len) and the data itself. value[1] looks like the old-style
 * variable-length-array idiom (struct allocated with room for len bytes) --
 * TODO confirm against the allocation site in hids.c. */
typedef struct {
	u8 id;       //!< report ID of the changed report (see HID_REPORT_ID_*)
	u8 type;     //!< report type (see HID_REPORT_TYPE_INPUT/OUTPUT/FEATURE)
	u8 len;      //!< number of valid bytes in value[]
	u8 value[1]; //!< first byte of the report data (len bytes in total)
} reportChange_t;
/**
 * @brief Argument type passed to the CCC (client characteristic configuration) change callback function
 */
/* Describes a change of a report's client characteristic configuration. */
typedef struct {
	u8 id;    //!< report ID the CCC belongs to
	u8 type;  //!< report type -- presumably one of HID_REPORT_TYPE_*; confirm in hids.c
	u8 value; //!< new CCC value (e.g. notification enable state) -- TODO confirm
} clientCharCfgChange_t;
/**
 * @brief Callback function type invoked when a report's client characteristic configuration (CCC) changes
 */
typedef void (*hids_clientCharCfgChangeCb_t)(clientCharCfgChange_t* cccVal);
/**
* @brief Definition report value changed callback function type for user application
*/
typedef void (*hids_reportChangeCb_t)(reportChange_t* reportVal);
/**
 * @brief Callback function type invoked when the HID protocol mode value changes
 */
typedef void (*hids_protoModeChangeCb_t)(u32 protoMode);
/**
 * @brief Callback function type invoked when the HID control point value changes
 */
typedef void (*hids_ctrlPointChangeCb_t)(u32 ctrlPoint);
/**
* @brief Definition for foundation command callbacks.
*/
/* Bundle of user-supplied callbacks registered through hids_addService().
 * Unused members can presumably be left NULL -- confirm in hids.c. */
typedef struct {
	hids_reportChangeCb_t reportChangeCbFunc;        //!< report value changed callback function
	hids_protoModeChangeCb_t protoModeChangeCbFunc;  //!< protocol mode value changed callback function
	hids_ctrlPointChangeCb_t ctrlPointChangeCbFunc;  //!< control point value changed callback function
	hids_clientCharCfgChangeCb_t cccChangedCbFunc;   //!< ccc of report changed callback function
} hids_callbacks_t;
/** @} end of group Hids_Callbacks */
/** @addtogroup HIDS_Variables Hids Variables
* @{
*/
/**
* @brief External variable for HID service Attribute tables
*/
extern attribute_t hid_attrTbl[];
/**
* @brief External variable for HID service attribute size
*/
extern u16 hid_attrSize;
/**
* @brief External variable for HID information
*/
extern const u8 hidInformation[];
/** @} end of group HIDS_Variables */
/** @addtogroup HIDS_Functions Hids APIs
* @{
*/
/**
* @brief API to add HID service to gatt.
*
* @param[in] hidsCb - The callback function of HID service
*
* @return Status
*/
ble_sts_t hids_addService(hids_callbacks_t* hidsCb);
ble_sts_t hids_keyInReport(u16 connHandle, u8* value, u8 len);
ble_sts_t hids_consumerControlInReport(u16 connHandle, u8* value, u8 len);
/** @} end of group HIDS_Functions */
/** @} end of group HIDS_Module */
/** @} end of group SERVICE_MODULE */
/** @} end of group TELINK_BLE_STACK */
|
// Package export provided parsing and analysis methods for Apple Health Expored Data.
package export
import (
"encoding/xml"
"io/ioutil"
)
// Parse reads the Apple HealthKit export XML at fileName and unmarshals it
// into a HealthData value, returning a pointer to it, or an error if the file
// cannot be read or the XML cannot be parsed.
func Parse(fileName string) (*HealthData, error) {
	raw, err := ioutil.ReadFile(fileName)
	if err != nil {
		// export file missing or unreadable
		return nil, err
	}

	data := new(HealthData)
	if err := xml.Unmarshal(raw, data); err != nil {
		// malformed export XML
		return nil, err
	}
	return data, nil
}
|
package cmd
/*
Copyright © 2021 strixeye <EMAIL>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import (
"bytes"
"context"
"io"
"io/ioutil"
"log"
"net"
"net/http"
"time"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/strixeyecom/gniffer/pkg/sniff"
)
// clientTimeout is the per-request timeout (seconds) for replayed requests.
const clientTimeout = 20

// MaxWorkers is the number of goroutines draining the replay queue (2000).
const MaxWorkers = 1e3 * 2

// MaxIdleConnectionsPerHost lets every worker keep an idle connection to the target host.
const MaxIdleConnectionsPerHost = MaxWorkers

// client is the single HTTP client shared by all replay workers.
// nolint: gochecknoglobals // because we want all workers to share the same client and using a struct is
// overkill.
var client = &http.Client{
	Transport: &http.Transport{
		MaxIdleConnsPerHost: MaxIdleConnectionsPerHost,
	},
	Timeout: time.Second * clientTimeout,
}
// worker drains c, replaying each request with the shared client until ctx is
// cancelled. Response bodies are fully consumed and closed so the underlying
// connections can be reused.
func worker(ctx context.Context, c chan *http.Request) {
	for {
		select {
		case <-ctx.Done():
			return
		case req := <-c:
			resp, err := client.Do(req)
			if err != nil {
				// BUG FIX: a single failed replay used to panic and kill the
				// whole process; a transient network error should not be
				// fatal for a best-effort traffic duplicator -- log and
				// keep serving instead.
				log.Printf("replay request failed: %v", err)
				continue
			}
			// drain the body so the keep-alive connection can be reused
			_, _ = io.Copy(ioutil.Discard, resp.Body)
			if err := resp.Body.Close(); err != nil {
				log.Printf("closing response body failed: %v", err)
			}
		}
	}
}
// proxyCmd represents the proxy command.
// It decodes the viper-bound configuration into a sniff.ProxyCfg and runs the
// sniffing proxy until it stops or fails.
var proxyCmd = &cobra.Command{
	Use:   "proxy",
	Short: "copy and redirect sniffed requests",
	Long: `proxy command copies the sniffed request and sends to given target server,
without changing the host headers`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// pull all bound flags / env values into the proxy configuration
		var proxyCfg sniff.ProxyCfg
		err := viper.Unmarshal(&proxyCfg)
		if err != nil {
			return err
		}

		// cancelled automatically when RunE returns
		sniffingCtx, cancelSniffing := context.WithCancel(context.Background())
		defer cancelSniffing()

		err = RunProxy(sniffingCtx, &proxyCfg)
		if err != nil {
			return errors.Wrap(err, "failed to add handler")
		}

		return nil
	},
}
// RunProxy starts MaxWorkers replay workers, registers a sniffer handler that
// feeds matching requests to them, and blocks in sniffer.Run until ctx is
// done or the sniffer fails.
//
// It returns an error when the target port is missing, the handler cannot be
// registered, or the sniffer stops with an error.
func RunProxy(ctx context.Context, proxyCfg *sniff.ProxyCfg) error {
	if proxyCfg.TargetPort == "" {
		return errors.New("target port is required")
	}
	sniffer := sniff.New(proxyCfg.Cfg)

	// unbuffered: the handler blocks until a worker is free to take the request
	requestChan := make(chan *http.Request)
	for i := 0; i < MaxWorkers; i++ {
		go worker(ctx, requestChan)
	}

	// add logging handler
	err := sniffer.AddHandler(
		func(ctx context.Context, req *http.Request) error {
			return handlerFunc(ctx, req, proxyCfg, requestChan)
		},
	)
	if err != nil {
		return errors.Wrap(err, "failed to add handler")
	}

	log.Printf(
		"proxying %s %s requests to %s://%s:%s", proxyCfg.Cfg.InterfaceName,
		proxyCfg.HTTPFilter.Hostname, proxyCfg.TargetProtocol,
		proxyCfg.TargetHost, proxyCfg.TargetPort,
	)

	// blocks until ctx is cancelled or sniffing fails
	if err := sniffer.Run(ctx); err != nil {
		return errors.Wrap(err, "can not run sniffer")
	}

	return nil
}
// handlerFunc filters sniffed requests, clones each matching one, rewrites
// the clone to point at the configured target, optionally records the original
// client address in X-Forwarded-* / Gniffer-Connecting-* headers, and queues
// the clone on requestChan for a worker to replay.
func handlerFunc(
	ctx context.Context, req *http.Request, proxyCfg *sniff.ProxyCfg, requestChan chan *http.Request,
) error {
	if proxyCfg.HTTPFilter != nil {
		if !proxyCfg.HTTPFilter.Match(req) {
			// not a request we were asked to duplicate
			return nil
		}
	}

	dupReq := req.Clone(ctx)

	// modify request so that it goes to the target server but still has the original headers
	dupReq.URL.Scheme = proxyCfg.TargetProtocol
	dupReq.URL.Host = proxyCfg.TargetHost + ":" + proxyCfg.TargetPort

	// request uri is handled by the client library
	dupReq.RequestURI = ""

	// add original client information to x- headers while proxying
	ip, port, err := net.SplitHostPort(req.RemoteAddr)
	// BUG FIX: the split error was previously checked only inside the
	// AppendXFF branch, so with AppendXFF=false and EnableOriginHeaders=true
	// the (possibly empty/garbage) ip and port were used unchecked.
	if err != nil && (proxyCfg.AppendXFF || proxyCfg.EnableOriginHeaders) {
		return err
	}

	if proxyCfg.AppendXFF {
		dupReq.Header.Add("X-Forwarded-For", ip)
		dupReq.Header.Set("X-Forwarded-Port", port)
	}

	if proxyCfg.EnableOriginHeaders {
		dupReq.Header.Set("Gniffer-Connecting-Ip", ip)
		dupReq.Header.Set("Gniffer-Connecting-Port", port)
	}

	// should copy the body because the original request body will be emptied
	body, err := ioutil.ReadAll(req.Body)
	if err == nil {
		dupReq.Body = ioutil.NopCloser(bytes.NewReader(body))
	}

	req.Header.Set("Connection", "close")
	req.Close = true

	// hand off to a replay worker; blocks until one is free
	requestChan <- dupReq

	return nil
}
// init registers the proxy subcommand on sniffCmd and binds its persistent
// flags to viper keys; any binding failure aborts startup, as before.
func init() {
	sniffCmd.AddCommand(proxyCmd)

	// must preserves the original behaviour: a failed viper binding is fatal.
	must := func(err error) {
		if err != nil {
			log.Fatal(err)
		}
	}

	// Cobra supports Persistent Flags which will work for this command
	// and all subcommands, e.g.:
	flags := proxyCmd.PersistentFlags()

	flags.Int("target-port", 80, "target location's port")
	must(viper.BindPFlag("TARGET_PORT", flags.Lookup("target-port")))

	flags.Bool("append-xff", false, "append xff header to the request")
	must(viper.BindPFlag("APPEND_XFF", flags.Lookup("append-xff")))

	flags.Bool(
		"enable-origin-headers", true, "set gniffer-connecting-ip and gniffer-connecting-port headers",
	)
	must(viper.BindPFlag("ENABLE_ORIGIN_HEADERS", flags.Lookup("enable-origin-headers")))

	flags.String("target-host", "localhost", "target location's host")
	must(viper.BindPFlag("TARGET_HOST", flags.Lookup("target-host")))

	flags.String("target-protocol", "http", "target location's protocol")
	must(viper.BindPFlag("TARGET_PROTOCOL", flags.Lookup("target-protocol")))
}
|
class BookManager:
    """Keeps a collection of book editions and selects a preferred one."""

    def __init__(self):
        # All registered editions, in insertion order.
        self.editions = []

    def addEdition(self, languages):
        """Create an Edition for ``languages``, register it and return it."""
        created = Edition(languages)
        self.editions.append(created)
        return created

    def _select(self):
        """Return ``(preferred_edition, acceptable_editions)``.

        The last registered edition tagged ``'lang:fi'`` is preferred (or
        ``None`` if there is none); all other editions are acceptable, in
        registration order.
        """
        preferred = None
        acceptable = []
        for candidate in self.editions:
            if 'lang:fi' in candidate.languages:
                preferred = candidate
            else:
                acceptable.append(candidate)
        return preferred, acceptable
class Edition:
    """A single edition of a book, tagged with language codes."""

    def __init__(self, languages):
        # Stored as given (e.g. ['lang:en']); no defensive copy is made.
        self.languages = languages
# Create a BookManager instance
manager = BookManager()
# Add editions for different languages
english_edition = manager.addEdition(['lang:en'])
finnish_edition = manager.addEdition(['lang:fi'])
french_edition = manager.addEdition(['lang:fr'])
# Select preferred and acceptable editions based on language preferences
preferred_edition, acceptable_editions = manager._select()
print(preferred_edition)  # prints the repr of finnish_edition (the 'lang:fi' edition)
print(acceptable_editions)  # prints reprs of [english_edition, french_edition]
<reponame>srcole/fxml
"""Plot fxdata that was formatted in format_fxdata.py"""
import numpy as np
import matplotlib.pyplot as plt
import h5py
# User input
# Default output filepath and name in scrape_forexite.py
fipath = 'C:/gh/data/fx/format/'
finame = fipath + '2010_01.hdf5'

# Import data produced by format_fxdata.py
with h5py.File(finame, 'r') as fi:
    opens = fi['opens'][:]
    dates_tstart = fi['dates_tstart'][:]
    dates_tend = fi['dates_tend'][:]
    dates = fi['dates'][:]

# Format dates to readable strings
# BUG FIX: np.int was deprecated in NumPy 1.20 and removed in NumPy 1.24;
# the builtin int is the documented replacement and behaves identically here.
dates = dates.astype(int)
dates_str = np.zeros(len(dates), dtype=object)
for d in range(len(dates)):
    # assumes each row of `dates` is (year, month, day, ...) -- TODO confirm
    # against the writer in format_fxdata.py
    dates_str[d] = str(dates[d, 0]) + '-' + \
        str(dates[d, 1]) + '-' + str(dates[d, 2])

# Plot EUR/USD
EURUSD_idx = 0  # presumably EUR/USD is the first pair in 'opens' -- confirm ordering
t = np.arange(dates_tend[-1])
plt.figure()
plt.plot(t, opens[EURUSD_idx, :])
plt.xticks(dates_tstart, dates_str)
plt.xlabel('Date')
plt.ylabel('EURUSD')
|
#!/bin/bash
# strict mode http://redsymbol.net/articles/unofficial-bash-strict-mode/
set -euo pipefail
IFS=$'\n\t'
# Run eslint with the repo's .eslintrc on the given paths (default: the
# current directory), covering .js, .jsx and .es6 files.
function lint(){
    eslint --no-eslintrc --config .eslintrc "${@-.}" --ext .jsx --ext .js --ext .es6
}
# Exit non-zero (aborting under `set -e`) if the work tree has unstaged changes.
function git_require_clean_work_tree(){
    git diff --exit-code
}
# Echo the changelog file name: the first file whose name begins with
# "change" or "history" (case-insensitive), falling back to CHANGELOG.md.
function find_changelog_file(){
    local CHANGELOG
    CHANGELOG="$(ls | egrep '^(change|history)' -i | head -n1)"
    if test "$CHANGELOG" = ""; then
        CHANGELOG="CHANGELOG.md"
    fi
    echo $CHANGELOG
}
# Print the current package version as a git tag name: "v" + the version
# reported by `npm version` for this package (looked up by its npm name).
function find_last_git_tag(){
    node -pe "a=$(npm version); 'v' + a['$(npm show . name)']"
}
# based on https://github.com/tj/git-extras/blob/master/bin/git-changelog
# Prepend a dated markdown section of commit subjects to the changelog file
# (commits of the latest tag, or the whole log when no previous tag exists),
# then open the result in $EDITOR for manual touch-up.
function generate_git_changelog(){
    GIT_LOG_OPTS="--no-merges"
    local DATE
    DATE=$(date +'%Y-%m-%d')
    local HEAD='## '
    # get the commits between the most recent tag and the second most recent
    local lasttag
    lasttag=$(find_last_git_tag)
    local version
    version=$(git describe --tags --abbrev=0 "$lasttag" 2>/dev/null)
    local previous_version
    previous_version=$(git describe --tags --abbrev=0 "$lasttag^" 2>/dev/null)
    # if we don't have a previous version to look at
    if test -z "$version"; then
        local head="$HEAD$DATE"
        local changes
        changes=$(git log $GIT_LOG_OPTS --pretty="format:* %s%n" 2>/dev/null)
    # the more common case: there's a version to get the changes between
    else
        local head="$HEAD$version | $DATE"
        # tail removes the first line, which will always just be the version commit
        # awk removes empty lines
        local changes
        changes=$(tail -n +2 <<< "$(git log $GIT_LOG_OPTS --pretty="format:* %s%n" "$previous_version..$version" 2>/dev/null)" | awk NF)
    fi
    local CHANGELOG
    CHANGELOG=$(find_changelog_file)
    echo "Editing $CHANGELOG"
    # insert the changes after the header (assumes markdown)
    # this shells out to node b/c I couldn't figure out how to do it with awk
    local tmp_changelog=/tmp/changelog
    node -e "console.log(require('fs').readFileSync(process.argv[1]).toString().replace(/(#.*?\n\n)/, '\$1' + process.argv.slice(2).join('\n') + '\n\n'))" "$CHANGELOG" "$head" "$changes" > $tmp_changelog
    # open the changelog in the editor for editing
    ${EDITOR:-'vi'} $tmp_changelog
    mv $tmp_changelog "$CHANGELOG"
}
# Amend the release commit with the edited changelog and re-tag it, using the
# changelog additions as the annotated tag message.
function git_ammend_tag(){
    local changelog_file
    local changes
    # BUG FIX: changelog_file was read by the `git diff` below BEFORE it was
    # assigned, so the diff always ran against an empty path and `changes`
    # was always empty. Assign it first.
    changelog_file="$(find_changelog_file)"
    # the added lines of the changelog edit (strip the leading '+')
    changes=$(git diff --minimal --diff-filter=M --unified=0 --color=never "$changelog_file" | grep '^\+' | egrep -v '^\+\+' | cut -c 2-)
    git add "$changelog_file"
    git commit --amend --no-edit --no-verify
    git tag "$(find_last_git_tag)" -f -a -m "$changes"
}
# Release the package: bump the npm version, regenerate the changelog, amend
# the release tag, push, and publish.
#   $1 - npm publish access level (default: public)
#   $2 - semver bump passed to `npm version` (default: patch)
function npm_release(){
    local access="${1-public}"
    local version="${2-patch}"
    npm version "$version" && generate_git_changelog && git_ammend_tag && npm run gitPush && npm publish --access "$access"
}
|
import java.util.Arrays;
public class maximumAndMinimum {
    /**
     * Returns the maximum and minimum of the first {@code arrSize} elements of
     * {@code inputArray} as {@code {max, min}}.
     *
     * <p>An empty array ({@code arrSize == 0}) yields {@code {0, 0}}, preserving
     * the original contract.
     *
     * @param inputArray the values to scan (not modified)
     * @param arrSize    number of leading elements to consider
     * @return a two-element array: index 0 is the maximum, index 1 the minimum
     */
    public static int[] maxMin(int inputArray[], int arrSize)
    {
        int max, min;

        if (arrSize == 0) {
            // Preserve the historical behavior for empty input.
            max = 0;
            min = 0;
        } else {
            // BUG FIX: the previous implementation performed a single
            // bubble-sort pass, which guarantees the maximum ends up last but
            // NOT that the minimum ends up first (e.g. {2, 5, 3, 1, 7}
            // reported min == 2 instead of 1). It also mutated the caller's
            // array. A linear scan finds both correctly without side effects.
            max = inputArray[0];
            min = inputArray[0];
            for (int i = 1; i < arrSize; i++) {
                if (inputArray[i] > max) {
                    max = inputArray[i];
                }
                if (inputArray[i] < min) {
                    min = inputArray[i];
                }
            }
        }

        // Store maximum and minimum elements in an array and return the array.
        int outputArray[] = {max, min};
        return outputArray;
    }

    public static void main(String[] args)
    {
        int inputArray[] = {2, 5, 3, 1, 7};
        int arrSize = inputArray.length;
        int[] outputArray = maxMin(inputArray, arrSize);
        System.out.println("Maximum element: " + outputArray[0]);
        System.out.println("Minimum element: " + outputArray[1]);
    }
}
#!/bin/bash

# Container entrypoint: restarts sshd, then (if $USER is set) creates that
# user account with the requested uid/groups/shell, exports selected CUDA /
# Jupyter variables to /etc/profile, and execs the container command as the
# user via gosu. With no $USER, it simply execs the command.

# start ssh
service ssh restart

# Create user account
if [ -n "$USER" ]; then
    if [ -z "$USER_HOME" ]; then
        export USER_HOME=/home/$USER
    fi
    if [ -z "$USER_ID" ]; then
        export USER_ID=99
    fi
    if [ -n "$USER_ENCRYPTED_PASSWORD" ]; then
        useradd -M -d $USER_HOME -p $USER_ENCRYPTED_PASSWORD -u $USER_ID $USER > /dev/null
    else
        useradd -M -d $USER_HOME -u $USER_ID $USER > /dev/null
    fi

    # expects a comma-separated string of the form GROUP1:GROUP1ID,GROUP2,GROUP3:GROUP3ID,...
    # (the GROUPID is optional, but needs to be separated from the group name by a ':')
    for i in $(echo $USER_GROUPS | sed "s/,/ /g")
    do
        if [[ $i == *":"* ]]
        then
            addgroup ${i%:*} # > /dev/null
            groupmod -g ${i#*:} ${i%:*} #> /dev/null
            adduser $USER ${i%:*} #> /dev/null
        else
            addgroup $i > /dev/null
            adduser $USER $i > /dev/null
        fi
    done

    # set correct primary group (the first entry of USER_GROUPS)
    if [ -n "$USER_GROUPS" ]; then
        group="$( cut -d ',' -f 1 <<< "$USER_GROUPS" )"
        if [[ $group == *":"* ]]
        then
            # NOTE(review): usermod is backgrounded with '&'; this can race
            # with the exec below -- confirm whether that is intentional.
            usermod -g ${group%:*} $USER &
        else
            usermod -g $group $USER &
        fi
    fi

    # set shell
    if [ -z "$USER_SHELL" ]
    then
        usermod -s "/bin/bash" $USER
    else
        usermod -s $USER_SHELL $USER
    fi

    # BUG FIX: '[ -n $CWD ]' is a one-argument test that is ALWAYS true when
    # CWD is unset/empty, so 'cd $CWD' ran with no argument and jumped to
    # $HOME. Quoting the variable makes the test behave as intended.
    if [ -n "$CWD" ]; then cd "$CWD"; fi

    echo "Running as user $USER"

    # Add more variables as desired
    echo "export CUDNN_VERSION=$CUDNN_VERSION
export NVIDIA_REQUIRE_CUDA=$NVIDIA_REQUIRE_CUDA
export LIBRARY_PATH=$LIBRARY_PATH
export LD_PRELOAD=$LD_PRELOAD
export NVIDIA_VISIBLE_DEVICES=$NVIDIA_VISIBLE_DEVICES
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH
export JUPYTERPASS=$JUPYTERPASS
export NVIDIA_DRIVER_CAPABILITIES=$NVIDIA_DRIVER_CAPABILITIES
export PATH=$PATH
export CUDA_PKG_VERSION=$CUDA_PKG_VERSION
export JUPYTER_CONFIG_DIR=$JUPYTER_CONFIG_DIR" >> /etc/profile

    chown -R $USER:$USER /root/conf.json

    # Make sure we run the container as a USER account and not Root
    # Why gosu? Gosu handles TTY better than default su. We spawn an entire different USER kernel in which we run the CMD ("$@").
    # This will ensure we do not natively enter the docker container as root.
    exec gosu $USER "$@"
else
    # Same quoting fix as above for the no-user path.
    if [ -n "$CWD" ]; then cd "$CWD"; fi
    echo "Running as default container user"
    exec "$@"
fi
|
// The `explorer` module defines the directory-exploration API.
pub mod explorer {
    /// API for exploring a directory tree.
    pub trait ExplorerApi {
        /// Returns the names of files and subdirectories under `directory_path`.
        fn explore_directory(&self, directory_path: &str) -> Vec<String>;
    }

    /// Default explorer implementation.
    pub struct Explorer;

    impl ExplorerApi for Explorer {
        fn explore_directory(&self, _directory_path: &str) -> Vec<String> {
            // Placeholder listing for demonstration; a real implementation
            // would read the file system. The path is intentionally unused.
            ["file1.txt", "subdir1", "file2.txt"]
                .iter()
                .map(|entry| entry.to_string())
                .collect()
        }
    }
}
// The `system` module defines the system-information API.
pub mod system {
    /// API for querying basic system information.
    pub trait SystemApi {
        /// Returns a snapshot of system properties.
        fn get_system_info(&self) -> SystemInfo;
    }

    /// Default system-information provider.
    pub struct System;

    /// Basic facts about the host system.
    pub struct SystemInfo {
        /// Operating-system family name.
        pub os_type: String,
        /// CPU architecture identifier.
        pub architecture: String,
        /// Available memory (placeholder value; units unspecified here).
        pub available_memory: u64,
    }

    impl SystemApi for System {
        fn get_system_info(&self) -> SystemInfo {
            // Placeholder data for demonstration; a real implementation would
            // query the operating system.
            let os_type = String::from("Linux");
            let architecture = String::from("x86_64");
            SystemInfo {
                os_type,
                architecture,
                available_memory: 8192,
            }
        }
    }
}
#!/bin/bash -l
#SBATCH --time=0-80:00:00 --mem-per-cpu=3000
#SBATCH -o ./logs/job-%a.out
#SBATCH --array=407
# Slurm array job: runs one MATLAB simulation per array task, seeded by the
# task ID so each task is distinct and reproducible.
module load matlab
# scratch directory handed to the MATLAB routine
workfolder="/scratch/work/pajunel2/"
# one output .mat file per array task
outputfile="saved_outputs/velocity_full_tdesign_first_order_noscat_full_$SLURM_ARRAY_TASK_ID.mat"
N_diffuse_repetitions=20
setting="tdesign_first_order_velocity_noscat"
# RNG seed = array task ID
rng_seed=$SLURM_ARRAY_TASK_ID
# create the output file up front -- presumably so the path exists before
# MATLAB writes to it; TODO confirm
touch $outputfile
# -nojvm: headless MATLAB; exit(0) ends the session after the run completes
matlab -nojvm -r "savePressureFieldsTriton('$workfolder','$outputfile',$N_diffuse_repetitions,'$setting',$rng_seed); exit(0)"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.