text stringlengths 1 1.05M |
|---|
<reponame>AlexJenter/mr-anderson
import nullMatrix from "./null";
import map from "./map";
/**
 * Build an identity matrix: ones on the main diagonal, zeros elsewhere.
 * @param {number} numRows - row count of the result
 * @param {number} numCols - column count of the result
 * @returns a matrix (as produced by nullMatrix/map) with 1 where row === col
 */
const identity = (numRows, numCols) => {
  const isOnDiagonal = (row, col) => row === col;
  return map(nullMatrix(numRows, numCols), (_, row, col) =>
    Number(isOnDiagonal(row, col))
  );
};
export default identity;
|
# CLI support for JIRA interaction
#
# See README.md for details
# Open JIRA from the command line: create issues, open the dashboard,
# run assigned/reported queries, or jump to a specific issue.
#
# Action resolution order: $1, ./.jira-default-action,
# ~/.jira-default-action, $JIRA_DEFAULT_ACTION, fallback "new".
function jira() {
emulate -L zsh
local action jira_url jira_prefix
if [[ -n "$1" ]]; then
action=$1
elif [[ -f .jira-default-action ]]; then
action=$(cat .jira-default-action)
elif [[ -f ~/.jira-default-action ]]; then
action=$(cat ~/.jira-default-action)
elif [[ -n "${JIRA_DEFAULT_ACTION}" ]]; then
action=${JIRA_DEFAULT_ACTION}
else
action="new"
fi
# Base URL is mandatory: ./.jira-url, ~/.jira-url, then $JIRA_URL.
# NOTE: jira_url is a zsh local here, but _jira_query reads it via
# zsh's dynamic scoping when invoked below.
if [[ -f .jira-url ]]; then
jira_url=$(cat .jira-url)
elif [[ -f ~/.jira-url ]]; then
jira_url=$(cat ~/.jira-url)
elif [[ -n "${JIRA_URL}" ]]; then
jira_url=${JIRA_URL}
else
_jira_url_help
return 1
fi
# Optional project prefix prepended to bare issue ids (e.g. "PROJ-").
if [[ -f .jira-prefix ]]; then
jira_prefix=$(cat .jira-prefix)
elif [[ -f ~/.jira-prefix ]]; then
jira_prefix=$(cat ~/.jira-prefix)
elif [[ -n "${JIRA_PREFIX}" ]]; then
jira_prefix=${JIRA_PREFIX}
else
jira_prefix=""
fi
# Dispatch on the requested action.
if [[ $action == "new" ]]; then
echo "Opening new issue"
open_command "${jira_url}/secure/CreateIssue!default.jspa"
elif [[ "$action" == "assigned" || "$action" == "reported" ]]; then
_jira_query $@
elif [[ "$action" == "dashboard" ]]; then
echo "Opening dashboard"
if [[ "$JIRA_RAPID_BOARD" == "true" ]]; then
open_command "${jira_url}/secure/RapidBoard.jspa"
else
open_command "${jira_url}/secure/Dashboard.jspa"
fi
elif [[ "$action" == "dumpconfig" ]]; then
echo "JIRA_URL=$jira_url"
echo "JIRA_PREFIX=$jira_prefix"
echo "JIRA_NAME=$JIRA_NAME"
echo "JIRA_RAPID_BOARD=$JIRA_RAPID_BOARD"
echo "JIRA_DEFAULT_ACTION=$JIRA_DEFAULT_ACTION"
else
# Anything that doesn't match a special action is considered an issue name,
# but `br` is a special case that uses the current git branch as the issue id.
if [[ "$action" == "br" ]]; then
local issue_arg=$(git rev-parse --abbrev-ref HEAD)
local issue="${jira_prefix}${issue_arg}"
else
local issue_arg=$action
local issue="${jira_prefix}${issue_arg}"
fi
# "m" as the second argument jumps straight to the add-comment anchor.
local url_fragment=''
if [[ "$2" == "m" ]]; then
url_fragment="#add-comment"
echo "Add comment to issue #$issue"
else
echo "Opening issue #$issue"
fi
if [[ "$JIRA_RAPID_BOARD" == "true" ]]; then
open_command "${jira_url}/issues/${issue}${url_fragment}"
else
open_command "${jira_url}/browse/${issue}${url_fragment}"
fi
fi
}
# Print configuration help when no JIRA base URL could be found.
# (Heredoc body is user-facing output — do not edit casually.)
function _jira_url_help() {
cat << EOF
error: JIRA URL is not specified anywhere.
Valid options, in order of precedence:
.jira-url file
\$HOME/.jira-url file
\$JIRA_URL environment variable
EOF
}
# Open a JQL search for unresolved issues assigned-to / reported-by a user.
# $1: "assigned" or "reported"; $2 (optional): JIRA username, defaults to
# $JIRA_NAME. Reads $jira_url from the calling jira() via dynamic scoping.
function _jira_query() {
emulate -L zsh
local verb="$1"
local jira_name lookup preposition query
if [[ "${verb}" == "reported" ]]; then
lookup=reporter
preposition=by
elif [[ "${verb}" == "assigned" ]]; then
lookup=assignee
preposition=to
else
echo "error: not a valid lookup: $verb" >&2
return 1
fi
# NOTE(review): ${2:=...} also ASSIGNS the default back to $2 as a side
# effect; ${2:-$JIRA_NAME} looks like the real intent — confirm.
jira_name=${2:=$JIRA_NAME}
if [[ -z $jira_name ]]; then
echo "error: JIRA_NAME not specified" >&2
return 1
fi
echo "Browsing issues ${verb} ${preposition} ${jira_name}"
# URL-encoded JQL: <lookup> = "<name>" AND resolution = unresolved
# ORDER BY priority DESC, created ASC
query="${lookup}+%3D+%22${jira_name}%22+AND+resolution+%3D+unresolved+ORDER+BY+priority+DESC%2C+created+ASC"
open_command "${jira_url}/secure/IssueNavigator.jspa?reset=true&jqlQuery=${query}"
}
|
/**
* @file serial_transport_win32.c
* @brief Mercury API - Serial transport over local serial port on Win32
* @author <NAME>
* @date 10/20/2009
*/
/*
* Copyright (c) 2010 ThingMagic, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "tm_config.h"
#include "tmr_status.h"
#if defined(WIN32) || defined(WINCE)
#if !defined(_WINSOCK2API_) && !defined(_WINSOCKAPI_)
#include <winsock2.h>
#endif
#include <stdio.h>
#ifndef __GNUC__
# if defined(WINCE)
# define snprintf _snprintf
# else
# define snprintf sprintf_s
# endif
#endif /* __GNUC__ */
#endif /* WIN32 */
#include "tm_reader.h"
/**
 * Open the serial device named in the context and configure it for
 * 8-N-1 framing with all flow control disabled.
 *
 * @param this transport whose cookie is a TMR_SR_SerialPortNativeContext
 * @return TMR_SUCCESS, or TMR_ERROR_COMM_ERRNO(GetLastError()) on failure
 */
__declspec(dllexport) TMR_Status
s_open(TMR_SR_SerialTransport *this)
{
  TMR_SR_SerialPortNativeContext *c;
  COMMTIMEOUTS timeOuts;
  DCB dcb;

  c = this->cookie;

  /* Open for exclusive read/write access on the existing device node. */
  c->handle = CreateFile((TCHAR*)c->devicename,
                         GENERIC_READ | GENERIC_WRITE,
                         0,             /* not shared */
                         NULL,          /* default security */
                         OPEN_EXISTING, /* don't create a new file */
                         0,             /* flags and attributes */
                         NULL);         /* template file */
  if (INVALID_HANDLE_VALUE == c->handle)
    return TMR_ERROR_COMM_ERRNO(GetLastError());

  /* Interval timeout of MAXDWORD with zero total read timeouts makes
   * reads return immediately with whatever is buffered; writes get a
   * 5 second default timeout (overridden per-call in s_sendBytes). */
  timeOuts.ReadIntervalTimeout = 0xFFFFFFFF;
  timeOuts.ReadTotalTimeoutConstant = 0;
  timeOuts.ReadTotalTimeoutMultiplier = 0;
  timeOuts.WriteTotalTimeoutConstant = 5000;
  timeOuts.WriteTotalTimeoutMultiplier = 0;
  SetCommTimeouts(c->handle, &timeOuts);

  /* Start from the current state, then force 8-N-1, no flow control. */
  dcb.DCBlength = sizeof(DCB);
  GetCommState(c->handle, &dcb);
  dcb.fOutxCtsFlow = 0;
  dcb.fOutxDsrFlow = 0;
  dcb.fDtrControl = DTR_CONTROL_DISABLE;
  dcb.fDsrSensitivity = 0;
  dcb.fOutX = 0;
  dcb.fInX = 0;
  dcb.fNull = 0;
  dcb.fRtsControl = RTS_CONTROL_DISABLE;
  dcb.ByteSize = 8;
  dcb.fParity = 0;        /* disable parity checking */
  dcb.Parity = NOPARITY;  /* BUG FIX: NOPARITY belongs in the Parity byte;
                           * it was previously assigned to the fParity flag,
                           * leaving the parity mode at whatever
                           * GetCommState() returned. */
  dcb.StopBits = ONESTOPBIT;

  if ((0 == SetCommState(c->handle, &dcb)) ||
      (0 == SetupComm(c->handle, 10000, 10000)))
  {
    return TMR_ERROR_COMM_ERRNO(GetLastError());
  }

  return TMR_SUCCESS;
}
/**
 * Write @length bytes from @message to the port, applying @timeoutMs as
 * the total write timeout for this call.
 *
 * @return TMR_SUCCESS, or TMR_ERROR_COMM_ERRNO(GetLastError()) on failure
 */
__declspec(dllexport) TMR_Status
s_sendBytes(TMR_SR_SerialTransport *this, uint32_t length,
            uint8_t* message, const uint32_t timeoutMs)
{
  TMR_SR_SerialPortNativeContext *c;
  BOOL writeStatus;
  COMMTIMEOUTS timeOuts;
  /* BUG FIX: WriteFile's out-parameter is LPDWORD; this was declared as
   * `long`, a signedness/type mismatch. */
  DWORD numberOfBytesWritten;

  c = this->cookie;

  /* Apply the caller's timeout to this write only. */
  GetCommTimeouts(c->handle, &timeOuts);
  timeOuts.WriteTotalTimeoutConstant = timeoutMs;
  SetCommTimeouts(c->handle, &timeOuts);

  writeStatus = WriteFile(c->handle, message, length,
                          &numberOfBytesWritten, NULL);
  if (0 == writeStatus)
  {
    return TMR_ERROR_COMM_ERRNO(GetLastError());
  }

  return TMR_SUCCESS;
}
/**
 * Read exactly @length bytes into @message, applying @timeoutMs as the
 * total read timeout per ReadFile call. *messageLength accumulates the
 * bytes actually read, including on the error paths, so callers can see
 * how much arrived before a failure.
 *
 * @return TMR_SUCCESS, TMR_ERROR_TIMEOUT if a read returns no data, or
 *         TMR_ERROR_COMM_ERRNO(GetLastError()) on an I/O error
 */
__declspec(dllexport) TMR_Status
s_receiveBytes(TMR_SR_SerialTransport *this, uint32_t length,
uint32_t* messageLength, uint8_t* message, const uint32_t
timeoutMs)
{
DWORD readLength;
DWORD errorFlags;
COMSTAT comStat;
TMR_SR_SerialPortNativeContext *c;
BOOL readStatus;
COMMTIMEOUTS timeOuts;
c = this->cookie;
*messageLength=0;
/* Apply the caller's timeout to reads issued in the loop below. */
GetCommTimeouts(c->handle, &timeOuts);
timeOuts.ReadTotalTimeoutConstant = timeoutMs;
SetCommTimeouts(c->handle, &timeOuts);
/* Clear any pending error condition before reading. */
ClearCommError(c->handle, &errorFlags, &comStat);
/* Loop until the requested byte count has been read; ReadFile may
 * return fewer bytes than requested per call. */
while (length > 0)
{
readLength=0;
readStatus = ReadFile(c->handle, message, length, &readLength, NULL);
*messageLength += readLength;
if (0 == readStatus)
{
return TMR_ERROR_COMM_ERRNO(GetLastError());
}
/* A successful read of zero bytes means the timeout expired. */
if(readLength == 0)
{
return TMR_ERROR_TIMEOUT;
}
length -= readLength;
message += readLength;
}
return TMR_SUCCESS;
}
/**
 * Change the port's baud rate, leaving every other comm setting as-is.
 *
 * @return TMR_SUCCESS, or TMR_ERROR_COMM_ERRNO(GetLastError()) on failure
 */
__declspec(dllexport) TMR_Status
s_setBaudRate(TMR_SR_SerialTransport *this, uint32_t rate)
{
  TMR_SR_SerialPortNativeContext *context = this->cookie;
  DCB settings;

  /* Fetch the current state, patch only the baud rate, write it back. */
  settings.DCBlength = sizeof(DCB);
  GetCommState(context->handle, &settings);
  settings.BaudRate = rate;

  if (SetCommState(context->handle, &settings) == 0)
  {
    return TMR_ERROR_COMM_ERRNO(GetLastError());
  }

  return TMR_SUCCESS;
}
/**
 * Close the serial port handle. Always reports success: the transport is
 * being torn down, so there is nothing useful to do with a close failure.
 */
static TMR_Status
s_shutdown(TMR_SR_SerialTransport *this)
{
  TMR_SR_SerialPortNativeContext *context = this->cookie;

  CloseHandle(context->handle);

  return TMR_SUCCESS;
}
/**
 * Discard any unread input buffered on the port.
 *
 * @return TMR_SUCCESS, or TMR_ERROR_COMM_ERRNO(GetLastError()) on failure
 */
static TMR_Status
s_flush(TMR_SR_SerialTransport *this)
{
  TMR_SR_SerialPortNativeContext *c;

  c = this->cookie;
  if (PurgeComm(c->handle, PURGE_RXCLEAR) == 0)
  {
    /* BUG FIX: PurgeComm is a Win32 API that reports failures through
     * GetLastError(), not errno; this previously returned a stale errno
     * value, unlike every other function in this file. */
    return TMR_ERROR_COMM_ERRNO(GetLastError());
  }
  return TMR_SUCCESS;
}
/**
 * Initialize a serial transport over a native Win32 COM port.
 * Wires the s_* implementations into @transport and stores the
 * translated device name in @context.
 *
 * @param transport transport structure to populate
 * @param context   storage for the native port state (handle, name)
 * @param device    reader device name; the first character is skipped,
 *                  so "/COM3" becomes "\\.\COM3"
 * @return TMR_SUCCESS, or TMR_ERROR_INVALID if the name would overflow
 */
__declspec(dllexport)TMR_Status
TMR_SR_SerialTransportNativeInit(TMR_SR_SerialTransport *transport,
TMR_SR_SerialPortNativeContext *context,
const char *device)
{
// Transform COM port name from "/COMnn" to "\\\\.\\COMnn"
/* -1 drops the skipped leading char, +4 is the "\\.\" prefix, +1 NUL. */
if (strlen(device)-1 + 4 + 1 > TMR_MAX_READER_NAME_LENGTH)
{
return TMR_ERROR_INVALID;
}
snprintf(context->devicename, sizeof(context->devicename),
"\\\\.\\%s", device+1);
transport->cookie = context;
transport->open = s_open;
transport->sendBytes = s_sendBytes;
transport->receiveBytes = s_receiveBytes;
transport->setBaudRate = s_setBaudRate;
transport->shutdown = s_shutdown;
transport->flush = s_flush;
return TMR_SUCCESS;
}
|
<reponame>RolandTaverner/D3dTiles<filename>D3dTiles/src/Region.cpp<gh_stars>1-10
#include "stdafx.h"
#include <boost/geometry/algorithms/within.hpp>
#include <boost/geometry/algorithms/overlaps.hpp>
#include "D3dTiles/Region.h"
#include "D3dTiles/RendererBase.h"
namespace TileEngine {
// Default-construct a root region: no parent, id 0, at the origin, 0x0.
Region::Region() : Region(WeakPtr(), 0, Position(0, 0), 0, 0) {
}
// Construct a region with an explicit parent, id, position and size.
// Position is relative to the parent region.
Region::Region(WeakPtr parent, RegionID id, const Position &position, unsigned width, unsigned height) :
m_parent(parent), m_ID(id), m_position(position), m_width(width), m_height(height) {
}
Region::~Region() {}
// Identifier of this region within its parent.
Region::RegionID Region::ID() const {
return m_ID;
}
// Position relative to the parent region.
const Position &Region::Pos() const {
return m_position;
}
unsigned Region::Width() const {
return m_width;
}
unsigned Region::Height() const {
return m_height;
}
// Bounding rectangle of this region in parent coordinates:
// [Pos(), Pos() + (Width(), Height())].
Rect Region::GetRect() const {
const Position &minPoint = Pos();
Position maxPoint(minPoint);
boost::geometry::add_point(maxPoint, Position(Width(), Height()));
return Rect(minPoint, maxPoint);
}
// Number of render levels this subtree occupies: the deepest child
// subtree (max), plus 1 for this region itself, plus the levels of all
// layers (summed — each layer stacks on top of the previous one).
unsigned Region::GetLevelsCount() const {
unsigned childLevels = 0;
for (auto i : m_children) {
childLevels = std::max<>(childLevels, i.second->GetLevelsCount());
}
unsigned layersLevels = 0;
for (auto layer : m_layers) {
layersLevels += layer.second->GetLevelsCount();
}
return childLevels + 1 + layersLevels;
}
// Create a child region at @position (relative to this region) with the
// given size. Throws std::invalid_argument if the child would extend
// outside this region or overlap an existing sibling.
Region::Ptr Region::AddChild(const Position &position, unsigned width, unsigned height) {
const Rect thisRect(Position(0, 0), Position(Width(), Height()));
const Position newMin(position);
Position newMax(position);
boost::geometry::add_point(newMax, Position(width, height));
const Rect newRect(newMin, newMax);
if (!boost::geometry::within(newRect, thisRect)) {
throw std::invalid_argument("New child region outside parent's bounds");
}
for (auto i : m_children) {
const Rect curRect(i.second->GetRect());
if (boost::geometry::overlaps(curRect, newRect)) {
throw std::invalid_argument("New child region overlaps with existing");
}
}
// IDs are monotonically increasing: one past the current maximum
// (m_children is an ordered map, so rbegin() holds the max key).
const RegionID newID = m_children.empty() ? 0 : (m_children.rbegin()->first + 1);
Ptr newRegion(std::make_shared<Region>(shared_from_this(), newID, position, width, height));
m_children[newID] = newRegion;
return newRegion;
}
// Create a layer at @level covering this whole region. Throws
// std::invalid_argument if a layer with that level already exists.
Region::Ptr Region::AddLayer(unsigned level) {
LayersMap::const_iterator i = m_layers.find(level);
if (i != m_layers.end()) {
throw std::invalid_argument("layer already exists");
}
Ptr newLevel(std::make_shared<Region>(shared_from_this(), level, Position(0, 0), Width(), Height()));
m_layers[level] = newLevel;
return newLevel;
}
// Create a layer at the next free level: 0 for the first layer,
// otherwise one past the current maximum level.
Region::Ptr Region::AddLayer() {
  const unsigned nextLevel =
      m_layers.empty() ? 0 : (m_layers.rbegin()->first + 1);
  return AddLayer(nextLevel);
}
// Render this region's own graphics at @ownLevel, then children and
// layers at successively incremented levels. NOTE: `level` keeps
// incrementing across loop iterations, so each child/layer lands on its
// own level above the previous one.
void Region::Render(unsigned ownLevel, const Position &parentPosition, Region::RendererBasePtr renderer) {
Position absPosition(parentPosition);
boost::geometry::add_point(absPosition, Pos());
RenderSelf(ownLevel, absPosition, renderer);
unsigned level = ownLevel;
for (auto child : m_children) {
child.second->Render(++level, absPosition, renderer);
}
for (auto layer : m_layers) {
layer.second->Render(++level, absPosition, renderer);
}
}
// Visitor for the GraphicElement variant: dispatches each element kind to
// the matching RendererBase call at a fixed level/position (used with
// std::visit in Region::RenderSelf).
class GraphicElementVisitor {
public:
GraphicElementVisitor(unsigned level, const Position &position, Region::RendererBasePtr renderer) :
m_level(level), m_position(position), m_renderer(renderer){
}
// Bitmaps are rendered into their bounding rectangle.
void operator()(Bitmap::Ptr &s) {
Position minPoint(m_position);
Position maxPoint(m_position);
boost::geometry::add_point(maxPoint, Position(s->Width(), s->Height()));
m_renderer->RenderBitmap(m_level, Rect(minPoint, maxPoint), s);
}
void operator()(ColoredRectangle::Ptr &s) {
m_renderer->RenderColoredRectangle(m_level, m_position, s);
}
void operator()(TexturedRectangle::Ptr &s) {
m_renderer->RenderTexturedRectangle(m_level, m_position, s);
}
void operator()(Text::Ptr &t) {
m_renderer->RenderText(m_level, m_position, t);
}
private:
unsigned m_level;      // render level (z-order)
Position m_position;   // absolute position of the element
RendererBase::Ptr m_renderer;
};
// Render every graphic element queued on this region at @level; element
// positions are relative to the region, so offset them by @position.
void Region::RenderSelf(unsigned level, const Position &position, Region::RendererBasePtr renderer) {
for (auto e : m_graphics) {
Position pos(position);
boost::geometry::add_point(pos, e.position);
GraphicElementVisitor visitor(level, pos, renderer);
std::visit<>(visitor, e.element);
}
}
// Queue a colored rectangle at @position (region-relative) for rendering.
void Region::DrawPrimitive(const Position &position, ColoredRectangle::Ptr p) {
m_graphics.push_back(GraphicElementPosition{ position, GraphicElement(p) });
}
// Queue a textured rectangle at @position for rendering.
void Region::DrawPrimitive(const Position &position, TexturedRectangle::Ptr p) {
m_graphics.push_back(GraphicElementPosition{ position, GraphicElement(p) });
}
// Queue a bitmap at @position for rendering.
void Region::DrawImage(const Position &position, Bitmap::Ptr bitmap) {
m_graphics.push_back(GraphicElementPosition{ position, GraphicElement(bitmap) });
}
// Queue a text element at @position for rendering.
void Region::DrawPrimitive(const Position &position, Text::Ptr p) {
m_graphics.push_back(GraphicElementPosition{ position, GraphicElement(p) });
}
// Remove all queued graphics from this region; when @children is true,
// recursively clear every child region as well (layers are untouched).
void Region::Clear(bool children) {
  m_graphics.clear();
  if (!children) {
    return;
  }
  for (auto &entry : m_children) {
    entry.second->Clear(children);
  }
}
} // namespace TileEngine
|
<filename>test/suites/info.js
const assert = require('assert');
const uuid = require('uuid');
// helpers
const {
startService,
stopService,
inspectPromise,
owner,
modelData,
bindSend,
finishUpload,
processUpload,
initUpload,
updateAccess,
} = require('../helpers/utils');
const route = 'files.info';
const {
STATUS_PENDING,
STATUS_UPLOADED,
STATUS_PROCESSED,
} = require('../../src/constant');
// Integration tests for the `files.info` route across the upload
// lifecycle: pending -> uploaded -> processed -> public.
describe('info suite', function suite() {
// setup functions
before('start service', startService);
// sets `this.response` to `files.finish` response
before('init upload', initUpload(modelData));
before('helpers', bindSend(route));
// tear-down
after('stop service', stopService);
// Unknown upload id must not leak existence information beyond a 404.
it('404 on missing filename/upload-id', function test() {
return this
.send({ filename: uuid.v4(), username: owner })
.reflect()
.then(inspectPromise(false))
.then((err) => {
assert.equal(err.statusCode, 404);
return null;
});
});
it('401 on valid upload id, invalid user', function test() {
return this
.send({ filename: this.response.uploadId, username: '<EMAIL>' })
.reflect()
.then(inspectPromise(false))
.then((err) => {
assert.equal(err.statusCode, 401);
return null;
});
});
// Freshly-initialized uploads report STATUS_PENDING and no embed data.
it('STATUS_PENDING on valid upload id', function test() {
return this
.send({ filename: this.response.uploadId, username: owner })
.reflect()
.then(inspectPromise())
.then((rsp) => {
assert.equal(rsp.username, owner);
assert.deepEqual(rsp.file, this.response);
assert.equal(rsp.file.embed, undefined);
assert.equal(rsp.file.status, STATUS_PENDING);
return null;
});
});
describe('after upload', function afterUploadSuite() {
before('complete upload', function pretest() {
return finishUpload.call(this, this.response);
});
it('401 on invalid user id', function test() {
return this
.send({ filename: this.response.uploadId, username: '<EMAIL>' })
.reflect()
.then(inspectPromise(false))
.then((err) => {
assert.equal(err.statusCode, 401);
return null;
});
});
it('STATUS_UPLOADED on valid user id', function test() {
return this
.send({ filename: this.response.uploadId, username: owner })
.reflect()
.then(inspectPromise())
.then((rsp) => {
assert.equal(rsp.username, owner);
assert.equal(rsp.file.status, STATUS_UPLOADED);
return null;
});
});
describe('after processed', function afterProcessedSuite() {
before('process file', function pretest() {
return processUpload.call(this, this.response);
});
it('returns 401 on invalid user id', function test() {
return this
.send({ filename: this.response.uploadId, username: '<EMAIL>' })
.reflect()
.then(inspectPromise(false))
.then((err) => {
assert.equal(err.statusCode, 401);
return null;
});
});
// Processed files expose controls, tags, per-file sizes and embed code.
it('returns correct STATUS_PROCESSED', function test() {
return this
.send({ filename: this.response.uploadId, username: owner })
.reflect()
.then(inspectPromise())
.then((rsp) => {
assert.equal(rsp.username, owner);
assert.equal(rsp.file.status, STATUS_PROCESSED);
assert.ok(Array.isArray(rsp.file.controlsData));
assert.ok(Array.isArray(rsp.file.tags));
assert.equal(rsp.file.controlsData.length, 29);
assert.deepEqual(rsp.file.tags, ['ok', 'done']);
assert.ifError(rsp.file.public);
assert.ok(rsp.file.files);
// compressed binaries must record both sizes, decompressed > stored
rsp.file.files.forEach((file) => {
assert.ok(file.contentLength);
if (file.type === 'c-bin') {
assert.ok(file.decompressedLength);
assert.ok(file.decompressedLength > file.contentLength);
}
});
assert.ok(rsp.file.embed);
assert.ok(rsp.file.embed.code);
assert.equal(typeof rsp.file.embed.code, 'string');
assert.notEqual(rsp.file.embed.code.length, 0);
assert.ok(rsp.file.embed.params);
// every embed param carries type, default and description
Object.keys(rsp.file.embed.params).forEach((key) => {
const param = rsp.file.embed.params[key];
assert.ok(param.type);
assert.notStrictEqual(param.default, undefined);
assert.ok(param.description);
});
return null;
});
});
describe('public file', function publicSuite() {
before('make public', function pretest() {
return updateAccess.call(this, this.response.uploadId, owner, true);
});
it('returns info when file is public', function test() {
return this
.send({ filename: this.response.uploadId, username: owner })
.reflect()
.then(inspectPromise())
.then((rsp) => {
assert.equal(rsp.username, owner);
assert.equal(rsp.file.owner, owner);
assert.equal(rsp.file.public, '1');
assert.equal(rsp.file.status, STATUS_PROCESSED);
assert.ok(rsp.file.files);
rsp.file.files.forEach((file) => {
assert.ok(file.contentLength);
if (file.type === 'c-bin') {
assert.ok(file.decompressedLength);
assert.ok(file.decompressedLength > file.contentLength);
}
});
return null;
});
});
});
});
});
});
|
#!/bin/bash
# Bootstrap a docker-based LNMP stack on this host (see .env for config).
set -e
SCRIPT="$(readlink -f "$0")"
SCRIPT_PATH="$(dirname "$SCRIPT")"
# BUG FIX: was `1>&2 2>/dev/null`, which sent pushd's stdout to stderr;
# silence both streams instead.
pushd "$SCRIPT_PATH" >/dev/null 2>&1 || exit 1
# Load our home-grown helper functions (Is*, ray_*, InstallApps, ...).
. $SCRIPT_PATH/work/tools/functions.sh
# Require root; re-exec through sudo when available.
if ! HasRootPremission; then
if IsCommandExists sudo; then
sudo bash "$0"
exit $?
else
ray_echo_Red "ERROR: You need to be root to run this script"
# BUG FIX: previously fell through and kept running without root.
exit 1
fi
fi
# Required environment variables live in .env; seed it from the sample
# and bail out so the operator can fill it in.
if ! IsFile ${SCRIPT_PATH}/.env; then
cp ${SCRIPT_PATH}/.env.sample ${SCRIPT_PATH}/.env;
ray_echo_Red "please modify ${SCRIPT_PATH}/.env first!";
exit 1
fi
. ${SCRIPT_PATH}/.env
ray_printStatusOk "导入.env环境变量"
# Well-known package mirrors inside China.
mirrors=(
# Alibaba internal network
mirrors.cloud.aliyuncs.com
# Alibaba public network
mirrors.aliyun.com
)
# Our servers run on Aliyun, so prefer Aliyun's internal network.
# Probe each mirror on port 80; the LAST reachable entry wins.
for mirrors_ in "${mirrors[@]}"; do
if nc -z -w 1 $mirrors_ 80 1>&2 2>/dev/null; then
mirrors_default=$mirrors_
fi
done
ray_echo_Green "探测到仓库源 ${mirrors_default}"
# Install docker from the selected mirror if it is not present yet.
if ! IsCommandExists docker; then
if IsUbuntu; then
apt-get update -y
# Remove any distro-provided docker packages first.
apt-get remove docker docker-engine docker.io containerd runc -y
apt-get install apt-transport-https ca-certificates curl gnupg-agent \
software-properties-common -y
curl -fsSL https://${mirrors_default}/docker-ce/linux/ubuntu/gpg | apt-key add -
the_ppa="http://${mirrors_default}/docker-ce/linux/ubuntu"
# Add the docker-ce apt repository only once.
if ! grep -q "^deb .*$the_ppa" /etc/apt/sources.list /etc/apt/sources.list.d/*; then
add-apt-repository \
"deb [arch=amd64 trusted=yes] $the_ppa $(lsb_release -cs) stable"
fi
ray_printStatusOk "docker 仓库已更新"
elif IsRedHat; then
# CentOS 7 and newer: switch base/epel repos to the Aliyun mirror.
mv /etc/yum.repos.d/CentOS-Base.repo /etc/yum.repos.d/CentOS-Base.repo.backup
wget -O /etc/yum.repos.d/CentOS-Base.repo http://mirrors.aliyun.com/repo/Centos-7.repo
mv /etc/yum.repos.d/epel.repo /etc/yum.repos.d/epel.repo.backup
wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo
yum makecache -y
yum remove docker docker-client docker-client-latest docker-common \
docker-latest docker-latest-logrotate docker-logrotate docker-engine -y
yum install -y yum-utils device-mapper-persistent-data lvm2
yum-config-manager --add-repo https://${mirrors_default}/docker-ce/linux/centos/docker-ce.repo
ray_printStatusOk "docker 仓库已更新"
fi
# Install docker plus the tooling the stack needs.
InstallApps docker-ce docker-ce-cli containerd.io mysql-client-core-5.7 jq git htop iftop
if IsCommandExists docker; then
ray_printStatusOk "安装docker……"
else
ray_printStatusFailed "安装docker……"
exit 1
fi
fi
ray_echo_Green "检测docker-compose文件"
# Download a pinned docker-compose (1.25.4) and verify its sha256.
if ! IsFile /usr/local/bin/docker-compose; then
ray_echo_Green "正在下载docker-compose,很慢的,稍安勿躁……"
curl -fSL https://get.daocloud.io/docker/compose/releases/download/1.25.4/docker-compose-$(uname -s)-$(uname -m) \
-o /usr/local/bin/docker-compose && chmod +x /usr/local/bin/docker-compose
# Reject the binary if the checksum does not match the pinned release.
if ! IsSameStr "$(sha256sum /usr/local/bin/docker-compose | awk '{print $1 }')" \
"cfb3439956216b1248308141f7193776fcf4b9c9b49cbbe2fb07885678e2bb8a" ; then
ray_echo_Red "docker-compose 文件sha256不对"
exit 1
fi
if IsCommandExists docker-compose; then
ray_printStatusOk "安装docker-compose"
else
ray_printStatusFailed "安装docker-compose"
exit 1
fi
fi
# Re-source the shared docker-compose environment variables.
. ${SCRIPT_PATH}/.env
# Make the helper functions available in interactive shells (idempotent).
if ! IsFileHasStr '#lnmp-tools' $HOME/.bashrc; then
sed -i -e "\$a#lnmp-tools\n. $SCRIPT_PATH/work/tools/functions.sh\n" $HOME/.bashrc
ray_printStatusOk "安装基本快捷bash命令"
fi
# Configure logrotate to rotate the web logs daily (7 rotations kept).
if IsDir /etc/logrotate.d; then
# Unquoted EOF: $SCRIPT_PATH expands into the generated config.
cat > /etc/logrotate.d/wwwlogs <<EOF
$SCRIPT_PATH/work/logs/*.log {
su root root
daily
rotate 7
missingok
notifempty
compress
dateext
sharedscripts
postrotate
/usr/bin/env docker exec nginx nginx -s reopen
endscript
}
EOF
chmod 644 /etc/logrotate.d/wwwlogs
ray_printStatusOk "安装日志切割脚本"
fi
# Tear down any containers left over from a previous install.
docker-compose down --rmi local 2>/dev/null || true
############################ Generate database passwords ######################
# Generate passwords only once; the files double as docker secrets.
if ! IsFile db_root_password.txt; then
DB_ROOT_PASSWD="$(MakePassword)"
DB_DEFAULT_PASSWD="$(MakePassword)"
# Database root password.
echo -n "$DB_ROOT_PASSWD" > db_root_password.txt
ray_printStatusOk "生成数据库root密码"
# Password for the application's database user.
echo -n "$DB_DEFAULT_PASSWD" > db_${MYSQL_USER}_password.txt
ray_printStatusOk "生成数据库用户${MYSQL_USER}密码"
chmod 400 db_root_password.txt
chmod 444 db_${MYSQL_USER}_password.txt
fi
############################ Restore default file permissions ##################
find $SCRIPT_PATH -type f -exec chmod 644 {} \;
find $SCRIPT_PATH -name "*.sh" -exec chmod 755 {} \;
find $SCRIPT_PATH -type d -exec chmod 755 {} \;
chmod 777 work/logs
ray_printStatusOk "恢复默认文件权限"
######################## Preseed docker images from a local tarball ############
# Put docker-images.tar.gz in this directory (or in $HOME) to skip pulling.
# BUG FIX: the original tested the still-empty $Images twice, so the first
# branch always assigned the local name without checking that the file
# exists and the $HOME fallback was unreachable.
Images=""
if IsFile "docker-images.tar.gz"; then
Images="docker-images.tar.gz"
elif IsFile "$HOME/docker-images.tar.gz"; then
Images="$HOME/docker-images.tar.gz"
fi
if ! IsEmpty $Images; then
ray_echo_Green "正在使用本地docker-images"
gzip -d "$Images"
docker load -i "${Images%.tar.gz}"
# Re-tag for the local registry prefix when one is configured.
if ! IsEmpty $DOCKER_LOCAL_SRC; then
for image in "code-get:2.0" "php7.3-fpm" "php7.3-cli" \
"nginx-with-lua:1.16.0" "redis:5.0" "mysql:5.7"; do
docker tag xiaosumay/$image ${DOCKER_LOCAL_SRC}xiaosumay/$image
done
fi
ray_printStatusOk "安装lnmp成功"
fi
################################################################################
ray_echo_Green "正在启动服务"
docker-compose up -d
# The installer deletes itself after a successful run.
rm -f "$SCRIPT"
# NOTE(review): `1>&2 2>/dev/null` sends popd's stdout to stderr; the
# intent is probably `>/dev/null 2>&1` — confirm before changing.
popd 1>&2 2>/dev/null
ray_printStatusOk "docker-lnmp 安装……"
|
package io.renren.modules.sys.controller;
import java.util.Arrays;
import java.util.Map;
import io.renren.common.validator.ValidatorUtils;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import io.renren.modules.sys.entity.TbuserEntity;
import io.renren.modules.sys.service.TbuserService;
import io.renren.common.utils.PageUtils;
import io.renren.common.utils.R;
/**
 * User management endpoints (CRUD for {@link TbuserEntity}).
 *
 * @author Mark
 * @email <EMAIL>
 * @date 2019-06-17 10:51:15
 */
@RestController
@RequestMapping("sys/tbuser")
public class TbuserController {
@Autowired
private TbuserService tbuserService;
/**
 * Paginated user list; paging/filter options arrive in {@code params}.
 */
@RequestMapping("/list")
@RequiresPermissions("sys:tbuser:list")
public R list(@RequestParam Map<String, Object> params){
PageUtils page = tbuserService.queryPage(params);
return R.ok().put("page", page);
}
/**
 * Detail for a single user by id.
 */
@RequestMapping("/info/{userId}")
@RequiresPermissions("sys:tbuser:info")
public R info(@PathVariable("userId") Long userId){
TbuserEntity tbuser = tbuserService.getById(userId);
return R.ok().put("tbuser", tbuser);
}
/**
 * Create a new user.
 */
@RequestMapping("/save")
@RequiresPermissions("sys:tbuser:save")
public R save(@RequestBody TbuserEntity tbuser){
tbuserService.save(tbuser);
return R.ok();
}
/**
 * Update an existing user (validated before persisting).
 */
@RequestMapping("/update")
@RequiresPermissions("sys:tbuser:update")
public R update(@RequestBody TbuserEntity tbuser){
ValidatorUtils.validateEntity(tbuser);
tbuserService.updateById(tbuser);
return R.ok();
}
/**
 * Delete one or more users by id.
 */
@RequestMapping("/delete")
@RequiresPermissions("sys:tbuser:delete")
public R delete(@RequestBody Long[] userIds){
tbuserService.removeByIds(Arrays.asList(userIds));
return R.ok();
}
}
|
package krakend
import (
"context"
"fmt"
amqp "github.com/devopsfaith/krakend-amqp/v2"
cel "github.com/devopsfaith/krakend-cel/v2"
cb "github.com/devopsfaith/krakend-circuitbreaker/v2/gobreaker/proxy"
lambda "github.com/devopsfaith/krakend-lambda/v2"
lua "github.com/devopsfaith/krakend-lua/v2/proxy"
martian "github.com/devopsfaith/krakend-martian/v2"
metrics "github.com/devopsfaith/krakend-metrics/v2/gin"
oauth2client "github.com/devopsfaith/krakend-oauth2-clientcredentials/v2"
opencensus "github.com/devopsfaith/krakend-opencensus/v2"
pubsub "github.com/devopsfaith/krakend-pubsub/v2"
juju "github.com/devopsfaith/krakend-ratelimit/v2/juju/proxy"
"github.com/luraproject/lura/v2/config"
"github.com/luraproject/lura/v2/logging"
"github.com/luraproject/lura/v2/proxy"
"github.com/luraproject/lura/v2/transport/http/client"
httprequestexecutor "github.com/luraproject/lura/v2/transport/http/client/plugin"
httpcache "github.com/NichoFerdians/krakend-httpcache/v2"
)
// NewBackendFactory creates a BackendFactory by stacking all the available middlewares:
// - oauth2 client credentials
// - http cache
// - martian
// - pubsub
// - amqp
// - cel
// - lua
// - rate-limit
// - circuit breaker
// - metrics collector
// - opencensus collector
func NewBackendFactory(logger logging.Logger, metricCollector *metrics.Metrics) proxy.BackendFactory {
return NewBackendFactoryWithContext(context.Background(), logger, metricCollector)
}
// NewBackendFactoryWithContext creates a BackendFactory by stacking all the available middlewares and injecting the received context
func NewBackendFactoryWithContext(ctx context.Context, logger logging.Logger, metricCollector *metrics.Metrics) proxy.BackendFactory {
requestExecutorFactory := func(cfg *config.Backend) client.HTTPRequestExecutor {
clientFactory := client.NewHTTPClient
// oauth2 client-credentials and the HTTP cache are mutually
// exclusive: the cache wraps the plain client only when no
// oauth2 config is present for this backend.
if _, ok := cfg.ExtraConfig[oauth2client.Namespace]; ok {
clientFactory = oauth2client.NewHTTPClient(cfg)
} else {
clientFactory = httpcache.NewHTTPClient(cfg, clientFactory)
}
return opencensus.HTTPRequestExecutorFromConfig(clientFactory, cfg)
}
requestExecutorFactory = httprequestexecutor.HTTPRequestExecutor(logger, requestExecutorFactory)
// Stack the remaining middlewares; order matters — each wraps the one
// assigned before it, so opencensus is outermost at request time.
backendFactory := martian.NewConfiguredBackendFactory(logger, requestExecutorFactory)
bf := pubsub.NewBackendFactory(ctx, logger, backendFactory)
backendFactory = bf.New
backendFactory = amqp.NewBackendFactory(ctx, logger, backendFactory)
backendFactory = lambda.BackendFactory(logger, backendFactory)
backendFactory = cel.BackendFactory(logger, backendFactory)
backendFactory = lua.BackendFactory(logger, backendFactory)
backendFactory = juju.BackendFactory(logger, backendFactory)
backendFactory = cb.BackendFactory(backendFactory, logger)
backendFactory = metricCollector.BackendFactory("backend", backendFactory)
backendFactory = opencensus.BackendFactory(backendFactory)
return func(remote *config.Backend) proxy.Proxy {
logger.Debug(fmt.Sprintf("[BACKEND: %s] Building the backend pipe", remote.URLPattern))
return backendFactory(remote)
}
}
// backendFactory adapts NewBackendFactoryWithContext to a method receiver
// so it can satisfy interface-based factory injection.
type backendFactory struct{}
func (b backendFactory) NewBackendFactory(ctx context.Context, l logging.Logger, m *metrics.Metrics) proxy.BackendFactory {
return NewBackendFactoryWithContext(ctx, l, m)
}
|
<filename>src/main/java/gex/newsml/NewsMLException.java
package gex.newsml;
/**
 * Checked exception raised while parsing or processing NewsML documents.
 */
public class NewsMLException extends Exception {

	private static final long serialVersionUID = 1022133291415943221L;

	/**
	 * Creates an exception with a detail message only.
	 * (Added for completeness — callers previously had to fabricate a cause.)
	 *
	 * @param message description of the failure
	 */
	public NewsMLException(String message) {
		super(message);
	}

	/**
	 * Creates an exception wrapping an underlying cause.
	 *
	 * @param cause the underlying error
	 */
	public NewsMLException(Throwable cause) {
		super(cause);
	}

	/**
	 * Creates an exception with a detail message and an underlying cause.
	 *
	 * @param message description of the failure
	 * @param cause   the underlying error
	 */
	public NewsMLException(String message, Throwable cause) {
		super(message, cause);
	}
}
|
<reponame>danwatt/chain-pattern<filename>src/main/java/com/googlecode/chainpattern/impl/ChainBase.java
/*
* Copyright 2001-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.chainpattern.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import com.googlecode.chainpattern.Chain;
import com.googlecode.chainpattern.Command;
import com.googlecode.chainpattern.Filter;
/**
 * <p>
 * Convenience base class for {@link Chain} implementations.
 * </p>
 */
public class ChainBase<C> implements Chain<C> {
/**
 * <p>
 * Flag indicating whether the configuration of our commands list has been
 * frozen by a call to the <code>execute()</code> method.
 * </p>
 */
private boolean frozen = false;
/**
 * <p>
 * The list of {@link Command}s configured for this {@link Chain}, in the
 * order in which they may delegate processing to the remainder of the
 * {@link Chain}.
 * </p>
 */
private final List<Command<C>> commands = new ArrayList<Command<C>>();
/**
 * <p>
 * Construct a {@link Chain} with no configured {@link Command}s.
 * </p>
 */
public ChainBase() {
}
/**
 * See the {@link Chain} JavaDoc.
 *
 * @param command
 *            The {@link Command} to be added
 *
 * @exception IllegalArgumentException
 *                if <code>command</code> is <code>null</code>
 * @exception IllegalStateException
 *                if no further configuration is allowed
 */
public Chain<C> addCommand(Command<C> command) {
if (null == command) {
throw new IllegalArgumentException("Commands may not be null");
}
if (frozen) {
throw new IllegalStateException("A command may not be added to a chain once the chain has been executed");
}
commands.add(command);
return this;
}
/**
 * Returns a read-only view of the configured commands.
 */
public List<Command<C>> getCopyOfCommands() {
return Collections.unmodifiableList(commands);
}
/**
 * See the {@link Chain} JavaDoc.
 *
 * @param context
 *            The context to be processed by this {@link Chain}
 *
 * @throws Exception
 *             if thrown by one of the {@link Command}s in this
 *             {@link Chain} but not handled by a <code>postprocess()</code>
 *             method of a {@link Filter}
 * @throws IllegalArgumentException
 *             if <code>context</code> is <code>null</code>
 *
 * @return <code>true</code> if the processing of this context has
 *         been completed, or <code>false</code> if the processing of this
 *         context should be delegated to a subsequent
 *         {@link Command} in an enclosing {@link Chain}
 */
public boolean execute(C context) throws Exception {
// First execution freezes the configuration permanently.
this.frozen = true;
boolean savedResult = CONTINUE_PROCESSING;
ListIterator<Command<C>> li = this.commands.listIterator();
Exception savedException = null;
// Phase 1: run commands forward until one completes processing or throws.
while (li.hasNext()) {
Command<C> command = li.next();
try {
savedResult = command.execute(context);
} catch (Exception toSave) {
savedException = toSave;
break;
}
if (Chain.PROCESSING_COMPLETE == savedResult) {
break;
}
}
// Phase 2: walk backwards over the commands already run, giving each
// Filter a chance to clean up and/or handle the saved exception.
boolean handled = false;
while (li.hasPrevious()) {
Command<C> command = li.previous();
if (command instanceof Filter<?>) {
Filter<C> f = (Filter<C>) command;
try {
handled |= f.postprocess(context, savedException);
} catch (Exception ignoreMe) {
// NOTE(review): postprocess failures are deliberately swallowed
// so every filter still gets its cleanup pass; consider logging.
}
}
}
// Rethrow the original exception unless some filter handled it.
if (null != savedException && !handled) {
throw savedException;
} else {
return savedResult;
}
}
}
|
def delete_divisible_by_5(my_list):
    """Return a new list containing only the items not divisible by 5.

    The input list is left untouched; multiples of 5 (including 0) are
    dropped while the relative order of the remaining items is preserved.
    """
    return [item for item in my_list if item % 5 != 0]
# Quick smoke test for delete_divisible_by_5.
sample_values = [1, 10, 15, 20, 25]
filtered_values = delete_divisible_by_5(sample_values)
print("List after deletion: " + str(filtered_values))
<reponame>chirag-singhal/-Data-Structures-and-Algorithms<filename>Miscellaneous/InterviewBit/Maths/ExcelColumnNumber.cpp
#include <bits/stdc++.h>
// Convert an Excel-style column title (e.g. "A", "AB") into its 1-based
// column index by treating the letters as a base-26 number.
int titleToNumber(std::string A) {
    int column = 0;
    for (char letter : A) {
        column = column * 26 + (letter - 'A' + 1);
    }
    return column;
}
|
# Run the libarchive-llvm build image with the project root mounted inside.
if [ ! -f "./package.json" ]; then echo "you should run this from project root"; exit 1; fi
# Quote the mount path so a working directory containing spaces does not
# split the -v argument (original used unquoted `pwd` backticks).
docker run -it -v "$(pwd)":/var/local libarchive-llvm
import tensorflow as tf

# create the model: a small fully-connected network taking 2-dimensional
# inputs (x, y coordinates) and producing a single regression output.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation='sigmoid', input_shape=(2,)),
    tf.keras.layers.Dense(32, activation='sigmoid'),
    tf.keras.layers.Dense(1)
])

# compile the model
# NOTE(review): 'accuracy' is not a meaningful metric for an 'mse' regression
# loss; consider metrics=['mae'] — left as-is pending confirmation.
model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])

# train the model
# BUG FIX: the original call had unbalanced parentheses and a stray
# `{target}` template placeholder; Model.fit takes features and targets as
# separate positional arguments.
# NOTE(review): x_train / y_train / x_test are assumed to be defined earlier
# in the full script — confirm against the surrounding code.
model.fit(x_train, y_train, epochs=10, batch_size=32)

# predict the values for the held-out inputs
# BUG FIX: Model.predict takes only the input features, not an (x, y) tuple.
predictions = model.predict(x_test)
/* jshint strict:false, globalstrict:false */
/* global describe, it, beforeEach, inject, module */
// Unit-test scaffold for the BoardCtrl AngularJS controller.
describe('BoardCtrl', function () {
  var boardCtrl,
    scope;

  // Load the application module under test before each spec.
  beforeEach(module('memory'));

  beforeEach(inject(function ($injector) {
    scope = $injector.get('$rootScope');
    // NOTE(review): this helper returns the $controller service itself, not
    // an instantiated BoardCtrl — specs must still invoke it with the
    // controller name to build an instance. Confirm intended usage.
    boardCtrl = function () {
      return $injector.get('$controller');
    };
  }));

  // TODO: spec body not yet implemented.
  it('should add new boards on add()', function () {
  });
});
|
#!/bin/bash
# CI helper: generate a JHipster sample project with insight disabled.
# -e: abort on first failing command; -v: echo each command for the CI log.
set -ev
#-------------------------------------------------------------------------------
# Force no insight
#-------------------------------------------------------------------------------
mkdir -p "$HOME"/.config/configstore/
mv "$JHIPSTER_TRAVIS"/configstore/*.json "$HOME"/.config/configstore/
#-------------------------------------------------------------------------------
# Generate the project with yo jhipster
#-------------------------------------------------------------------------------
# Move the sample into $HOME and strip any pre-bundled grunt modules so the
# freshly linked generator supplies them instead.
mv -f "$JHIPSTER_SAMPLES"/"$JHIPSTER" "$HOME"/
cd "$HOME"/"$JHIPSTER"
rm -Rf "$HOME"/"$JHIPSTER"/node_modules/.bin/*grunt*
rm -Rf "$HOME"/"$JHIPSTER"/node_modules/*grunt*
npm link generator-jhipster
yo jhipster --force --no-insight
# Debug output: list the generated tree so CI logs capture what was produced.
ls -al "$HOME"/"$JHIPSTER"
ls -al "$HOME"/"$JHIPSTER"/node_modules/
ls -al "$HOME"/"$JHIPSTER"/node_modules/generator-jhipster/
ls -al "$HOME"/"$JHIPSTER"/node_modules/generator-jhipster/generators/
ls -al "$HOME"/"$JHIPSTER"/node_modules/generator-jhipster/generators/entity/
|
# frozen_string_literal: true
# Specs for Metrics/CyclomaticComplexity: each decision point (conditional,
# loop, boolean operator, rescue, when branch, safe navigation, enumerating
# block) adds 1 to a method's base complexity of 1.
RSpec.describe RuboCop::Cop::Metrics::CyclomaticComplexity, :config do
  context 'when Max is 1' do
    let(:cop_config) { { 'Max' => 1 } }

    it 'accepts a method with no decision points' do
      expect_no_offenses(<<~RUBY)
        def method_name
          call_foo
        end
      RUBY
    end

    it 'accepts an empty method' do
      expect_no_offenses(<<~RUBY)
        def method_name
        end
      RUBY
    end

    it 'accepts an empty `define_method`' do
      expect_no_offenses(<<~RUBY)
        define_method :method_name do
        end
      RUBY
    end

    # Only code inside method bodies is counted.
    it 'accepts complex code outside of methods' do
      expect_no_offenses(<<~RUBY)
        def method_name
          call_foo
        end

        if first_condition then
          call_foo if second_condition && third_condition
          call_bar if fourth_condition || fifth_condition
        end
      RUBY
    end

    it 'registers an offense for an if modifier' do
      expect_offense(<<~RUBY)
        def self.method_name
        ^^^^^^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo if some_condition
        end
      RUBY
    end

    it 'registers an offense for an unless modifier' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo unless some_condition
        end
      RUBY
    end

    it 'registers an offense for an elsif block' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [3/1]
          if first_condition then
            call_foo
          elsif second_condition then
            call_bar
          else
            call_bam
          end
        end
      RUBY
    end

    it 'registers an offense for a ternary operator' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          value = some_condition ? 1 : 2
        end
      RUBY
    end

    it 'registers an offense for a while block' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          while some_condition do
            call_foo
          end
        end
      RUBY
    end

    it 'registers an offense for an until block' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          until some_condition do
            call_foo
          end
        end
      RUBY
    end

    it 'registers an offense for a for block' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          for i in 1..2 do
            call_method
          end
        end
      RUBY
    end

    it 'registers an offense for a rescue block' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          begin
            call_foo
          rescue Exception
            call_bar
          end
        end
      RUBY
    end

    # Each `when` branch counts, not the `case` itself.
    it 'registers an offense for a case/when block' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [3/1]
          case value
          when 1
            call_foo
          when 2
            call_bar
          end
        end
      RUBY
    end

    it 'registers an offense for &&' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo && call_bar
        end
      RUBY
    end

    it 'registers an offense for &&=' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          foo = nil
          foo &&= 42
        end
      RUBY
    end

    it 'registers an offense for and' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo and call_bar
        end
      RUBY
    end

    it 'registers an offense for ||' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo || call_bar
        end
      RUBY
    end

    it 'registers an offense for ||=' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          foo = nil
          foo ||= 42
        end
      RUBY
    end

    it 'registers an offense for or' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo or call_bar
        end
      RUBY
    end

    it 'deals with nested if blocks containing && and ||' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [6/1]
          if first_condition then
            call_foo if second_condition && third_condition
            call_bar if fourth_condition || fifth_condition
          end
        end
      RUBY
    end

    it 'registers an offense for &.' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [3/1]
          foo&.bar
          foo&.bar
        end
      RUBY
    end

    # Repeated safe navigation on a variable that has not been reassigned
    # counts once per assignment, not once per call.
    it 'counts repeated &. on same untouched local variable as 1' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [3/1]
          var = 1
          var&.foo
          var&.dont_count_me
          var = 2
          var&.bar
          var&.dont_count_me_eother
        end
      RUBY
    end

    it 'counts only a single method' do
      expect_offense(<<~RUBY)
        def method_name_1
        ^^^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name_1 is too high. [2/1]
          call_foo if some_condition
        end

        def method_name_2
        ^^^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name_2 is too high. [2/1]
          call_foo if some_condition
        end
      RUBY
    end

    it 'registers an offense for a `define_method`' do
      expect_offense(<<~RUBY)
        define_method :method_name do
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          call_foo if some_condition
        end
      RUBY
    end

    it 'counts enumerating methods with blocks as +1' do
      expect_offense(<<~RUBY)
        define_method :method_name do
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [3/1]
          (1..4).map do |i| # map: +1
            i * 2
          end.each.with_index { |val, i| puts val, i } # each: +0, with_index: +1
          return treasure.map
        end
      RUBY
    end

    it 'counts enumerating methods with block-pass as +1' do
      expect_offense(<<~RUBY)
        define_method :method_name do
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [2/1]
          [].map(&:to_s)
        end
      RUBY
    end

    # Non-enumerating blocks (class_eval, tap, custom DSLs) add nothing.
    it 'does not count blocks in general' do
      expect_no_offenses(<<~RUBY)
        define_method :method_name do
          Struct.new(:foo, :bar) do
            String.class_eval do
              [42].tap do |answer|
                foo { bar }
              end
            end
          end
        end
      RUBY
    end
  end

  context 'when IgnoredMethods is set' do
    context 'with a string' do
      let(:cop_config) { { 'Max' => 0, 'IgnoredMethods' => ['foo'] } }

      it 'does not register an offense when defining an instance method' do
        expect_no_offenses(<<~RUBY)
          def foo
            bar.baz(:qux)
          end
        RUBY
      end

      it 'does not register an offense when defining a class method' do
        expect_no_offenses(<<~RUBY)
          def self.foo
            bar.baz(:qux)
          end
        RUBY
      end

      it 'does not register an offense when using `define_method`' do
        expect_no_offenses(<<~RUBY)
          define_method :foo do
            bar.baz(:qux)
          end
        RUBY
      end
    end

    context 'with a regex' do
      let(:cop_config) { { 'Max' => 0, 'IgnoredMethods' => [/foo/] } }

      it 'does not register an offense when defining an instance method' do
        expect_no_offenses(<<~RUBY)
          def foo
            bar.baz(:qux)
          end
        RUBY
      end

      it 'does not register an offense when defining a class method' do
        expect_no_offenses(<<~RUBY)
          def self.foo
            bar.baz(:qux)
          end
        RUBY
      end

      it 'does not register an offense when using `define_method`' do
        expect_no_offenses(<<~RUBY)
          define_method :foo do
            bar.baz(:qux)
          end
        RUBY
      end
    end
  end

  context 'when Max is 2' do
    let(:cop_config) { { 'Max' => 2 } }

    it 'counts stupid nested if and else blocks' do
      expect_offense(<<~RUBY)
        def method_name
        ^^^^^^^^^^^^^^^ Cyclomatic complexity for method_name is too high. [5/2]
          if first_condition then
            call_foo
          else
            if second_condition then
              call_bar
            else
              call_bam if third_condition
            end
            call_baz if fourth_condition
          end
        end
      RUBY
    end
  end
end
|
#!/bin/bash
# List entries from the directory repository.
# $DR_ENGINE is left unquoted deliberately so it may carry interpreter flags
# (e.g. "python3 -u"); the script path and the user-supplied argument are
# quoted so paths or filters containing spaces survive word splitting.
$DR_ENGINE "$DR_HOME/dist/directory-repository.py" --list "$1";
|
# TODO: Make this compatible with rvm.
# Run sudo gem on the system ruby, not the active ruby.
alias sgem='sudo gem'

# Search for a pattern inside all *.rb files under the current directory:
#   rfind <pattern>
alias rfind='find . -name "*.rb" | xargs grep -n'
|
<reponame>lgarciaaco/cos-fleetshard
package org.bf2.cos.fleetshard.operator.debezium;
import io.quarkus.runtime.Quarkus;
import io.quarkus.runtime.QuarkusApplication;
import io.quarkus.runtime.annotations.QuarkusMain;
@QuarkusMain
public class Main implements QuarkusApplication {

    /**
     * Entry point invoked by Quarkus once the runtime has started.
     * Blocks until the application is asked to shut down, then exits
     * with status 0.
     */
    @Override
    public int run(String... args) throws Exception {
        // Keep the application alive until an external shutdown request.
        Quarkus.waitForExit();
        return 0;
    }

    /** JVM entry point; delegates lifecycle management to Quarkus. */
    public static void main(String... args) {
        Quarkus.run(Main.class, args);
    }
}
<filename>app/src/main/java/com/acmvit/acm_app/service/NetworkChangeReceiver.java
package com.acmvit.acm_app.service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.util.Log;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import com.acmvit.acm_app.AcmApp;
/**
 * Broadcast receiver that re-evaluates network connectivity whenever a
 * connectivity broadcast arrives and publishes the result app-wide via
 * {@link AcmApp#setIsConnected}.
 */
public class NetworkChangeReceiver extends BroadcastReceiver {

    /**
     * NOTE(review): ConnectivityManager#getActiveNetworkInfo is deprecated on
     * modern Android (API 29+); consider migrating to
     * ConnectivityManager.NetworkCallback — confirm the app's minSdk first.
     */
    @Override
    public void onReceive(final Context context, final Intent intent) {
        final ConnectivityManager connMgr = (ConnectivityManager) context.getSystemService(
            Context.CONNECTIVITY_SERVICE
        );
        // getActiveNetworkInfo() may return null when no network is active;
        // the null check below treats that as "not connected".
        final android.net.NetworkInfo netWorkInfo = connMgr.getActiveNetworkInfo();
        AcmApp.setIsConnected(
            (netWorkInfo != null && netWorkInfo.isConnected())
        );
    }
}
|
def isPalindrome(s: str) -> bool:
    """Return True if `s` is a palindrome, ignoring case and any character
    that is not alphanumeric (spaces, punctuation, etc.)."""
    # Strip non-alphanumerics and normalise case first.
    cleaned = ''.join(ch for ch in s if ch.isalnum()).lower()
    # Two-pointer scan from both ends toward the middle.
    left, right = 0, len(cleaned) - 1
    while left < right:
        if cleaned[left] != cleaned[right]:
            return False
        left += 1
        right -= 1
    return True
#!/bin/bash
# Run cpplint over the given file(s) with the project's filter set.
FILE=$1
if [[ -z $FILE ]]; then
    echo "usage style_check.sh <file name>"
    exit 1
fi
filters=-readability/streams,-build/include_what_you_use,-whitespace/comments,-runtime/references,-runtime/rtti
# Quote the dirname expansion so a checkout path containing spaces still
# resolves cpplint.py correctly (original left $(dirname $0) unquoted).
"$(dirname "$0")"/cpplint.py --filter=${filters} "$@"
|
def kml2latlon(ifile):
    """Read lon lat from kml file with single path"""
    # NOTE: fastkml is a third-party dependency; imported lazily so the
    # module loads even when fastkml is absent. `geometry` is imported but
    # unused here.
    from fastkml import kml, geometry
    with open(ifile, 'rt') as myfile:
        doc = myfile.read()
    k = kml.KML()
    k.from_string(doc)
    # Assumes exactly one top-level document whose first placemark carries
    # the path geometry — TODO confirm against the KML files used.
    features = list(k.features())
    placemarks = list(features[0].features())
    coordinates = list(placemarks[0].geometry.coords)
    # NOTE(review): despite the function name, each tuple is (lon, lat) —
    # KML stores coordinates longitude-first. Confirm callers expect this
    # order. The third component (altitude) is discarded.
    latlon_list = [(lon, lat) for lon, lat, _ in coordinates]
    return latlon_list
# Publish the latest simplecv image to Docker Hub (requires a prior
# `docker login` with push rights to the mohitsethi namespace).
docker push mohitsethi/simplecv:latest
<reponame>AlbertoCortes13/node
const log = require('../__utils__/logger');
const seqModels = require('../__utils__/sequelizeConf');
// TODO: Identify on how to destructure this object properly
// eslint-disable-next-line prefer-destructuring
const userModel = seqModels.user;
seqModels.sequelize.authenticate().then(() => {
log.info('Connection has been established successfully.');
})
.catch((err) => {
log.error('Unable to connect to the database:', err);
});
module.exports = {
getAllUsers: async () => {
log.info('Getting all users');
try {
let res = await userModel.findAll();
// This map changes from sqlize obj to json object
res = res.map(r => (r.toJSON()));
return res;
} catch (error) {
log.error(`Can't get all users from Postgres: \n${error.message}`);
return error;
}
},
getUserFromDbById: async (id) => {
try {
const sqlzUser = await userModel.findOne({
where: {
fbId: id,
},
});
return sqlzUser.get({
plain: true,
});
} catch (error) {
log.error(`Couldn't get user by id: ${id}\n${error.message}`);
return error;
}
},
getUserFromDbByEmail: async (email) => {
try {
const sqlzUser = await userModel.findOne({
where: {
email,
},
});
return sqlzUser.get({
plain: true,
});
} catch (error) {
log.error(`Couldn't get user by email: ${email}\n${error.message}`);
return error;
}
},
addUserToDb: async (user) => {
try {
const createdUser = userModel.create(user);
return createdUser;
} catch (error) {
log.error(`Couldn't create user: ${user}`);
return error;
}
},
updateUserInDb: async (fbId, user) => {
try {
const updatedUser = userModel.update(user, {
where: {
fbId,
},
});
return updatedUser.get({
plain: true,
});
} catch (error) {
log.error(`Couldn't update user: ${user}`);
return error;
}
},
removeUserFromDbById: async fbId => userModel.destroy({
where: {
fbId,
},
}),
removeUserFromDbByEmail: async email => userModel.destroy({
where: {
email,
},
}),
};
|
#!/bin/sh
docker-compose exec -T webpage python3 manage.py updateKeycloak |
<reponame>lxlx704034204/zheng-master-diy0
package com.zheng.common.util;
import com.alibaba.druid.pool.DruidDataSource;
import sun.misc.BASE64Decoder;
import sun.misc.BASE64Encoder;
import javax.crypto.*;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.sql.*;
/**
* AES加解密工具类
* Created by shuzheng on 2017/2/5.
*/
/**
 * AES encrypt/decrypt utility class.
 * Created by shuzheng on 2017/2/5.
 *
 * NOTE(review): the AES key is derived by seeding a "SHA1PRNG" SecureRandom
 * with a fixed passphrase. That derivation is JDK-implementation dependent
 * and is known to yield different keys on some JVMs — confirm the target
 * runtime before upgrading the JDK. All existing ciphertexts depend on this
 * exact scheme, so it must not be changed without re-encrypting stored data.
 */
public class AESUtil {

    // Do not change this value! Existing encrypted configuration depends on it.
    private static final String encodeRules = "zheng";

    /**
     * Encrypt.
     * 1. Build the key generator
     * 2. Initialize it from the encodeRules passphrase
     * 3. Generate the key
     * 4. Create and initialize the cipher
     * 5. Encrypt the content
     * 6. Return the Base64-encoded string
     */
    public static String AESEncode(String content) {
        try {
            // 1. Key generator for the AES algorithm (name is case-insensitive).
            KeyGenerator keygen = KeyGenerator.getInstance("AES");
            // 2. Seed a SHA1PRNG random source with the passphrase bytes and
            //    use it to initialize a 128-bit key.
            SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
            random.setSeed(encodeRules.getBytes());
            keygen.init(128, random);
            // 3. Generate the raw symmetric key.
            SecretKey original_key = keygen.generateKey();
            // 4. Extract the raw key bytes.
            byte[] raw = original_key.getEncoded();
            // 5. Rebuild an AES key from the raw bytes.
            SecretKey key = new SecretKeySpec(raw, "AES");
            // 6. Create an AES cipher.
            Cipher cipher = Cipher.getInstance("AES");
            // 7. Initialize for encryption (ENCRYPT_MODE) with the key.
            cipher.init(Cipher.ENCRYPT_MODE, key);
            // 8. Encode the plaintext as UTF-8 so mixed Chinese/English text
            //    round-trips without corruption on decrypt.
            byte[] byte_encode = content.getBytes("utf-8");
            // 9. Encrypt the bytes.
            byte[] byte_AES = cipher.doFinal(byte_encode);
            // 10. Base64-encode the ciphertext into a printable string.
            //     Uses sun.misc.BASE64Encoder; if the class cannot be
            //     resolved, remove and re-add the JRE System Library in the
            //     project's build path and rebuild.
            String AES_encode = new String(new BASE64Encoder().encode(byte_AES));
            // 11. Return the encoded string.
            return AES_encode;
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        } catch (NoSuchPaddingException e) {
            e.printStackTrace();
        } catch (InvalidKeyException e) {
            e.printStackTrace();
        } catch (IllegalBlockSizeException e) {
            e.printStackTrace();
        } catch (BadPaddingException e) {
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        // On any failure above, fall through and return null.
        return null;
    }

    /**
     * Decrypt.
     * Procedure:
     * 1. Same as encryption steps 1-4 (re-derive the key)
     * 2. Base64-decode the encrypted string back into a byte[]
     * 3. Decrypt the bytes
     */
    public static String AESDecode(String content) {
        try {
            // 1. Key generator for the AES algorithm (name is case-insensitive).
            KeyGenerator keygen = KeyGenerator.getInstance("AES");
            // 2. Seed a SHA1PRNG random source with the passphrase bytes and
            //    use it to initialize a 128-bit key (must match AESEncode).
            SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
            random.setSeed(encodeRules.getBytes());
            keygen.init(128, random);
            // 3. Generate the raw symmetric key.
            SecretKey original_key = keygen.generateKey();
            // 4. Extract the raw key bytes.
            byte[] raw = original_key.getEncoded();
            // 5. Rebuild an AES key from the raw bytes.
            SecretKey key = new SecretKeySpec(raw, "AES");
            // 6. Create an AES cipher.
            Cipher cipher = Cipher.getInstance("AES");
            // 7. Initialize for decryption (DECRYPT_MODE) with the key.
            cipher.init(Cipher.DECRYPT_MODE, key);
            // 8. Base64-decode the encrypted, encoded content into bytes.
            byte[] byte_content = new BASE64Decoder().decodeBuffer(content);
            /*
             * Decrypt.
             */
            byte[] byte_decode = cipher.doFinal(byte_content);
            String AES_decode = new String(byte_decode, "utf-8");
            return AES_decode;
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        } catch (NoSuchPaddingException e) {
            e.printStackTrace();
        } catch (InvalidKeyException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (IllegalBlockSizeException e) {
            // Message (kept verbatim) tells the operator the configured
            // password must be AES-encrypted with this very utility.
            throw new RuntimeException("兄弟,配置文件中的密码需要使用AES加密,请使用com.zheng.common.util.AESUtil工具类修改这些值!");
            //e.printStackTrace();
        } catch (BadPaddingException e) {
            e.printStackTrace();
        }
        // On any failure above, fall through and return null.
        return null;
    }

    /** Ad-hoc manual test harness; see the commented examples below. */
    public static void main(String[] args) {
//        String[] keys = {
//                "", "123456"
//        };
//        System.out.println("key | AESEncode | AESDecode");
//        for (String key : keys) {
//            System.out.print(key + " | ");
//            String encryptString = AESEncode(key);
//            System.out.print(encryptString + " | ");
//            String decryptString = AESDecode(encryptString);
//            System.out.println(decryptString);
//        }
        System.out.print("-test-21-:"+ MybatisGeneratorUtil.class.getResource("/").getPath() ); //
//        testConnectDB();
        // Note: "zheng" encrypts to "xUHISEO23dP+VSxTjekCyQ=="
        //       "liangxin" encrypts to "iIZhAeuyc/+QfYppQBG0Ww=="
        //       "mysql" encrypts to "p98wSLPTg1FFEMrGwtBVOA=="
//        String mysqlPassWord = "<PASSWORD>";
//        String encode = AESEncode(mysqlPassWord);
//        String dncode = AESDecode(encode);
//        System.out.print("-test-对个人数据库密码进行加密-:"+ encode +"\n"); //"p98wSLPTg1FFEMrGwtBVOA=="
//        System.out.print("-test-对加密后的mysql密码 解密-:"+ dncode); //"mysql"
    }

    /** Smoke-tests a raw JDBC round trip against the zheng database. */
    public static void testConnectDB(){
        System.out.println( "-test-0-: ");
        Connection cc=null;
        PreparedStatement statement =null;
        ResultSet rs =null;
        try{
            cc= getConnection();
//            cc= getConnection2();
            if(!cc.isClosed()) System.out.println( "-test-1-: Succeeded connecting to the Database!");
            statement = cc.prepareStatement("select * from upms_user where user_id=?");
            statement.setString(1, "1");//"select * from users where name=?"
            rs = statement.executeQuery();
            while(rs.next()) {
                System.out.println("-test-2-'upms_user'表id=1的name为: "+ rs.getString("username")+"");
            }
        }catch(SQLException e){
            System.out.println("-test-3-: "+ e.toString());
        }finally {
            System.out.println("-test-4-: ");
            // Close in reverse acquisition order; each close is best-effort.
            try{
                if(rs!=null) rs.close();
                if(statement!=null) statement.close();
                if(cc!=null) cc.close();
            }catch(SQLException e){
                e.printStackTrace();
            }
        }
    }

    /** Plain DriverManager connection (no pooling); returns null on failure. */
    public static Connection getConnection(){
        try{
            Class.forName("com.mysql.jdbc.Driver");
            Connection conn= DriverManager.getConnection(
                    "jdbc:mysql://dbserver:3306/zheng",
                    "root",
                    "mysql");// obtain the connection object
            return conn;
        }catch(ClassNotFoundException e){
            e.printStackTrace();
            return null;
        }catch(SQLException e){
            e.printStackTrace();
            return null;
        }
    }

    /** Druid-pooled connection variant; returns null on failure. */
    public static Connection getConnection2(){
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
        dataSource.setUsername("root");
        dataSource.setPassword("<PASSWORD>");
        dataSource.setUrl("jdbc:mysql://dbserver:3306/zheng");//127.0.0.1
        dataSource.setInitialSize(1); dataSource.setMinIdle(1);
        dataSource.setMaxActive(20); // to enable monitoring/statistics: dataSource.setFilters("stat"); for mysql: dataSource.setPoolPreparedStatements(false);
        try{
//            Class.forName(driver);
//            Connection conn= DriverManager.getConnection(url,name,pwd); // obtain the connection object
            Connection conn = dataSource.getConnection();
            return conn;
        }catch(SQLException e){
            e.printStackTrace();
            return null;
        }
    }
}
|
<reponame>part-blockchain/chainsqld
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2016 Ripple Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include <ripple/app/misc/ValidatorList.h>
#include <ripple/app/misc/ValidatorSite.h>
#include <ripple/basics/Slice.h>
#include <ripple/json/json_reader.h>
#include <ripple/protocol/JsonFields.h>
#include <beast/core/detail/base64.hpp>
#include <boost/regex.hpp>
namespace ripple {

// default site query frequency - 5 minutes
//auto constexpr DEFAULT_REFRESH_INTERVAL = std::chrono::minutes{5};

// Thin specialization of ConfigSite that forwards fetched validator lists to
// the owning ValidatorList.
ValidatorSite::ValidatorSite (
    ManifestCache& validatorManifests,
    boost::asio::io_service& ios,
    ValidatorList& validators,
    beast::Journal j)
    : ConfigSite(ios, validatorManifests, j)
    , validators_ (validators)
{
}

ValidatorSite::~ValidatorSite()
{
}

// Build a JSON report of every configured site's last refresh outcome and
// refresh interval (used for RPC/diagnostics output).
Json::Value
ValidatorSite::getJson() const
{
    using namespace std::chrono;
    using Int = Json::Value::Int;

    Json::Value jrr(Json::objectValue);
    Json::Value& jSites = (jrr[jss::validator_sites] = Json::arrayValue);
    {
        // sites_ is mutated by the refresh machinery; hold the mutex while
        // iterating.
        std::lock_guard<std::mutex> lock{sites_mutex_};
        for (Site const& site : sites_)
        {
            Json::Value& v = jSites.append(Json::objectValue);
            v[jss::uri] = site.uri;
            // lastRefreshStatus is unset until the first fetch completes.
            if (site.lastRefreshStatus)
            {
                v[jss::last_refresh_time] =
                    to_string(site.lastRefreshStatus->refreshed);
                v[jss::last_refresh_status] =
                    to_string(site.lastRefreshStatus->disposition);
            }
            v[jss::refresh_interval_min] =
                static_cast<Int>(site.refreshInterval.count());
        }
    }
    return jrr;
}

// Delegate list application (signature/version checks included) to the
// ValidatorList this site feeds.
ripple::ListDisposition ValidatorSite::applyList(std::string const& manifest, std::string const& blob, std::string const& signature, std::uint32_t version)
{
    return validators_.applyList(manifest, blob, signature, version);
}

} // ripple
|
Ext.onReady(function () {
//计薪表头
//WPQtyHeader_Model
var isAdminRoot = WpConfig.UserDefault[GlobalVar.NowUserId].root == '管理员';
var EditingWQ = null;
var CalInscrease = true;
var TableType = '计件分成';
var OnFormInit = function () {
EditingWQ = null;
EditingWQ = Ext.create('WPQtyHeader_Model', {
wq_id: -1,
size_id: -1,
edit_ut: WpConfig.UserDefault[GlobalVar.NowUserId].edit_ut || 1, //1.对, 2.个
cal_inscrease: CalInscrease,
n_man: GlobalVar.NowUserId
});
WQForm.getForm().reset();
SetJXDDMinMax();
WQForm.setLoading(true);
fnCommonCreateLastNo('SY', WQForm.getComponent('jx_no'), function () {
Ext.Function.defer(function () {
WQForm.setLoading(false);
}, 800);
});
WQGrid.store.removeAll();
ListenFormJX_DDChange();
OnSetReadOnlyOnUpdateing(false);
}
var FormUpdateRecord = function () {
EditingWQ.set('jx_dd', WQForm.getComponent('jx_dd').getValue());
EditingWQ.set('jx_no', WQForm.getComponent('jx_no').getValue());
EditingWQ.set('wp_dep_no', WQForm.getComponent('wp_dep_no').getValue());
EditingWQ.set('user_dep_no', WQForm.getComponent('user_dep_no').getValue());
EditingWQ.set('provider', WQForm.getComponent('provider').getValue());
}
///普通员工,不能改冰封, 结账的单据
// 管理员, 不能改结账的单据,但可以改冰封期内的
var IsLocking = function (p_TableDD) {
if (EditingWQ.get('wq_id') < 0) {
return false;
}
var TableDate = new Date(Ext.Date.format(p_TableDD || EditingWQ.get('n_dd'), 'Y/m/d'));
var freezeDate = Ext.Date.add(TableDate, Ext.Date.DAY, (WpConfig.freezeDay - 1));
var TodayDate = GlobalVar.ServerDate;
// console.log([Ext.Date.format(TodayDate, 'Y/m/d'), Ext.Date.format(freezeDate, 'Y/m/d'), Ext.Date.format(GlobalVar.freeze_date, 'Y/m/d')]);
// alert(Ext.Date.format(TableDate, 'Y/m/d'));
// alert(Ext.Date.format(GlobalVar.freeze_date, 'Y/m/d'));
//有没有超出结账日期
if (TableDate <= GlobalVar.freeze_date) {
return '结账';
}
if (TodayDate > freezeDate) {
//管理员有权改冰封期内的
if (isAdminRoot == true) {
return false;
}
else {
return '冰封';
}
}
return false;
}
var OnFormDelete = function () {
var checkResult = IsLocking();
if (checkResult != false) {
if (checkResult == '冰封')
alert('单据已被冰封!, 想删除请与管理员联系!');
else
alert('单据已被结账,不能删除!');
return;
}
alert('删除! 末处理');
}
var OnSetReadOnlyOnUpdateing = function (readOnly) {
WQForm.getComponent('plan_no').setReadOnly(readOnly);
WQForm.getComponent('wp_dep_no').setReadOnly(readOnly);
WQForm.getComponent('user_dep_no').setReadOnly(readOnly);
}
var SetJXDDMinMax = function () {
var tDate = GlobalVar.ServerDate;
if (EditingWQ == null || EditingWQ.get('wq_id') < 0) {
tDate = GlobalVar.ServerDate;
}
else {
tDate = EditingWQ.get('n_dd');
}
var minValue = Ext.Date.add(tDate, Ext.Date.DAY, -1 * (WpConfig.freezeDay - 1));
var maxValue = tDate;
WQForm.getComponent('jx_dd').setMinValue(minValue);
WQForm.getComponent('jx_dd').setMaxValue(maxValue);
}
//更新计件日期,因为没有整体保存.
// 当有计件表身时,是不能再更新到表头了.
var flagJXDD = false;
var ListenFormJX_DDChange = function () {
WQForm.getComponent('jx_dd').hasListener('change') && WQForm.getComponent('jx_dd').clearListeners();
WQForm.getComponent('jx_dd').on('change', function (vthis, newValue, oldValue, eOpts) {
if (IsLayoutingTable == true
|| flagJXDD == true
|| Ext.isDate(newValue) == false) {
return;
}
var checkResult = IsLocking(newValue);
if (checkResult != false) {
flagJXDD = true;
WQForm.getComponent('jx_dd').setValue(oldValue);
flagJXDD = false;
if (checkResult == '冰封') {
alert('单据已被冰封!, 请与管理员联系修改!');
}
else {
alert('单据已被结账,不能修改行!');
}
return;
}
if (EditingWQ.get('wq_id') > 0 && newValue != oldValue && newValue) {
Ext.MessageBox.confirm('询问', '确定变更单据时间吗? ', function (btn) {
if (btn == 'yes') {
commonVar.AjaxRequest(
commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
{
NowUserId: NowUserId,
action: 'UpdateJXDD',
wq_id: EditingWQ.get('wq_id'),
jx_dd: newValue
}
);
} else {
flagJXDD = true;
WQForm.getComponent('jx_dd').setValue(oldValue);
flagJXDD = false;
}
});
}
});
}
var WQForm = Ext.create('Ext.form.Panel', {
region: 'north',
layout: {
type: 'table',
columns: 4
},
url: commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
defaults: {
width: 230,
labelWidth: 100,
xtype: 'textfield',
margin: '2 0 2 5',
labelAlign: 'right'
},
items: [
{
fieldLabel: '计薪日   ',
name: 'jx_dd',
itemId: 'jx_dd',
xtype: 'datefield',
format: 'Y/m/d',
minValue: Ext.Date.add(GlobalVar.ServerDate, Ext.Date.DAY, -1 * (WpConfig.freezeDay - 1)),
maxValue: GlobalVar.ServerDate,
value: GlobalVar.ServerDate,
//editable :false,
allowBlank: false
},
{
readOnly: true,
fieldLabel: '计件单号  ',
name: 'jx_no',
itemId: 'jx_no',
allowBlank: false
},
{
fieldLabel: '录入人员',
name: 'provider',
itemId: 'provider',
xtype: 'MSearch_Salm',
value: WpConfig.UserDefault[GlobalVar.NowUserId].user_no,
allowBlank: false,
colspan: 2
},
{
fieldLabel: '计划单号  ',
name: 'plan_no',
itemId: 'plan_no',
//allowBlank: false,
xtype: 'MSearch_PlanNo', // 同选 size_id size
pageSize:100,
listeners: {
select: function (vcombo, records, eOpts) {
if (EditingWQ == null || EditingWQ.get('wq_id') > 0) {
//alert('');
return;
}
//可以切换,证明切换是有效的,重设加载标记
LoadedShareFlag = false;
if (records && records.length > 0) {
var cbSel = records[0];
EditingWQ.set('plan_id', cbSel.get('plan_id'));
EditingWQ.set('plan_no', cbSel.get('plan_no'));
EditingWQ.set('prd_no', cbSel.get('prd_no'));
EditingWQ.set('size_id', -1);
EditingWQ.set('size', '');
EditingWQ.set('color_id', -1);
EditingWQ.set('cal_inscrease', CalInscrease);
WQForm.getComponent('prd_no').setValue(cbSel.get('prd_no'));
WQForm.getComponent('size').setValue(cbSel.get('size'));
FormUpdateRecord();
}
else {
alert('填充其他选择时,出错! 试重选一次!');
}
}
}
},
{
fieldLabel: '货品代号*',
name: 'prd_no',
itemId: 'prd_no',
xtype: 'MSearch_Prdt',
allowBlank: false,
readOnly: true
},
{
fieldLabel: '尺     寸*',
name: 'size',
itemId: 'size',
xtype: 'MSearch_Size',
allowBlank: false,
readOnly: true,
hidden: true
},
{
fieldLabel: '工序部门',
name: 'wp_dep_no',
itemId: 'wp_dep_no',
xtype: 'MSearch_DeptWP',
value: WpConfig.UserDefault[GlobalVar.NowUserId].wp_dep_no || '000000',
allowBlank: false
},
{
xtype: 'fieldcontainer',
items: [{
xtype: 'button',
height: 30,
text: '新建单据',
margin: '0 5 0 5',
handler: function () {
OnFormInit();
}
}, {
xtype: 'button',
height: 30,
text: '更新布局',
margin: '0 5 0 5',
hidden:true,
handler: function () {
FormUpdateRecord();
OnWQGridLayout();
}
}, {
xtype: 'button',
height: 30,
text: '统计视图',
margin: '0 5 0 5',
handler: function () {
PlanViewHelper.ShowWin(
WQForm.getComponent('plan_no').getValue(),
WQForm.getComponent('wp_dep_no').getValue(),
WQForm.getComponent('user_dep_no').getValue());
}
},{
xtype: 'button',
height: 30,
text: '导出',
icon: '../JS/resources/MyImages/ms_excel.png',
margin: '0 5 0 5',
handler: function () {
GlobalVar.ToExcel(WQGrid, '工资分析');
//alert('功能末实现');
}
}]
},
{
fieldLabel: '员工部门*',
name: 'user_dep_no',
itemId: 'user_dep_no',
xtype: 'MSearch_Dept',
value: WpConfig.UserDefault[GlobalVar.NowUserId].user_dep_no || '000000',
allowBlank: false
},
{
//// { value: 1, "name": "对" },
////{ value: 2, "name": "个" },
colspan: 1,
xtype: 'cbUTType',
name: 'edit_ut',
itemId: 'edit_ut',
fieldLabel: '录入单位',
readOnly: true,
hidden:true,
value: WpConfig.UserDefault[GlobalVar.NowUserId].edit_ut || 1
},
{
colspan: 1,
xtype: 'cbLayoutFinishQty',
name: 'layout_finish',
itemId: 'layout_finish',
fieldLabel: '完工显示',
hidden: true,
value: Ext.util.Cookies.get('layout_finish') || 'FINISH-PLAN',
listeners: {
change: function (vthis, value, eOpts) {
var layoutFinish = vthis.getValue() || 'FINISH-PLAN';
Ext.util.Cookies.set('layout_finish', layoutFinish, new Date('2099-01-01'));
WQGrid.store.getAt(RowTypeIndex.ALL_QTY).set('row_type', '');
WQGrid.store.getAt(RowTypeIndex.ALL_QTY).set('row_type', 'ALL_QTY');
WQGrid.store.getAt(RowTypeIndex.ALL2_QTY).set('row_type', '');
WQGrid.store.getAt(RowTypeIndex.ALL2_QTY).set('row_type', 'ALL2_QTY');
}
}
}
],
listeners: { afterrender: function () { OnFormInit(); } }
});
var SwitchUTIng = false; // set while the entry unit (pairs/pieces) is being switched; guards OnWQGridEdit
///Checks whether an entered pair count exceeds the ceiling.
///E.g. with a ceiling of 100 pairs, entering 102 pairs is 2 pairs over.
/**
 * Returns false (and alerts) when adding `eValue` pairs for process `wp_no`
 * would exceed the plan's total pair count, or — for 'size_qty' rows — the
 * size-specific pair count; returns true when the quantity is allowed.
 *
 * @param {String} wp_no   process number being edited
 * @param {Number} eValue  pair quantity being added
 * @param {Ext.data.Model} editingWQDetailRecord detail record currently being
 *        edited; excluded from the re-summed total so its old value is not
 *        counted twice
 * @returns {Boolean} true when within the allowed ceiling
 */
var MoreThanCeliQtyPair = function (wp_no, eValue, editingWQDetailRecord) {
    // Fix: removed the unused local `edit_ut` the original also declared here.
    var isNewing = EditingWQ.get('wq_id') < 0;
    var orignFinishQty = 0;
    var orignFinish_AllQty = 0;
    if (isNewing) {
        orignFinishQty = EditWQGridData[wp_no].sum_qty_pair || 0;
        orignFinish_AllQty = EditWQGridData[wp_no].sum_all_qty_pair || 0;
    }
    else {
        // Editing an existing sheet: the finished totals already include this
        // sheet's own quantity, so subtract the amount loaded with the table.
        orignFinishQty = (EditWQGridData[wp_no].sum_qty_pair || 0) - (EditWQGridData[wp_no].table_sum_qty_pair || 0);
        orignFinish_AllQty = (EditWQGridData[wp_no].sum_all_qty_pair || 0) - (EditWQGridData[wp_no].table_sum_qty_pair || 0);
    }
    var _qty_pair = 0;
    // Fix: the original called WQDetailStore.findBy() purely for iteration —
    // its callback never returned true, so every record was visited anyway.
    // Store.each() states that intent directly and behaves identically here.
    WQDetailStore.each(function (_wd_rec) {
        // Skip the record being edited — its new value is added via eValue.
        if (_wd_rec != editingWQDetailRecord) {
            if (_wd_rec.get('wp_no') == wp_no) {
                _qty_pair += _wd_rec.get('qty_pair') || 0;
            }
        }
    });
    _qty_pair += eValue;
    //var thisSumQtys = SumDetailWpQty(wp_no);
    if ((orignFinish_AllQty + _qty_pair) > EditingWQ.get('plan_sizes_qty')) {
        alert('工序对数, 超过总计划单的总对数!(' + EditingWQ.get('plan_sizes_qty') + ')');
        return false;
    }
    ///Additional check when this process row is limited by the size quantity.
    if (EditWQGridData[wp_no].wq_type == 'size_qty' && (orignFinishQty + _qty_pair) > EditingWQ.get('plan_size_qty')) {
        alert('工序对数, 超过尺寸计划单的对数!(' + EditingWQ.get('plan_size_qty') + ')');
        return false;
    }
    return true;
}
/**
 * Grid edit hook: keeps WQDetailStore in sync with live cell edits so its
 * records can be submitted directly as request parameters.
 * Only reacts to edits of worker rows (row_type 'SALM') in per-process
 * ('wp_*') columns, and is suppressed while the entry unit is switching.
 * Returns false (and reverts the cell) when the entry exceeds the ceiling
 * and the user lacks over-entry permission.
 */
var OnWQGridEdit = function (editor, e) {
    if (SwitchUTIng == false && e.record.get('row_type') == 'SALM' && e.field.indexOf("wp_") == 0) {
        // The model declares these fields as strings, so coerce ""/null to 0.
        e.originalValue = e.originalValue || 0;
        e.value = e.value || 0;
        if (e.originalValue == e.value)
            return;
        // Column name is 'wp_<process no>'; row_value carries the worker id.
        var wp_no = e.field.substr('wp_'.length, 10),
            sal_no = e.record.get('row_value'),
            edit_ut = EditingWQ.get('edit_ut');
        var hadRecord = false;
        var wqDetailRecord = null,
            wqOldQtyPic = 0.00,
            wqOldQtyPair = 0.00;
        // Update the existing detail record for this worker/process, if any,
        // remembering the old values in case the edit must be reverted.
        WQDetailStore.each(function (_rec) {
            if (_rec.get('worker') == sal_no && _rec.get('wp_no') == wp_no) {
                hadRecord = true;
                wqDetailRecord = _rec;
                wqOldQtyPic = _rec.get('qty_pic');
                wqOldQtyPair = _rec.get('qty_pair');
                // Entry unit 1 = pairs: derive the piece count via pic_num;
                // otherwise the entry is pieces and pairs are derived.
                if (edit_ut == 1) {
                    _rec.set('qty_pic', e.value * EditWQGridData[wp_no].pic_num);
                    _rec.set('qty_pair', e.value);
                }
                else {
                    _rec.set('qty_pic', e.value);
                    _rec.set('qty_pair', e.value / EditWQGridData[wp_no].pic_num);
                }
            }
        });
        // No detail record yet for this worker/process — insert a new one
        // (wqb_id -1 marks it as unsaved).
        if (hadRecord == false) {
            var _qty_pic = 0,
                _qty_pair = 0.00;
            wqOldQtyPic = 0;
            wqOldQtyPair = 0;
            if (edit_ut == 1) {
                _qty_pic = e.value * EditWQGridData[wp_no].pic_num;
                _qty_pair = e.value;
            }
            else {
                _qty_pic = e.value;
                _qty_pair = e.value / EditWQGridData[wp_no].pic_num;
            }
            var insertRecords = WQDetailStore.add({
                wqb_id: -1,
                wq_id: EditingWQ.get('wq_id'),
                worker: sal_no,
                prd_no: EditingWQ.get('prd_no'),
                wp_no: wp_no,
                qty_pic: _qty_pic,
                qty_pair: _qty_pair
            });
            wqDetailRecord = insertRecords[0];
        }
        // Check for over-entry against the plan ceiling (always in pairs).
        var _A_qty_pair = 0;
        if (edit_ut == 1) {
            _A_qty_pair = e.value;
        }
        else {
            _A_qty_pair = e.value / EditWQGridData[wp_no].pic_num;
        }
        var flag = MoreThanCeliQtyPair(wp_no, _A_qty_pair, wqDetailRecord);
        if (flag == false) {
            // Admins may over-enter — sometimes needed to correct historical
            // over-entries (some early sheets were already over the limit, and
            // any edit would otherwise be rejected and reverted).
            if (isAdminRoot == true) {
                alert("数量已超录, 但管理员有权限处理,但也请好好控制!");
            }
            else if (有超数权限 == true) { // Chinese-named global: "has over-quantity permission"
                alert("数量已超录, 但你有权限超录,但也请好好控制!");
            }
            else {
                // Revert both the grid cell and the detail record.
                e.record.set(e.field, e.originalValue || 0);
                wqDetailRecord.set('qty_pic', wqOldQtyPic);
                wqDetailRecord.set('qty_pair', wqOldQtyPair);
                return false;
            }
        }
        UpdateTableSumAndFinsishRecord(wp_no);
    }
}
/**
 * Submit the share-edit window's parameters to the server. On success the
 * window is closed, the sheet is reloaded and `successCallBack` (if given)
 * is invoked; on failure/exception the save button and load mask are reset.
 *
 * @param {Object} insertParams request parameters built by the edit window
 * @param {Function} [successCallBack] invoked after a successful save
 */
var fnCommitShare = function (insertParams, successCallBack) {
    EditShareWinObj.win.setLoading(true);
    EditShareWinObj.win.btnSave.setDisabled(true);
    Ext.Ajax.request({
        // Fix: `type` is a jQuery option that Ext.Ajax.request ignores; the
        // correct option is `method`. (The call happened to work anyway only
        // because Ext defaults to POST whenever `params` is present.)
        method: 'POST',
        url: commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
        params: insertParams,
        success: function (response) {
            var Json = Ext.decode(response.responseText);
            EditShareWinObj.win.btnSave.setDisabled(false);
            EditShareWinObj.win.setLoading(false);
            if (Json.result == true) {
                EditShareWinObj.fnCloseWin();
                // Reload the sheet returned by the server.
                LoadWQBTable(Json.WQHeader[0].wq_id);
                alert('保存成功');
                if (successCallBack) {
                    successCallBack();
                }
            }
            else {
                alert('保存失败:' + Json.msg);
            }
        },
        failure: function (response, opts) {
            // Ext passes (response, options) here — the first argument is the
            // XHR response object (the original misleadingly named it `form`).
            EditShareWinObj.win.btnSave.setDisabled(false);
            EditShareWinObj.win.setLoading(false);
            CommMsgShow("异常:", response.responseText, true);
        }
    });
}
/**
 * Open the share-edit window to add a new body row. Blocked when the sheet
 * is frozen ('冰封') or settled, when the window is already open, or when no
 * plan has been selected. For a brand-new sheet a sheet number ('SY' prefix)
 * is generated before the commit, and the search grid is refreshed after.
 */
var OnInsertShare = function () {
    var checkResult = IsLocking();
    if (checkResult != false) {
        if (checkResult == '冰封')
            alert('单据已被冰封!, 请与管理员联系添加!');
        else
            alert('单据已被结账,不能添加行!');
        return;
    }
    // NOTE(review): 'opeingFlag' (sic) guards against re-opening the window.
    if (EditShareWinObj.opeingFlag == true) {
        return;
    }
    if (EditingWQ == null || !EditingWQ.get('plan_no')) {
        alert('末选择计划单');
        return;
    }
    FormUpdateRecord();
    // Runs only after data has been loaded.
    EditShareWinObj.nowWQB_ID = -1; // -1 = inserting a new body row
    EditShareWinObj.EditingWQ = EditingWQ;
    EditShareWinObj.fnOpenWin(function (paramsObj) {
        if (EditingWQ.get('wq_id') < 0) {
            // Unsaved sheet: generate the sheet number first, then commit and
            // refresh the search list on success.
            EditShareWinObj.win.setLoading(true);
            EditShareWinObj.win.btnSave.setDisabled(true);
            fnCommonCreateLastNo('SY', WQForm.getComponent('jx_no'), function () {
                paramsObj.jx_no = WQForm.getComponent('jx_no').getValue();
                fnCommitShare(paramsObj, function () { OnSearchWQ(); });
            });
            return;
        }
        fnCommitShare(paramsObj);
    });
}
/**
 * Open the share-edit window for the currently selected body row and submit
 * the changes. Blocked when the sheet is frozen/settled, when the window is
 * already open, or when no saved sheet / no body row is selected.
 * Clones the row and its share records first so the edit can be cancelled.
 */
var OnUpdateShare = function () {
    var checkResult = IsLocking();
    if (checkResult != false) {
        if (checkResult == '冰封') {
            console.log('单据已被冰封!, 请与管理员联系修改!');
            alert('单据已被冰封!, 请与管理员联系修改!');
        }
        else {
            console.log('单据已被结账,不能修改行!');
            alert('单据已被结账,不能修改行!');
        }
        return;
    }
    if (EditShareWinObj.opeingFlag == true) {
        return;
    }
    if (EditingWQ == null || !EditingWQ.get('plan_no')) {
        return;
    }
    // Only saved sheets (positive wq_id) have rows that can be updated.
    if (EditingWQ.get('wq_id') <= 0) {
        return;
    }
    var selRows = WQGrid.getSelectionModel().getSelection();
    if (selRows.length <= 0) {
        alert('末选择表身行');
        return;
    }
    FormUpdateRecord();
    // Runs only after data has been loaded.
    EditShareWinObj.nowWQB_ID = selRows[0].get('wqb_id');
    var originShares = [];
    // Fix: the original called WPQtyBodyShareStore.findBy() purely for
    // iteration — its callback never returned true, so every record was
    // visited anyway. Store.each() states that intent and behaves identically.
    WPQtyBodyShareStore.each(function (qRec) {
        if (qRec.get('wqb_id') == EditShareWinObj.nowWQB_ID) {
            originShares.push(qRec.copy());
        }
    });
    EditShareWinObj.updateWQBClone = selRows[0].copy();
    EditShareWinObj.updateWQBSharesClone = originShares;
    EditShareWinObj.EditingWQ = EditingWQ;
    EditShareWinObj.fnOpenWin(function (paramsObj) {
        fnCommitShare(paramsObj);
    });
}
/**
 * Ask the user to confirm deletion of the selected body row, then delegate
 * to OnDeleteShare. Blocked when the sheet is frozen or settled.
 * NOTE(review): "Comfirm" is a typo for "Confirm"; the name is kept because
 * the grid's bbar and context menu reference it by this exact name.
 */
var fnComfirmToDelete = function () {
    var checkResult = IsLocking();
    if (checkResult != false) {
        // '冰封' = frozen; any other truthy result means settled/closed.
        if (checkResult == '冰封')
            alert('单据已被冰封!, 想删除请与管理员联系!');
        else
            alert('单据已被结账,不能删除行!');
        return;
    }
    var selRows = WQGrid.getSelectionModel().getSelection();
    if (selRows.length <= 0) {
        alert('末选择表身行');
        return;
    }
    Ext.MessageBox.confirm('询问', '确定要删除本行吗?', function (btn) {
        if (btn != 'yes')
            return;
        OnDeleteShare();
    });
}
var OnDeleteShare = function () {
var selRows = WQGrid.getSelectionModel().getSelection();
Ext.Ajax.request({
type: 'post',
url: commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
params: {
action: 'DeleteOneWQB',
NowUserId : NowUserId,
wq_id: selRows[0].get('wq_id'),
wqb_id: selRows[0].get('wqb_id')
},
success: function (response) {
var Json = Ext.decode(response.responseText);
if (Json.result == true) {
LoadWQBTable(Json.WQHeader[0].wq_id);
//alert('成功');
}
else {
alert('删除失败:' + Json.msg);
}
},
failure: function (form, action) {
CommMsgShow("异常:", form.responseText, true);
}
});
}
///Delete the entire sheet.
/**
 * Delete the whole currently-loaded sheet on the server, then reset the form
 * and refresh the search grid. Confirmation happens in the bbar handler.
 */
var OnDeleteTable = function () {
    Ext.Ajax.request({
        // NOTE(review): `type` should be `method`; POST still happens because
        // `params` is present.
        type: 'post',
        url: commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
        params: {
            action: 'DeleteWQTable',
            NowUserId: NowUserId,
            wq_id: EditingWQ.get('wq_id')
        },
        success: function (response) {
            var Json = Ext.decode(response.responseText);
            if (Json.result == true) {
                // Reset to a blank sheet and refresh the search results.
                OnFormInit();
                OnSearchWQ();
            }
            else {
                alert('删除失败:' + Json.msg);
            }
        },
        failure: function (form, action) {
            CommMsgShow("异常:", form.responseText, true);
        }
    });
}
// Sheet body rows: one record per process/size entry of the loaded sheet.
var WPQtyBodyStore = Ext.create('Ext.data.Store', {
    model: 'WPQtyBody_Model',
    data: []
});
// Per-worker share rows belonging to the body rows (joined via wqb_id).
var WPQtyBodyShareStore = Ext.create('Ext.data.Store', {
    model: 'WPQtyBodyShare_Model',
    data: []
});
// Product process definitions (wp_no -> name) for the loaded sheet.
var WQPrdtWPStore = Ext.create('Ext.data.Store', {
    model: 'Model_Only_PrdtWP',
    data: []
});
///True while a sheet is being loaded into the stores (see LoadWQBTable);
///listeners can use it to ignore intermediate store events.
// Fix: this was an implicit global assignment (no `var`), which leaks onto
// `window` and throws in strict mode. Every reader/writer in this file lives
// inside the same enclosing closure, so `var` keeps them all working.
var IsLayoutingTable = false;
/**
 * Load sheet `wq_id` from the server and populate the header form and the
 * three detail stores (body, shares, processes). While the stores are being
 * refilled, IsLayoutingTable is true so dependent listeners can ignore the
 * intermediate change events.
 */
var LoadWQBTable = function (wq_id) {
    viewport.setLoading(true);
    Ext.Ajax.request({
        // NOTE(review): `type` should be `method`; POST still happens because
        // `params` is present.
        type: 'post',
        url: commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
        params: {
            action: 'LoadWQBTable',
            wq_id: wq_id,
            NowUserId: NowUserId
        },
        success: function (response) {
            var Json = Ext.decode(response.responseText);
            IsLayoutingTable = true;
            if (Json.result == true) {
                // Batch UI updates to avoid intermediate re-layouts.
                Ext.suspendLayouts();
                EditingWQ = Ext.create('WPQtyHeader_Model', Json.Header[0]);
                SetJXDDMinMax();
                WQForm.loadRecord(EditingWQ);
                WQForm.getComponent('plan_no').setRawValue(EditingWQ.get('plan_no'));
                WPQtyBodyStore.removeAll();
                WPQtyBodyShareStore.removeAll();
                WQPrdtWPStore.removeAll();
                WQPrdtWPStore.add(Json.PrdtWP);
                WPQtyBodyShareStore.add(Json.BodyShare);
                WPQtyBodyStore.add(Json.Body);
                Ext.resumeLayouts(true);
                // A saved sheet that already has body rows locks the
                // plan-number field (greyed out).
                if (EditingWQ.get('wq_id') > 0 && WPQtyBodyStore.getCount() > 0) {
                    OnSetReadOnlyOnUpdateing(true);
                }
                else {
                    OnSetReadOnlyOnUpdateing(false);
                }
                IsLayoutingTable = false;
                viewport.setLoading(false);
            }
            else {
                viewport.setLoading(false);
                IsLayoutingTable = false;
                alert('加载失败:' + Json.msg);
            }
        },
        failure: function (form, action) {
            viewport.setLoading(false);
            IsLayoutingTable = false;
            CommMsgShow("异常:", form.responseText, true);
        }
    });
}
var WQGrid = Ext.create('Ext.grid.Panel', {
region: 'center',
enableLocking: true,
enableColumnMove: false,
enableColumnHide: false,
sortableColumns: false,
columnLines: true,
rowLines: true,
layout: 'fit',
plugins: [],
store: WPQtyBodyStore,
columns: [
{ xtype: 'rownumberer' },
{
text: '员工', dataIndex: 'workerList', width: 130,
renderer: function (v, m, rec) {
var wqb_id = rec.get('wqb_id');
var works = [];
WPQtyBodyShareStore.findBy(function (qRec) {
if (qRec.get('wqb_id') == wqb_id) {
works.push(GlobalVar.rdSalmName(qRec.get('worker')));
//console.log(qRec.get('worker'));
}
});
return works.toString();
}
},
{
text: '工序', dataIndex: 'wp_no', width: 180,
renderer: function (v, m, rec) {
var qRec = WQPrdtWPStore.findRecord('wp_no', v);
if (qRec) {
return qRec.get('name');
}
return '';
}
},
{
header: '尺寸(颜色)', name: 'rdShowSizeAndColor', renderer: function (v, m, rec) {
var size = rec.get('size');
if (rec.get('color_id') > 0) {
return size + '(' + commonVar.RenderColorName(rec.get('color_id')) + ')';
}
return size;
}
},
{ text: '对数', dataIndex: 'qty_pair', width: 100 },
{ text: '个数', dataIndex: 'qty_pic', width: 90 },
{ text: '加翻(%)', dataIndex: 'inscrease_percent', width: 90 },
{ text: '单价(对)', dataIndex: 'up_pair', width: 100 },
{ text: '单价(个)', dataIndex: 'up_pic', width: 100 },
{
text: '基本金额', dataIndex: 'amt', width: 100,
sortable: false,
renderer: function (v, m, rec) {
v = (rec.get('qty_pair') || 0) * (rec.get('up_pair') || 0);
v = Ext.util.Format.round(v, 3);
return v;
}
},
{
text: '加翻率', dataIndex: 'inscrease_percent', width: 80,
sortable: false,
renderer: function (v, m, rec) {
return (v || 0) + '%';
}
},
{
text: '加翻后金额', dataIndex: 'amt2', width: 100,
sortable: false,
renderer: function (v, m, rec) {
var pre = rec.get('inscrease_percent') ;
if (pre > 0) {
v = (rec.get('qty_pair') || 0) * (rec.get('up_pair') || 0) * (1+( pre / 100));
}
else {
v = (rec.get('qty_pair') || 0) * (rec.get('up_pair') || 0);
}
v = Ext.util.Format.round(v, 3);
return v;
}
}
],
selType: 'rowmodel',
bbar: [
{
text: '添加行', icon: '../JS/resources/MyIcon/icon_save.png', itemId: 'btnInsert', height: 30, width: 80, handler:
function () {
OnInsertShare();
}
},
{
text: '修改行', icon: '../JS/resources/MyIcon/icon_skill.png', itemId: 'btnEdit', height: 30, width: 80, handler:
function () {
OnUpdateShare();
}
},
'-',
{
text: '删除行', icon: '../JS/resources/MyIcon/icon_delete.png', height: 30, width: 80, handler: function () {
fnComfirmToDelete();
}
},
'-',
{
text: '整单删除', icon: '../JS/resources/MyIcon/icon_delete.png', height: 30, width: 80, handler: function () {
if (EditingWQ.get('wq_id') <= 0) {
alert("单据未保存不需要删除 ,点击'新建单据'即可!");
return;
}
Ext.MessageBox.confirm('询问', '确定要整单删除吗?', function (btn) {
if (btn != 'yes')
return;
OnDeleteTable();
});
}
},
{
text: '查询面板',
icon: '../JS/resources/MyIcon/search.png',
height: 30, width: 90,
handler: function () {
SearchPanel.expand();
}
}
],
listeners: {
boxready: function () {
this.btnSave = this.getDockedComponent(0).getComponent('btnSave');
///console.log(this.btnSave);
},
itemcontextmenu: function (vthis, record, item, index, e, eOpts) {
e.preventDefault();
var menu = Ext.create('Ext.menu.Menu', {
width: 100,
margin: '0 0 10 0',
items: [{
text: '修改行',
handler: function () {
OnUpdateShare();
}
}, {
text: '删除行',
handler: function () {
fnComfirmToDelete();
}
}]
});
menu.showAt(e.getXY());
}
}
});
//Search criteria: pay-date range, pay sheet no, plan no, worker department.
//Store backing the search-results grid; loaded explicitly by OnSearchWQ.
var SearchGridStore = Ext.create('Ext.data.Store', {
    model: 'WPQtyHeader_Model',
    proxy: {
        type: 'ajax',
        url: commonVar.urlCDStr + 'ASHX/ashx_WPQtyEdit.ashx',
        reader: {
            type: 'json'
            //root: 'items'
        }
    }
});
/**
 * Collect the filter values from the search form, attach the fixed request
 * fields and reload the search-results store.
 */
var OnSearchWQ = function () {
    var searchForm = SearchPanel.getComponent('SearchFormId');
    var searchParams = Ext.apply(searchForm.getValues(), {
        action: 'SearchWQ',
        NowUserId: GlobalVar.NowUserId,
        TableType: TableType,
        IsShareTable: IsShareTable
    });
    SearchGridStore.load({ params: searchParams });
}
/**
 * Collapsible east-side search panel: a criteria form (pay-date range, sheet
 * no, plan no, department, product no) on top and a results grid below.
 * Double-clicking a result loads that sheet; the panel searches once on
 * first render.
 */
var SearchPanel = Ext.create('Ext.panel.Panel', {
    region: 'east',
    title: '查询计薪单',
    width: 480,
    collapsed: true,
    layout: 'border',
    collapsible: true,
    items: [
        {
            region: 'north',
            xtype: 'form',
            itemId: 'SearchFormId',
            defaults: {
                labelAlign: 'right',
                width: 170,
                labelWidth: 60
            },
            layout: {
                type: 'table',
                columns: 2
            },
            items: [{
                // Pay-date range start, defaulting to the first of the month.
                fieldLabel: '计薪日',
                name: 'S_jx_dd',
                itemId: 'S_jx_dd',
                xtype: 'datefield',
                format: 'Y/m/d',
                value: GlobalVar.MouthFirstDay
            }, {
                // Pay-date range end, defaulting to the last of the month.
                fieldLabel: '~  至  ',
                name: 'E_jx_dd',
                itemId: 'E_jx_dd',
                xtype: 'datefield',
                format: 'Y/m/d',
                value: GlobalVar.MouthLastDay
            }, {
                fieldLabel: '计薪单',
                name: 'jx_no',
                itemId: 'jx_no',
                xtype: 'textfield',
            }, {
                fieldLabel: '计划单',
                name: 'plan_no',
                itemId: 'plan_no',
                xtype: 'textfield'
            },
            {
                fieldLabel: '员工部门',
                name: 'user_dep_no',
                itemId: 'user_dep_no',
                xtype: 'MSearch_Dept'
            }, {
                fieldLabel: '货号',
                name: 'query_prd_no',
                itemId: 'query_prd_no',
                xtype: 'textfield',
                value: ''
            }, {
                margin: '0 0 0 10',
                text: '查    询',
                xtype: 'button',
                height: 30,
                width: 80,
                handler: OnSearchWQ
            }]
        },
        {
            // Results grid; double-click loads the chosen sheet.
            region: 'center',
            xtype: 'grid',
            store: SearchGridStore,
            columns: [
                { header: '计薪日', name: 'jx_dd', dataIndex: 'jx_dd', xtype: 'datecolumn', format: 'm-d', width: 60 },
                { header: '计薪单', name: 'jx_no', dataIndex: 'jx_no', width: 100 },
                { header: '员工部门', name: 'user_dep_no', dataIndex: 'user_dep_no', renderer: GlobalVar.rdDeptName },
                { header: '计划单', name: 'plan_no', dataIndex: 'plan_no', width: 100 },
                { header: '货名', name: 'prd_no', dataIndex: 'prd_no', renderer: GlobalVar.RenderPrdtName }
            ],
            listeners: {
                itemdblclick: function (gridThis, record, item, index, e, eOpts) {
                    LoadWQBTable(record.get('wq_id'));
                }
            }
        }],
    listeners: {
        afterrender: function () {
            // Populate the results once the panel exists.
            OnSearchWQ();
        }
    }
});
/**
 * Handler for 'SendOrder' events fired by the parent window's tab panel.
 * When another page targets 'WPQtyOnShare' with action 'startEdit', resets
 * the form and pre-fills it from the supplied plan/size/department payload,
 * then lays out the entry grid.
 */
var pageMonitor = function (receiverUrl, e) {
    if (receiverUrl != 'WPQtyOnShare')
        return;
    if (e.action == 'startEdit') {
        OnFormInit();
        EditingWQ.set('jx_dd', e.jx_dd);
        EditingWQ.set('wp_dep_no', e.wp_dep_no);
        EditingWQ.set('user_dep_no', e.user_dep_no);
        EditingWQ.set('provider', WpConfig.UserDefault[GlobalVar.NowUserId].user_no || '');
        // Simulate selecting the plan in the plan_no combobox so its 'select'
        // handlers run exactly as if the user had picked it.
        var cbSelectReocrd = Ext.create('WorkPlan_Sizes_Model', { plan_no: e.plan_no, plan_id: e.plan_id, size_id: e.size_id, size: e.size, prd_no: e.prd_no });
        WQForm.getComponent('plan_no').fireEvent('select', WQForm.getComponent('plan_no'), [cbSelectReocrd]);
        WQForm.getComponent('plan_no').displayTplData = [cbSelectReocrd.data];
        WQForm.getComponent('plan_no').setRawValue(e.plan_no);
        WQForm.getComponent('wp_dep_no').setValue(e.wp_dep_no);
        WQForm.getComponent('user_dep_no').setValue(e.user_dep_no);
        // Entry unit falls back to pairs (1) when the user has no default.
        EditingWQ.set('edit_ut', WpConfig.UserDefault[GlobalVar.NowUserId].edit_ut || 1);
        WQForm.getComponent('edit_ut').setValue(WpConfig.UserDefault[GlobalVar.NowUserId].edit_ut || 1);
        OnWQGridLayout();
    }
}
/**
 * Top-level viewport: form + body grid in the center, the search panel
 * docked east. On render it wires this page into the parent window's tab
 * panel so it can receive 'SendOrder' messages (see pageMonitor).
 * NOTE(review): `viewport` is assigned without `var` here — presumably
 * declared earlier in the file (LoadWQBTable references it); confirm.
 */
viewport = Ext.create('Ext.Viewport', {
    layout: 'border',
    items: [
        {
            region: 'center',
            xtype: 'panel',
            itemId: 'LeftPanel',
            layout: 'border',
            items: [WQForm, WQGrid]
        },
        SearchPanel
    ],
    listeners: {
        afterrender: function (comp, eOpts) {
            // Pre-generate the next sheet number ('SY' prefix) into the form.
            fnCommonCreateLastNo('SY', WQForm.getComponent('jx_no'), function () { });
            var me = this;
            // Register with the parent page's tab panel when embedded.
            var pa = window.parent ? window.parent.Ext.getCmp('tabPanel') : null;
            if (pa) {
                var thisTabComp = pa.getComponent('WPQtyOnShare');
                if (thisTabComp) {
                    thisTabComp.had_rendered = true;
                    pa.on('SendOrder', pageMonitor);
                    pa.getComponent('WPQtyOnShare').fireEvent('had_rendered', pageMonitor);
                    // NOTE(review): PageClose is assigned without `var` —
                    // presumably intentional so the parent page can call it;
                    // confirm.
                    PageClose = function () {
                        var pa = window.parent.Ext.getCmp('tabPanel');
                        if (pa) {
                            pa.getComponent('WPQtyOnShare').fireEvent('letcloseme');
                        }
                    }
                }
            }
        }
    }
});
}); |
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# autoreconf calls are necessary to fix hard-coded aclocal versions in the
# configure scripts that ship with the projects.
set -e

# Absolute directory containing this script (the thirdparty root).
TP_DIR=$(cd "$(dirname "$BASH_SOURCE")"; pwd)

# Brings in the *_VERSION, *_SOURCE, TP_SOURCE_DIR, DEPENDENCY_URL, ... vars.
source $TP_DIR/vars.sh

if [[ "$OSTYPE" =~ ^linux ]]; then
  OS_LINUX=1
fi
# Remove $DIR when it does not contain the marker file for $PATCHLEVEL,
# forcing a fresh download-and-patch cycle on the next fetch.
# Fix: quoted all expansions so paths with spaces/globs cannot word-split.
delete_if_wrong_patchlevel() {
  local DIR=$1
  local PATCHLEVEL=$2
  if [ ! -f "$DIR/patchlevel-$PATCHLEVEL" ]; then
    echo "It appears that $DIR is missing the latest local patches."
    echo "Removing it so we re-download it."
    rm -Rf "$DIR"
  fi
}
# Unzip $FILENAME and ensure the extracted tree ends up at $SOURCE, renaming
# the archive's top-level directory when it differs.
# Fix: replaced legacy backticks with $(...); note DIR_NAME is intentionally
# left non-local to preserve the original's (global) behavior.
unzip_to_source() {
  local FILENAME=$1
  local SOURCE=$2
  unzip -q "$FILENAME"
  # Parse out the unzipped top directory.
  DIR_NAME=$(unzip -qql "$FILENAME" | awk 'NR==1 {print $4}' | sed -e 's|^[/]*\([^/]*\).*|\1|')
  # If the unzipped directory is the wrong name, move it.
  if [ "$SOURCE" != "$TP_SOURCE_DIR/$DIR_NAME" ]; then
    mv "$TP_SOURCE_DIR/$DIR_NAME" "$SOURCE"
  fi
}
# Download $FILENAME from $URL_PREFIX (unless already present) and unpack it
# into $SOURCE, retrying up to three times on download or unpack failures.
# Supports .zip, .tar.gz/.tgz and .jar archives; removes the archive after a
# successful unpack unless NO_REMOVE_THIRDPARTY_ARCHIVES is set.
# Fix: quoted the previously-unquoted rm/mkdir/cp expansions so filenames
# containing spaces or glob characters cannot word-split or expand.
fetch_and_expand() {
  local FILENAME=$1
  local SOURCE=$2
  local URL_PREFIX=$3

  if [ -z "$FILENAME" ]; then
    echo "Error: Must specify file to fetch"
    exit 1
  fi

  if [ -z "$URL_PREFIX" ]; then
    echo "Error: Must specify url prefix to fetch"
    exit 1
  fi

  # Prefer GNU tar on macOS (installed as gtar).
  TAR_CMD=tar
  if [[ "$OSTYPE" == "darwin"* ]] && which gtar &>/dev/null; then
    TAR_CMD=gtar
  fi

  FULL_URL="${URL_PREFIX}/${FILENAME}"

  SUCCESS=0
  # Loop in case we encounter an error.
  for attempt in 1 2 3; do
    if [ -r "$FILENAME" ]; then
      echo "Archive $FILENAME already exists. Not re-downloading archive."
    else
      echo "Fetching $FILENAME from $FULL_URL"
      if ! curl --retry 3 -L -O "$FULL_URL"; then
        echo "Error downloading $FILENAME"
        rm -f "$FILENAME"
        # Pause for a bit before looping in case the server throttled us.
        sleep 5
        continue
      fi
    fi

    echo "Unpacking $FILENAME to $SOURCE"
    if [[ "$FILENAME" =~ \.zip$ ]]; then
      if ! unzip_to_source "$FILENAME" "$SOURCE"; then
        echo "Error unzipping $FILENAME, removing file"
        rm "$FILENAME"
        continue
      fi
    elif [[ "$FILENAME" =~ \.(tar\.gz|tgz)$ ]]; then
      if ! $TAR_CMD xf "$FILENAME"; then
        echo "Error untarring $FILENAME, removing file"
        rm "$FILENAME"
        continue
      fi
    elif [[ "$FILENAME" =~ \.jar$ ]]; then
      # Jars are not unpacked; they are copied into a same-named directory.
      mkdir "${FILENAME%.jar}"
      cp "$FILENAME" "${FILENAME%.jar}/"
    else
      echo "Error: unknown file format: $FILENAME"
      exit 1
    fi

    SUCCESS=1
    break
  done

  if [ "$SUCCESS" -ne 1 ]; then
    echo "Error: failed to fetch and unpack $FILENAME"
    exit 1
  fi

  # Allow for not removing previously-downloaded artifacts.
  # Useful on a low-bandwidth connection.
  if [ -z "$NO_REMOVE_THIRDPARTY_ARCHIVES" ]; then
    echo "Removing $FILENAME"
    rm "$FILENAME"
  fi
  echo
}
# Fetch and unpack $FILENAME into $SOURCE (unless a correctly-patched copy
# already exists), apply the given patch commands, and record the patch level
# in a marker file so stale trees get re-downloaded by later runs.
# Fix: quoted all expansions passed to helpers/tests.
fetch_with_url_and_patch() {
  local FILENAME=$1
  local SOURCE=$2
  local PATCH_LEVEL=$3
  local URL_PREFIX=$4
  # Remaining args are expected to be a list of patch commands

  delete_if_wrong_patchlevel "$SOURCE" "$PATCH_LEVEL"

  if [ ! -d "$SOURCE" ]; then
    fetch_and_expand "$FILENAME" "$SOURCE" "$URL_PREFIX"
    pushd "$SOURCE"
    shift 4
    # Run the patch commands
    for f in "$@"; do
      eval "$f"
    done
    touch "patchlevel-$PATCH_LEVEL"
    popd
    echo
  fi
}
# Call fetch_with_url_and_patch with the default dependency URL source.
# Fix: quoted the forwarded arguments so empty or unusual values survive
# word-splitting intact.
fetch_and_patch() {
  local FILENAME=$1
  local SOURCE=$2
  local PATCH_LEVEL=$3
  shift 3
  fetch_with_url_and_patch \
    "$FILENAME" \
    "$SOURCE" \
    "$PATCH_LEVEL" \
    "$DEPENDENCY_URL" \
    "$@"
}
# Download area for all thirdparty sources.
mkdir -p $TP_SOURCE_DIR
cd $TP_SOURCE_DIR

# For each dependency below: bump its *_PATCHLEVEL whenever the patch list
# changes, so existing trees are invalidated and re-fetched.
GLOG_PATCHLEVEL=4
fetch_and_patch \
  glog-${GLOG_VERSION}.tar.gz \
  $GLOG_SOURCE \
  $GLOG_PATCHLEVEL \
  "patch -p0 < $TP_DIR/patches/glog-issue-198-fix-unused-warnings.patch" \
  "patch -p0 < $TP_DIR/patches/glog-issue-54-dont-build-tests.patch" \
  "patch -p1 < $TP_DIR/patches/glog-fix-symbolization.patch" \
  "patch -p1 < $TP_DIR/patches/glog-support-stacktrace-for-aarch64.patch" \
  "autoreconf -fvi"

GMOCK_PATCHLEVEL=0
fetch_and_patch \
  googletest-release-${GMOCK_VERSION}.tar.gz \
  $GMOCK_SOURCE \
  $GMOCK_PATCHLEVEL

GFLAGS_PATCHLEVEL=0
fetch_and_patch \
  gflags-${GFLAGS_VERSION}.tar.gz \
  $GFLAGS_SOURCE \
  $GFLAGS_PATCHLEVEL

GPERFTOOLS_PATCHLEVEL=2
fetch_and_patch \
  gperftools-${GPERFTOOLS_VERSION}.tar.gz \
  $GPERFTOOLS_SOURCE \
  $GPERFTOOLS_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/gperftools-Replace-namespace-base-with-namespace-tcmalloc.patch" \
  "patch -p1 < $TP_DIR/patches/gperftools-unbreak-memz.patch" \
  "autoreconf -fvi"

PROTOBUF_PATCHLEVEL=0
fetch_and_patch \
  protobuf-${PROTOBUF_VERSION}.tar.gz \
  $PROTOBUF_SOURCE \
  $PROTOBUF_PATCHLEVEL \
  "autoreconf -fvi"
# Returns 0 if cmake should be patched to work around this bug [1].
#
# Currently only SLES 12 SP0 is known to be vulnerable, and since the workaround
# hurts cmake performance, we apply it only if absolutely necessary.
#
# 1. https://gitlab.kitware.com/cmake/cmake/issues/15873.
needs_patched_cmake() {
  # True only on SUSE Linux Enterprise Server 12 Service Pack 0: all three
  # checks must pass, otherwise the chain short-circuits with non-zero.
  [ -e /etc/SuSE-release ] &&
    grep -q "SUSE Linux Enterprise Server 12" /etc/SuSE-release &&
    grep -q "PATCHLEVEL = 0" /etc/SuSE-release
}
CMAKE_PATCHLEVEL=1
# Only patch cmake on systems affected by the select() bug (see above).
CMAKE_PATCHES=""
if needs_patched_cmake; then \
  CMAKE_PATCHES="patch -p1 < $TP_DIR/patches/cmake-issue-15873-dont-use-select.patch"
fi
fetch_and_patch \
  cmake-${CMAKE_VERSION}.tar.gz \
  $CMAKE_SOURCE \
  $CMAKE_PATCHLEVEL \
  "$CMAKE_PATCHES"

SNAPPY_PATCHLEVEL=0
fetch_and_patch \
  snappy-${SNAPPY_VERSION}.tar.gz \
  $SNAPPY_SOURCE \
  $SNAPPY_PATCHLEVEL \
  "autoreconf -fvi"

ZLIB_PATCHLEVEL=0
fetch_and_patch \
  zlib-${ZLIB_VERSION}.tar.gz \
  $ZLIB_SOURCE \
  $ZLIB_PATCHLEVEL

LIBEV_PATCHLEVEL=0
fetch_and_patch \
  libev-${LIBEV_VERSION}.tar.gz \
  $LIBEV_SOURCE \
  $LIBEV_PATCHLEVEL

RAPIDJSON_PATCHLEVEL=1
fetch_and_patch \
  rapidjson-${RAPIDJSON_VERSION}.zip \
  $RAPIDJSON_SOURCE \
  $RAPIDJSON_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/rapidjson-fix-signed-unsigned-conversion-error.patch"

SQUEASEL_PATCHLEVEL=0
fetch_and_patch \
  squeasel-${SQUEASEL_VERSION}.tar.gz \
  $SQUEASEL_SOURCE \
  $SQUEASEL_PATCHLEVEL

MUSTACHE_PATCHLEVEL=0
fetch_and_patch \
  mustache-${MUSTACHE_VERSION}.tar.gz \
  $MUSTACHE_SOURCE \
  $MUSTACHE_PATCHLEVEL

GSG_PATCHLEVEL=2
fetch_and_patch \
  google-styleguide-${GSG_VERSION}.tar.gz \
  $GSG_SOURCE \
  $GSG_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/google-styleguide-cpplint.patch"

GCOVR_PATCHLEVEL=0
fetch_and_patch \
  gcovr-${GCOVR_VERSION}.tar.gz \
  $GCOVR_SOURCE \
  $GCOVR_PATCHLEVEL

CURL_PATCHLEVEL=0
fetch_and_patch \
  curl-${CURL_VERSION}.tar.gz \
  $CURL_SOURCE \
  $CURL_PATCHLEVEL \
  "autoreconf -fvi"

CRCUTIL_PATCHLEVEL=0
fetch_and_patch \
  crcutil-${CRCUTIL_VERSION}.tar.gz \
  $CRCUTIL_SOURCE \
  $CRCUTIL_PATCHLEVEL

LIBUNWIND_PATCHLEVEL=1
fetch_and_patch \
  libunwind-${LIBUNWIND_VERSION}.tar.gz \
  $LIBUNWIND_SOURCE \
  $LIBUNWIND_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/libunwind-trace-cache-destructor.patch"

PYTHON_PATCHLEVEL=0
fetch_and_patch \
  python-${PYTHON_VERSION}.tar.gz \
  $PYTHON_SOURCE \
  $PYTHON_PATCHLEVEL

LLVM_PATCHLEVEL=5
fetch_and_patch \
  llvm-${LLVM_VERSION}-iwyu-${IWYU_VERSION}.src.tar.gz \
  $LLVM_SOURCE \
  $LLVM_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/llvm-add-iwyu.patch" \
  "patch -p1 < $TP_DIR/patches/llvm-iwyu-include-picker.patch" \
  "patch -p0 < $TP_DIR/patches/llvm-iwyu-sized-deallocation.patch"

LZ4_PATCHLEVEL=0
fetch_and_patch \
  lz4-$LZ4_VERSION.tar.gz \
  $LZ4_SOURCE \
  $LZ4_PATCHLEVEL

BITSHUFFLE_PATCHLEVEL=0
fetch_and_patch \
  bitshuffle-${BITSHUFFLE_VERSION}.tar.gz \
  $BITSHUFFLE_SOURCE \
  $BITSHUFFLE_PATCHLEVEL

TRACE_VIEWER_PATCHLEVEL=0
fetch_and_patch \
  kudu-trace-viewer-${TRACE_VIEWER_VERSION}.tar.gz \
  $TRACE_VIEWER_SOURCE \
  $TRACE_VIEWER_PATCHLEVEL

BOOST_PATCHLEVEL=2
fetch_and_patch \
  boost_${BOOST_VERSION}.tar.gz \
  $BOOST_SOURCE \
  $BOOST_PATCHLEVEL \
  "patch -p0 < $TP_DIR/patches/boost-issue-12179-fix-compilation-errors.patch" \
  "patch -p0 < $TP_DIR/patches/boost-issue-440-darwin-version.patch"
# Return 0 if the current system appears to be el6 (either CentOS or proper RHEL)
needs_openssl_workaround() {
  # Fix: rel/pat previously leaked into the global shell scope, and the
  # trailing `return $?` was redundant (a function returns the status of its
  # last command anyway).
  local rel pat
  test -f /etc/redhat-release || return 1
  rel="$(cat /etc/redhat-release)"
  pat="(CentOS|Red Hat Enterprise).* release 6.*"
  [[ "$rel" =~ $pat ]]
}
# el6 ships an OpenSSL too old for some dependencies; install a known-good one.
if needs_openssl_workaround && [ ! -d "$OPENSSL_WORKAROUND_DIR" ] ; then
  echo Building on el6: installing OpenSSL from CentOS 6.4.
  $TP_DIR/install-openssl-el6-workaround.sh
fi

BREAKPAD_PATCHLEVEL=1
fetch_and_patch \
  breakpad-${BREAKPAD_VERSION}.tar.gz \
  $BREAKPAD_SOURCE \
  $BREAKPAD_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/breakpad-add-basic-support-for-dwz-dwarf-extension.patch"

SPARSEHASH_PATCHLEVEL=3
fetch_and_patch \
  sparsehash-c11-${SPARSEHASH_VERSION}.tar.gz \
  $SPARSEHASH_SOURCE \
  $SPARSEHASH_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/sparsehash-0001-Add-compatibily-for-gcc-4.x-in-traits.patch" \
  "patch -p1 < $TP_DIR/patches/sparsehash-0002-Add-workaround-for-dense_hashtable-move-constructor-.patch"

SPARSEPP_PATCHLEVEL=0
fetch_and_patch \
  sparsepp-${SPARSEPP_VERSION}.tar.gz \
  $SPARSEPP_SOURCE \
  $SPARSEPP_PATCHLEVEL

THRIFT_PATCHLEVEL=0
fetch_and_patch \
  $THRIFT_NAME.tar.gz \
  $THRIFT_SOURCE \
  $THRIFT_PATCHLEVEL

BISON_PATCHLEVEL=0
fetch_and_patch \
  $BISON_NAME.tar.gz \
  $BISON_SOURCE \
  $BISON_PATCHLEVEL
# This would normally call autoreconf, but it does not succeed with
# autoreconf 2.69-11 (RHEL 7): "autoreconf: 'configure.ac' or 'configure.in' is required".

HIVE_PATCHLEVEL=0
fetch_and_patch \
  $HIVE_NAME-stripped.tar.gz \
  $HIVE_SOURCE \
  $HIVE_PATCHLEVEL

HADOOP_PATCHLEVEL=0
fetch_and_patch \
  $HADOOP_NAME-stripped.tar.gz \
  $HADOOP_SOURCE \
  $HADOOP_PATCHLEVEL

SENTRY_PATCHLEVEL=0
fetch_and_patch \
  $SENTRY_NAME.tar.gz \
  $SENTRY_SOURCE \
  $SENTRY_PATCHLEVEL

YAML_PATCHLEVEL=0
fetch_and_patch \
  $YAML_NAME.tar.gz \
  $YAML_SOURCE \
  $YAML_PATCHLEVEL

CHRONY_PATCHLEVEL=2
fetch_and_patch \
  $CHRONY_NAME.tar.gz \
  $CHRONY_SOURCE \
  $CHRONY_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/chrony-no-superuser.patch" \
  "patch -p1 < $TP_DIR/patches/chrony-reuseport.patch"

GUMBO_PARSER_PATCHLEVEL=1
fetch_and_patch \
  $GUMBO_PARSER_NAME.tar.gz \
  $GUMBO_PARSER_SOURCE \
  $GUMBO_PARSER_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/gumbo-parser-autoconf-263.patch" \
  "autoreconf -fvi"

GUMBO_QUERY_PATCHLEVEL=1
fetch_and_patch \
  $GUMBO_QUERY_NAME.tar.gz \
  $GUMBO_QUERY_SOURCE \
  $GUMBO_QUERY_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/gumbo-query-namespace.patch"

POSTGRES_PATCHLEVEL=1
fetch_and_patch \
  $POSTGRES_NAME.tar.gz \
  $POSTGRES_SOURCE \
  $POSTGRES_PATCHLEVEL \
  "patch -p0 < $TP_DIR/patches/postgres-root-can-run-initdb.patch" \
  "patch -p0 < $TP_DIR/patches/postgres-no-check-root.patch"

POSTGRES_JDBC_PATCHLEVEL=0
fetch_and_patch \
  $POSTGRES_JDBC_NAME.jar \
  $POSTGRES_JDBC_SOURCE \
  $POSTGRES_JDBC_PATCHLEVEL

RANGER_PATCHLEVEL=2
fetch_and_patch \
  $RANGER_NAME.tar.gz \
  $RANGER_SOURCE \
  $RANGER_PATCHLEVEL \
  "patch -p1 < $TP_DIR/patches/ranger-python3.patch" \
  "patch -p0 < $TP_DIR/patches/ranger-fixscripts.patch"

echo "---------------"
echo "Thirdparty dependencies downloaded successfully"
|
package com.goldencarp.lingqianbao.model.bean;
/**
* Created by sks on 2017/12/2.
*/
/**
 * Plain data bean describing an investment product shown in the app.
 * Created by sks on 2017/12/2.
 */
public class ProductBean {

    /** Product name (产品名称). */
    private String name;
    /** Expected annualized rate of return (预期年化收益率), e.g. "4.5%". */
    private String rate;
    /** Investment period (投资时间); the unit is defined by the caller. */
    private int time;

    /** No-arg constructor, required for frameworks/adapters. */
    public ProductBean() {
        super();
    }

    /** Convenience constructor initializing all fields. */
    public ProductBean(String name, String rate, int time) {
        this.name = name;
        this.rate = rate;
        this.time = time;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getRate() {
        return rate;
    }

    public void setRate(String rate) {
        this.rate = rate;
    }

    public int getTime() {
        return time;
    }

    public void setTime(int time) {
        this.time = time;
    }

    @Override
    public String toString() {
        // Produces exactly: ProductBean{name='N', rate='R', time=T}
        final StringBuilder sb = new StringBuilder("ProductBean{");
        sb.append("name='").append(name).append('\'');
        sb.append(", rate='").append(rate).append('\'');
        sb.append(", time=").append(time);
        sb.append('}');
        return sb.toString();
    }
}
|
import random
def random_shuffle(arr):
    """Shuffle ``arr`` in place using the Fisher-Yates algorithm; return ``arr``.

    Bug fix: ``random.randint`` is inclusive on BOTH ends, so the original
    upper bound of ``i + 1`` could yield ``j == len(arr)`` on the first
    iteration (IndexError) and produced a biased shuffle otherwise; the
    correct inclusive upper bound is ``i``.
    """
    for i in range(len(arr) - 1, 0, -1):
        # Pick j uniformly from [0, i] and swap positions i and j.
        j = random.randint(0, i)
        arr[i], arr[j] = arr[j], arr[i]
    return arr
# Demo: shuffle a small list in place and print the result.
arr = [1, 2, 3, 4, 5]
random_shuffle(arr)
print(arr)
<filename>source/ui/src/components/__test__/Header.test.tsx<gh_stars>1-10
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
import { I18n } from '@aws-amplify/core';
import { render } from '@testing-library/react';
import { MemoryRouter } from 'react-router-dom';
import Header from '../Header';
// Force English so rendered strings are deterministic across environments.
I18n.setLanguage('en');

// Snapshot test: Header uses router links, so it must render inside a
// MemoryRouter before comparing against the stored snapshot.
test('renders the Header component', async () => {
  const header = render(
    <MemoryRouter>
      <Header />
    </MemoryRouter>
  );
  expect(header.container).toMatchSnapshot();
});
|
# file: src/bash/qto/funcs/dev/create-ctags.func.sh
# v0.8.5
#------------------------------------------------------------------------------
# creates the ctags file for the projet
#------------------------------------------------------------------------------
doCreateCtags(){
   # Rebuild the ctags index for the project instance directory.
   ctags --help >/dev/null 2>&1 ||
      { do_log "ERROR. ctags is not installed or not in PATH. Aborting." >&2; exit 1; }
   # Fix: the original did `pushd .` followed by an unquoted `cd`; pushing the
   # (quoted) target directly is equivalent and `popd` still restores the
   # caller's directory.
   pushd "$PROJ_INSTANCE_DIR"
   cmd="rm -fv ./tags" && doRunCmdAndLog "$cmd"
   cmd="ctags -R -n --fields=+i+K+S+l+m+a --exclude=.git --exclude=dat --exclude=*/node_modules/* ." && doRunCmdAndLog "$cmd"
   # TODO(review): PRODUCT_INSTANCE_DIR here vs PROJ_INSTANCE_DIR above — one
   # of the two is probably a typo; confirm which variable the project defines.
   cmd="ls -la $PRODUCT_INSTANCE_DIR/tags" && doRunCmdAndLog "$cmd"
   popd
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e

# BUILD_DRUID_CLSUTER (sic — the misspelling matches what CI exports, so the
# name is kept) selects whether the local Druid cluster is torn down.
# Fix: the original `if ($BUILD_DRUID_CLSUTER);` executed the variable's VALUE
# as a command and was a hard syntax error when the variable was unset/empty;
# compare the value as a string instead, treating anything but "true"
# (including unset) as false.
if [ "${BUILD_DRUID_CLSUTER:-false}" = "true" ]; then
  DRUID_HOME=$(dirname "$(pwd)")
  echo "SET DRUID_HOME: $DRUID_HOME"
  sudo /usr/local/bin/minikube delete
  sudo rm -rf "$DRUID_HOME/tmp"
  sudo rm -rf "$DRUID_HOME/druid-operator"
  docker ps
fi
|
/**
 * Unit tests for the toNumber function
 *
 * @group unit
 */
import toNumber from "../../src/toNumber.js";
import isObject from "../../src/isObject.js";
import isSymbol from "../../src/isSymbol.js";
jest.mock("../../src/isObject.js");
jest.mock("../../src/isSymbol.js");
describe("unit/toNumber", () => {
  // Default mock behaviour for the whole suite: values are treated as neither
  // objects nor symbols. Individual tests override this with
  // mockReturnValueOnce where the special-case path is under test.
  isSymbol.mockReturnValue(false);
  isObject.mockReturnValue(false);
  it("42 -> 42", () => {
    expect(toNumber(42))
      .toBe(42);
  });
  it("new Number(42) -> 42", () => {
    expect(toNumber(new Number(42)))
      .toBe(42);
  });
  it("NaN-> NaN", () => {
    expect(toNumber(NaN))
      .toBe(NaN);
  });
  it("Infinity-> Infinity", () => {
    expect(toNumber(Infinity))
      .toBe(Infinity);
  });
  it("\"3.2\"-> 3.2", () => {
    expect(toNumber("3.2"))
      .toBe(3.2);
  });
  it("\" 3.2 \"-> 3.2", () => {
    expect(toNumber(" 3.2 "))
      .toBe(3.2);
  });
  it("3.2-> 3.2", () => {
    expect(toNumber(3.2))
      .toBe(3.2);
  });
  // bug fix: the label previously said "0x2A" while the input under test is
  // the upper-case hex literal "0X2A".
  it("\"0X2A\"-> 42", () => {
    expect(toNumber("0X2A"))
      .toBe(42);
  });
  it("\"0b101010\"-> 42", () => {
    expect(toNumber("0b101010"))
      .toBe(42);
  });
  it("\"0o52\"-> 42", () => {
    expect(toNumber("0o52"))
      .toBe(42);
  });
  it("Symbol(\"a\")-> NaN", () => {
    isSymbol.mockReturnValueOnce(true);
    expect(toNumber(Symbol("a")))
      .toBe(NaN);
  });
  it("\"a\"-> NaN", () => {
    expect(toNumber("a"))
      .toBe(NaN);
  });
  it("undefined-> NaN", () => {
    expect(toNumber(undefined))
      .toBe(NaN);
  });
  it("null-> 0", () => {
    expect(toNumber(null))
      .toBe(0);
  });
  it("true-> 1", () => {
    expect(toNumber(true))
      .toBe(1);
  });
  it("false-> 0", () => {
    expect(toNumber(false))
      .toBe(0);
  });
  it("{}-> NaN", () => {
    isObject.mockReturnValueOnce(true);
    expect(toNumber({}))
      .toBe(NaN);
  });
});
|
#!/bin/bash
# bug fix: was #!/bin/sh, but the script uses bash-only features
# (arrays, the `function` keyword).
#
# Builds the Flutter Android APK, uploads it to Nexus and a local Tomcat
# webapp directory, then notifies a DingTalk group with download links.
set -e

echo "$BUILD_URL"
echo "$JOB_URL"
echo "$WORKSPACE"
VERSION_NAME=$(cat ./android/local.properties | grep versionName | cut -d'=' -f2)
LOCAL_IP=$(ipconfig getifaddr en0)
APK_PATH=app/$VERSION_NAME
WEBAPP_DIR=/Users/hawk/Library/apache-tomcat-9.0.17/webapps
SHARE_HOST=http://$LOCAL_IP/$APK_PATH
WEB_DIR=$WEBAPP_DIR/$APK_PATH
ETT_APP_NAME=wangxiao
NEXUS_HOST=http://192.168.10.8:18080
NEXUS_DIR=$NEXUS_HOST/nexus/service/local/repositories/EttAppReleases/content/com/online/$ETT_APP_NAME/android/$VERSION_NAME
NEXUS_JENKINS_NAME=jenkins
NEXUS_JENKINS_PASSWD=jenkins20100328
ETT_PACKAGE_PATH=$WORKSPACE/build/app/outputs/apk
DING_TOKEN=d7fb2719a1655eb9e067dd549a86385cc7f57e14a056fe52187da85c1adf3159
MEN_TO_NOTIFY='["18501378653","18612167007","18310511388"]'
flutter packages get
flutter clean
flutter build apk --release -v --flavor develop
# cd android
gradle -v
# gradle -pandroid -Pchannel -Pverbose=true -Ptarget=lib/main.dart -Ptrack-widget-creation=false -Pcompilation-trace-file=compilation.txt -Ptarget-platform=android-arm assembleRelease -xlint
gradle clean build -Pchannel -pandroid -xlint
content="Android $VERSION_NAME 最新打包预览,点击下载\n"
# Recursively walk $1; every .apk found is uploaded to Nexus, copied into the
# Tomcat webapp dir, and its share URL appended to $content.
function getdir() {
    echo "$1"
    for file in "$1"/*; do
        if test -f "$file"; then
            arr=(${arr[*]} $file)
            if [ "${file##*.}"x = "apk"x ]; then
                echo "found" "$file"
                name=$(basename "$file")
                # bug fix: was `echo name`, which printed the literal word
                echo "$name"
                curl -v -u $NEXUS_JENKINS_NAME:$NEXUS_JENKINS_PASSWD --upload-file "$file" "$NEXUS_DIR/$name"
                cp "$file" "$WEB_DIR"/
                content=$content$SHARE_HOST/$(basename "$file")'\n'
                echo ---
                echo "$content"
                echo ---
            fi
        else
            getdir "$file"
        fi
    done
}
getdir "$ETT_PACKAGE_PATH"
# echo ${arr[@]}
# DingTalk robot notification; atMobiles lists the phone numbers of the group
# members to @-mention.
pre='{"msgtype":"text","text":{"content":"'
post='"},"at":{"atMobiles":'$MEN_TO_NOTIFY',"isAtAll":false}}'
json=$pre$content$post
echo "$json"
# bug fixes: the URL was single-quoted so $DING_TOKEN never expanded (the
# literal string was sent), and the unquoted JSON payload was word-split.
curl "https://oapi.dingtalk.com/robot/send?access_token=$DING_TOKEN" -H 'Content-Type: application/json' -d "$json"
|
#!/bin/sh
# Evaluate the `bound` module on CIFAR-10 (seed 7) for every checkpoint path
# listed in all_weights.txt, running 4 jobs in parallel. {1} is the checkpoint
# path read from the input file; {#} is the GNU parallel job number, passed as
# experiment.gpu_id (with CUDA_VISIBLE_DEVICES exposing GPUs 0-3).
# NOTE(review): {#} increases monotonically per job, so gpu_id exceeds 3 after
# the first four jobs — presumably the consumer maps it onto a device; verify.
parallel -j 4 "export CUDA_VISIBLE_DEVICES=0,1,2,3; python -m bound experiment.target_weight_file={1} experiment.gpu_id={#} dataset=cifar10 experiment.seed=7" :::: ../scripts/cifar10/eval/seed-7/all_weights.txt
|
/* SPDX-License-Identifier: Apache-2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.commonservices.generichandlers;
import org.odpi.openmetadata.commonservices.ffdc.InvalidParameterHandler;
import org.odpi.openmetadata.commonservices.repositoryhandler.RepositoryHandler;
import org.odpi.openmetadata.frameworks.connectors.ffdc.InvalidParameterException;
import org.odpi.openmetadata.frameworks.connectors.ffdc.PropertyServerException;
import org.odpi.openmetadata.frameworks.connectors.ffdc.UserNotAuthorizedException;
import org.odpi.openmetadata.metadatasecurity.server.OpenMetadataServerSecurityVerifier;
import org.odpi.openmetadata.frameworks.auditlog.AuditLog;
import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.properties.instances.InstanceStatus;
import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.repositoryconnector.OMRSRepositoryHelper;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* GlossaryTermHandler retrieves Glossary Term objects from the property server. It runs server-side
* and retrieves Glossary Term entities through the OMRSRepositoryConnector.
*
* @param <B> class for the glossary term bean
*/
public class GlossaryTermHandler<B> extends ReferenceableHandler<B>
{
    /**
     * Construct the glossary term handler, caching the objects
     * needed to operate within a single server instance.
     *
     * @param converter specific converter for this bean class
     * @param beanClass name of bean class that is represented by the generic class B
     * @param serviceName name of this service
     * @param serverName name of the local server
     * @param invalidParameterHandler handler for managing parameter errors
     * @param repositoryHandler manages calls to the repository services
     * @param repositoryHelper provides utilities for manipulating the repository services objects
     * @param localServerUserId userId for this server
     * @param securityVerifier open metadata security services verifier
     * @param supportedZones list of zones that the access service is allowed to serve Asset instances from.
     * @param defaultZones list of zones that the access service should set in all new Asset instances.
     * @param publishZones list of zones that the access service sets up in published Asset instances.
     * @param auditLog destination for audit log events.
     */
    public GlossaryTermHandler(OpenMetadataAPIGenericConverter<B> converter,
                               Class<B>                           beanClass,
                               String                             serviceName,
                               String                             serverName,
                               InvalidParameterHandler            invalidParameterHandler,
                               RepositoryHandler                  repositoryHandler,
                               OMRSRepositoryHelper               repositoryHelper,
                               String                             localServerUserId,
                               OpenMetadataServerSecurityVerifier securityVerifier,
                               List<String>                       supportedZones,
                               List<String>                       defaultZones,
                               List<String>                       publishZones,
                               AuditLog                           auditLog)
    {
        super(converter,
              beanClass,
              serviceName,
              serverName,
              invalidParameterHandler,
              repositoryHandler,
              repositoryHelper,
              localServerUserId,
              securityVerifier,
              supportedZones,
              defaultZones,
              publishZones,
              auditLog);
    }
/**
* Create a new metadata element to represent a glossary term (or a subtype).
*
* @param userId calling user
* @param glossaryGUID unique identifier of the owning glossary
* @param glossaryGUIDParameterName parameter supplying glossaryGUID
* @param qualifiedName unique name for the category - used in other configuration
* @param displayName short display name for the term
* @param summary string text
* @param description description of the term
* @param examples string text
* @param abbreviation string text
* @param usage string text
* @param additionalProperties additional properties for a term
* @param typeName type name from the caller (enables creation of subtypes)
* @param extendedProperties properties for a term subtype
* @param initialStatus glossary term status to use when the object is created
* @param methodName calling method
*
* @return unique identifier of the new metadata element for the glossary term
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public String createGlossaryTerm(String userId,
String glossaryGUID,
String glossaryGUIDParameterName,
String qualifiedName,
String displayName,
String summary,
String description,
String examples,
String abbreviation,
String usage,
Map<String, String> additionalProperties,
String typeName,
Map<String, Object> extendedProperties,
InstanceStatus initialStatus,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
final String qualifiedNameParameterName = "qualifiedName";
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateName(qualifiedName, qualifiedNameParameterName, methodName);
InstanceStatus instanceStatus = InstanceStatus.ACTIVE;
if (initialStatus != null)
{
instanceStatus = initialStatus;
}
String typeGUID = invalidParameterHandler.validateTypeName(typeName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
serviceName,
methodName,
repositoryHelper);
GlossaryTermBuilder builder = new GlossaryTermBuilder(qualifiedName,
displayName,
summary,
description,
examples,
abbreviation,
usage,
additionalProperties,
extendedProperties,
instanceStatus,
repositoryHelper,
serviceName,
serverName);
String glossaryTermGUID = this.createBeanInRepository(userId,
null,
null,
typeGUID,
typeName,
qualifiedName,
OpenMetadataAPIMapper.QUALIFIED_NAME_PROPERTY_NAME,
builder,
methodName);
if (glossaryTermGUID != null)
{
/*
* Link the term to its glossary.
*/
final String glossaryTermGUIDParameterName = "glossaryTermGUID";
this.linkElementToElement(userId,
null,
null,
glossaryGUID,
glossaryGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TYPE_NAME,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.TERM_ANCHOR_TYPE_GUID,
OpenMetadataAPIMapper.TERM_ANCHOR_TYPE_NAME,
null,
methodName);
}
return glossaryTermGUID;
}
/**
* Create a new metadata element to represent a glossary term using an existing metadata element as a template.
*
* @param userId calling user
* @param templateGUID unique identifier of the metadata element to copy
* @param qualifiedName unique name for the term - used in other configuration
* @param displayName short display name for the term
* @param description description of the term
* @param methodName calling method
*
* @return unique identifier of the new metadata element for the glossary term
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public String createGlossaryTermFromTemplate(String userId,
String templateGUID,
String qualifiedName,
String displayName,
String description,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
final String templateGUIDParameterName = "templateGUID";
final String qualifiedNameParameterName = "qualifiedName";
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(templateGUID, templateGUIDParameterName, methodName);
invalidParameterHandler.validateName(qualifiedName, qualifiedNameParameterName, methodName);
GlossaryTermBuilder builder = new GlossaryTermBuilder(qualifiedName,
displayName,
description,
repositoryHelper,
serviceName,
serverName);
return this.createBeanFromTemplate(userId,
null,
null,
templateGUID,
templateGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
qualifiedName,
OpenMetadataAPIMapper.QUALIFIED_NAME_PROPERTY_NAME,
builder,
methodName);
}
/**
* Update the properties of the metadata element representing a glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the glossary term to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryGUID
* @param qualifiedName unique name for the category - used in other configuration
* @param displayName short display name for the term
* @param summary string text
* @param description description of the term
* @param examples string text
* @param abbreviation string text
* @param usage string text
* @param additionalProperties additional properties for a term
* @param typeName type name from the caller (enables creation of subtypes)
* @param extendedProperties properties for a term subtype
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void updateGlossaryTerm(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String qualifiedName,
String displayName,
String summary,
String description,
String examples,
String abbreviation,
String usage,
Map<String, String> additionalProperties,
String typeName,
Map<String, Object> extendedProperties,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
final String qualifiedNameParameterName = "qualifiedName";
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
invalidParameterHandler.validateName(qualifiedName, qualifiedNameParameterName, methodName);
String typeGUID = invalidParameterHandler.validateTypeName(typeName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
serviceName,
methodName,
repositoryHelper);
GlossaryTermBuilder builder = new GlossaryTermBuilder(qualifiedName,
displayName,
summary,
description,
examples,
abbreviation,
usage,
additionalProperties,
extendedProperties,
InstanceStatus.ACTIVE,
repositoryHelper,
serviceName,
serverName);
this.updateBeanInRepository(userId,
null,
null,
glossaryTermGUID,
glossaryTermGUIDParameterName,
typeGUID,
typeName,
builder.getInstanceProperties(methodName),
false,
methodName);
}
/**
* Update the status of the metadata element representing a glossary term. This is only valid on
* a controlled glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the glossary term to update
* @param glossaryTermGUIDParameterName parameter name for glossaryTermGUID
* @param glossaryTermStatus new status value for the glossary term
* @param glossaryTermStatusParameterName parameter name for the status value
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void updateGlossaryTermStatus(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
InstanceStatus glossaryTermStatus,
String glossaryTermStatusParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
this.updateBeanStatusInRepository(userId,
null,
null,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
supportedZones,
glossaryTermStatus,
glossaryTermStatusParameterName,
methodName);
}
/**
* Link a term to a category.
*
* @param userId calling user
* @param glossaryCategoryGUID unique identifier of the glossary category
* @param glossaryCategoryGUIDParameterName parameter supplying glossaryCategoryGUID
* @param glossaryTermGUID unique identifier of the glossary term
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param description description of the categorization
* @param relationshipStatus ordinal for the relationship status enum
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setupTermCategory(String userId,
String glossaryCategoryGUID,
String glossaryCategoryGUIDParameterName,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String description,
int relationshipStatus,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryCategoryGUID, glossaryCategoryGUIDParameterName, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
GlossaryTermBuilder builder = new GlossaryTermBuilder(repositoryHelper, serviceName, serverName);
this.linkElementToElement(userId,
null,
null,
glossaryCategoryGUID,
glossaryCategoryGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_CATEGORY_TYPE_NAME,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.TERM_CATEGORIZATION_TYPE_GUID,
OpenMetadataAPIMapper.TERM_CATEGORIZATION_TYPE_NAME,
builder.getTermCategorizationProperties(description, relationshipStatus, methodName),
methodName);
}
/**
* Unlink a term from a category.
*
* @param userId calling user
* @param glossaryCategoryGUID unique identifier of the glossary category
* @param glossaryCategoryGUIDParameterName parameter supplying glossaryCategoryGUID
* @param glossaryTermGUID unique identifier of the glossary term
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermCategory(String userId,
String glossaryCategoryGUID,
String glossaryCategoryGUIDParameterName,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryCategoryGUID, glossaryCategoryGUIDParameterName, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.unlinkElementFromElement(userId,
false,
null,
null,
glossaryCategoryGUID,
glossaryCategoryGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_CATEGORY_TYPE_NAME,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.TERM_CATEGORIZATION_TYPE_GUID,
OpenMetadataAPIMapper.TERM_CATEGORIZATION_TYPE_NAME,
methodName);
}
/**
* Link two terms together using a specialist relationship.
*
* @param userId calling user
* @param glossaryTermOneGUID unique identifier of the glossary term at end 1
* @param glossaryTermOneGUIDParameterName parameter supplying glossaryTermOneGUID
* @param relationshipTypeName name of the type of relationship to create
* @param relationshipTypeParameterName name of parameter passing the relationship
* @param glossaryTermTwoGUID unique identifier of the glossary term at end 2
* @param glossaryTermTwoGUIDParameterName parameter supplying glossaryTermTwoGUID
* @param description description of the relationship
* @param expression expression that describes the relationship
* @param relationshipStatus ordinal for the relationship status enum (draft, active, deprecated, obsolete, other)
* @param steward user id or name of steward id who assigned the relationship (or approved the discovered value).
* @param source id of the source of the knowledge of the relationship
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setupTermRelationship(String userId,
String glossaryTermOneGUID,
String glossaryTermOneGUIDParameterName,
String relationshipTypeName,
String relationshipTypeParameterName,
String glossaryTermTwoGUID,
String glossaryTermTwoGUIDParameterName,
String expression,
String description,
int relationshipStatus,
String steward,
String source,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermOneGUID, glossaryTermOneGUIDParameterName, methodName);
invalidParameterHandler.validateGUID(glossaryTermTwoGUID, glossaryTermTwoGUIDParameterName, methodName);
invalidParameterHandler.validateName(relationshipTypeName, relationshipTypeParameterName, methodName);
String relationshipTypeGUID = invalidParameterHandler.validateTypeName(relationshipTypeName,
relationshipTypeName,
serviceName,
methodName,
repositoryHelper);
GlossaryTermBuilder builder = new GlossaryTermBuilder(repositoryHelper, serviceName, serverName);
this.linkElementToElement(userId,
null,
null,
glossaryTermOneGUID,
glossaryTermOneGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
glossaryTermTwoGUID,
glossaryTermTwoGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
relationshipTypeGUID,
relationshipTypeName,
builder.getTermRelationshipProperties(expression,
description,
relationshipStatus,
steward,
source,
methodName),
methodName);
}
/**
* Update the relationship properties for the two terms.
*
* @param userId calling user
* @param glossaryTermOneGUID unique identifier of the glossary term at end 1
* @param glossaryTermOneGUIDParameterName parameter supplying glossaryTermOneGUID
* @param relationshipTypeName name of the type of relationship to create
* @param relationshipTypeParameterName name of parameter passing the relationship
* @param glossaryTermTwoGUID unique identifier of the glossary term at end 2
* @param glossaryTermTwoGUIDParameterName parameter supplying glossaryTermTwoGUID
* @param description description of the relationship
* @param expression expression that describes the relationship
* @param relationshipStatus ordinal for the relationship status enum (draft, active, deprecated, obsolete, other)
* @param steward user id or name of steward id who assigned the relationship (or approved the discovered value).
* @param source id of the source of the knowledge of the relationship
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void updateTermRelationship(String userId,
String glossaryTermOneGUID,
String glossaryTermOneGUIDParameterName,
String relationshipTypeName,
String relationshipTypeParameterName,
String glossaryTermTwoGUID,
String glossaryTermTwoGUIDParameterName,
String expression,
String description,
int relationshipStatus,
String steward,
String source,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermOneGUID, glossaryTermOneGUIDParameterName, methodName);
invalidParameterHandler.validateGUID(glossaryTermTwoGUID, glossaryTermTwoGUIDParameterName, methodName);
invalidParameterHandler.validateName(relationshipTypeName, relationshipTypeParameterName, methodName);
String relationshipTypeGUID = invalidParameterHandler.validateTypeName(relationshipTypeName,
relationshipTypeName,
serviceName,
methodName,
repositoryHelper);
GlossaryTermBuilder builder = new GlossaryTermBuilder(repositoryHelper, serviceName, serverName);
this.updateElementToElementLink(userId,
null,
null,
glossaryTermOneGUID,
glossaryTermOneGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
glossaryTermTwoGUID,
glossaryTermTwoGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
relationshipTypeGUID,
relationshipTypeName,
builder.getTermRelationshipProperties(expression,
description,
relationshipStatus,
steward,
source,
methodName),
methodName);
}
/**
* Remove the relationship between two terms.
*
* @param userId calling user
* @param glossaryTermOneGUID unique identifier of the glossary term at end 1
* @param glossaryTermOneGUIDParameterName parameter supplying glossaryTermOneGUID
* @param relationshipTypeName name of the type of relationship to create
* @param relationshipTypeParameterName name of parameter passing the relationship
* @param glossaryTermTwoGUID unique identifier of the glossary term at end 2
* @param glossaryTermTwoGUIDParameterName parameter supplying glossaryTermTwoGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermRelationship(String userId,
String glossaryTermOneGUID,
String glossaryTermOneGUIDParameterName,
String relationshipTypeName,
String relationshipTypeParameterName,
String glossaryTermTwoGUID,
String glossaryTermTwoGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermOneGUID, glossaryTermOneGUIDParameterName, methodName);
invalidParameterHandler.validateGUID(glossaryTermTwoGUID, glossaryTermTwoGUIDParameterName, methodName);
invalidParameterHandler.validateName(relationshipTypeName, relationshipTypeParameterName, methodName);
String relationshipTypeGUID = invalidParameterHandler.validateTypeName(relationshipTypeName,
relationshipTypeName,
serviceName,
methodName,
repositoryHelper);
this.unlinkElementFromElement(userId,
false,
null,
null,
glossaryTermOneGUID,
glossaryTermOneGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
glossaryTermTwoGUID,
glossaryTermTwoGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
relationshipTypeGUID,
relationshipTypeName,
methodName);
}
/**
* Classify the glossary term to indicate that it describes an abstract concept.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsAbstractConcept(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.ABSTRACT_CONCEPT_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.ABSTRACT_CONCEPT_CLASSIFICATION_TYPE_NAME,
null,
methodName);
}
/**
* Remove the abstract concept designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsAbstractConcept(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.ABSTRACT_CONCEPT_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.ABSTRACT_CONCEPT_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Classify the glossary term to indicate that it describes a data value.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsDataValue(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.DATA_VALUE_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.DATA_VALUE_CLASSIFICATION_TYPE_NAME,
null,
methodName);
}
/**
* Remove the data value designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsDataValue(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.DATA_VALUE_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.DATA_VALUE_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Classify the glossary term to indicate that it describes a data value.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param activityType ordinal for type of activity
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsActivity(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
int activityType,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
GlossaryTermBuilder builder = new GlossaryTermBuilder(repositoryHelper, serviceName, serverName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.ACTIVITY_DESC_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.ACTIVITY_DESC_CLASSIFICATION_TYPE_NAME,
builder.getActivityTypeProperties(activityType, methodName),
methodName);
}
/**
* Remove the activity designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsActivity(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.ACTIVITY_DESC_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.ACTIVITY_DESC_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Classify the glossary term to indicate that it describes a context.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param description description of the context
* @param scope the scope of where the context applies
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsContext(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String description,
String scope,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
GlossaryTermBuilder builder = new GlossaryTermBuilder(repositoryHelper, serviceName, serverName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.CONTEXT_DEFINITION_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.CONTEXT_DEFINITION_CLASSIFICATION_TYPE_NAME,
builder.getContextDescriptionProperties(description, scope, methodName),
methodName);
}
/**
* Remove the context definition designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsContext(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.CONTEXT_DEFINITION_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.CONTEXT_DEFINITION_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Classify the glossary term to indicate that it describes a spine object.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsSpineObject(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.SPINE_OBJECT_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.SPINE_OBJECT_CLASSIFICATION_TYPE_NAME,
null,
methodName);
}
/**
* Remove the spine object designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsSpineObject(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.SPINE_OBJECT_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.SPINE_OBJECT_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Classify the glossary term to indicate that it describes a spine attribute.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsSpineAttribute(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.SPINE_ATTRIBUTE_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.SPINE_ATTRIBUTE_CLASSIFICATION_TYPE_NAME,
null,
methodName);
}
/**
* Remove the spine attribute designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsSpineAttribute(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.SPINE_ATTRIBUTE_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.SPINE_ATTRIBUTE_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Classify the glossary term to indicate that it describes an object identifier.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void setTermAsObjectIdentifier(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.setClassificationInRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.OBJECT_IDENTIFIER_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.OBJECT_IDENTIFIER_CLASSIFICATION_TYPE_NAME,
null,
methodName);
}
/**
* Remove the object identifier designation from the glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param glossaryTermGUIDParameterName parameter supplying glossaryTermGUID
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void clearTermAsObjectIdentifier(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
invalidParameterHandler.validateUserId(userId, methodName);
invalidParameterHandler.validateGUID(glossaryTermGUID, glossaryTermGUIDParameterName, methodName);
this.removeClassificationFromRepository(userId,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
OpenMetadataAPIMapper.OBJECT_IDENTIFIER_CLASSIFICATION_TYPE_GUID,
OpenMetadataAPIMapper.OBJECT_IDENTIFIER_CLASSIFICATION_TYPE_NAME,
methodName);
}
/**
* Remove the metadata element representing a glossary term.
*
* @param userId calling user
* @param glossaryTermGUID unique identifier of the metadata element to update
* @param methodName calling method
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public void removeGlossaryTerm(String userId,
String glossaryTermGUID,
String glossaryTermGUIDParameterName,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
this.deleteBeanInRepository(userId,
null,
null,
glossaryTermGUID,
glossaryTermGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
null,
null,
methodName);
}
/**
* Returns the glossary term object corresponding to the supplied term name.
*
* @param userId String - userId of user making request.
* @param name this may be the qualifiedName or displayName of the term.
* @param nameParameterName property that provided the name
* @param startFrom index of the list to start from (0 for start)
* @param pageSize maximum number of elements to return.
* @param methodName calling method
*
* @return List of glossary terms retrieved from property server
* @throws InvalidParameterException one of the parameters is null or invalid.
* @throws PropertyServerException there is a problem retrieving information from the property (metadata) server.
* @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
*/
public List<B> getTermsByName(String userId,
String name,
String nameParameterName,
int startFrom,
int pageSize,
String methodName) throws InvalidParameterException,
PropertyServerException,
UserNotAuthorizedException
{
List<String> specificMatchPropertyNames = new ArrayList<>();
specificMatchPropertyNames.add(OpenMetadataAPIMapper.QUALIFIED_NAME_PROPERTY_NAME);
specificMatchPropertyNames.add(OpenMetadataAPIMapper.DISPLAY_NAME_PROPERTY_NAME);
return this.getBeansByValue(userId,
name,
nameParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
specificMatchPropertyNames,
true,
startFrom,
pageSize,
methodName);
}
/**
* Returns the glossary term object containing the supplied term name. This may include wildcard characters
*
* @param userId String - userId of user making request.
* @param name this may be the qualifiedName or displayName of the term
* @param nameParameterName property that provided the name - interpreted as a to be a regular expression
* @param startFrom index of the list to start from (0 for start)
* @param pageSize maximum number of elements to return.
* @param methodName calling method
*
* @return List of glossary terms retrieved from property server
* @throws InvalidParameterException one of the parameters is null or invalid.
* @throws PropertyServerException there is a problem retrieving information from the property (metadata) server.
* @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
*/
public List<B> findTerms(String userId,
String name,
String nameParameterName,
int startFrom,
int pageSize,
String methodName) throws InvalidParameterException,
PropertyServerException,
UserNotAuthorizedException
{
List<String> specificMatchPropertyNames = new ArrayList<>();
specificMatchPropertyNames.add(OpenMetadataAPIMapper.QUALIFIED_NAME_PROPERTY_NAME);
specificMatchPropertyNames.add(OpenMetadataAPIMapper.DISPLAY_NAME_PROPERTY_NAME);
return this.getBeansByValue(userId,
name,
nameParameterName,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_GUID,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
specificMatchPropertyNames,
false,
startFrom,
pageSize,
methodName);
}
/**
* Returns the glossary term object corresponding to the supplied glossary term GUID.
*
* @param userId String - userId of user making request
* @param guid the unique id for the glossary term within the property server
* @param guidParameter name of parameter supplying the guid
* @param methodName calling method
*
* @return Glossary Term retrieved from the property server
* @throws InvalidParameterException one of the parameters is null or invalid.
* @throws PropertyServerException there is a problem retrieving information from the property (metadata) server.
* @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
*/
public B getTerm(String userId,
String guid,
String guidParameter,
String methodName) throws InvalidParameterException,
PropertyServerException,
UserNotAuthorizedException
{
return this.getBeanFromRepository(userId,
guid,
guidParameter,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
methodName);
}
/**
* Retrieve the list of glossary terms associated with a glossary.
*
* @param userId calling user
* @param glossaryGUID unique identifier of the glossary of interest
* @param glossaryGUIDParameterName property supplying the glossaryGUID
* @param startFrom paging start point
* @param pageSize maximum results that can be returned
* @param methodName calling method
*
* @return list of associated metadata elements
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public List<B> getTermsForGlossary(String userId,
String glossaryGUID,
String glossaryGUIDParameterName,
int startFrom,
int pageSize,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
return this.getAttachedElements(userId,
glossaryGUID,
glossaryGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_TYPE_NAME,
OpenMetadataAPIMapper.TERM_ANCHOR_TYPE_GUID,
OpenMetadataAPIMapper.TERM_ANCHOR_TYPE_NAME,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
startFrom,
pageSize,
methodName);
}
/**
* Retrieve the list of glossary terms associated with a glossary category.
*
* @param userId calling user
* @param glossaryCategoryGUID unique identifier of the glossary category of interest
* @param glossaryCategoryGUIDParameterName property supplying the glossaryCategoryGUID
* @param startFrom paging start point
* @param pageSize maximum results that can be returned
* @param methodName calling method
*
* @return list of associated metadata elements
*
* @throws InvalidParameterException one of the parameters is invalid
* @throws UserNotAuthorizedException the user is not authorized to issue this request
* @throws PropertyServerException there is a problem reported in the open metadata server(s)
*/
public List<B> getTermsForGlossaryCategory(String userId,
String glossaryCategoryGUID,
String glossaryCategoryGUIDParameterName,
int startFrom,
int pageSize,
String methodName) throws InvalidParameterException,
UserNotAuthorizedException,
PropertyServerException
{
return this.getAttachedElements(userId,
glossaryCategoryGUID,
glossaryCategoryGUIDParameterName,
OpenMetadataAPIMapper.GLOSSARY_CATEGORY_TYPE_NAME,
OpenMetadataAPIMapper.TERM_CATEGORIZATION_TYPE_GUID,
OpenMetadataAPIMapper.TERM_CATEGORIZATION_TYPE_NAME,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
startFrom,
pageSize,
methodName);
}
/**
* Return the glossary terms attached to a supplied entity through the semantic assignment.
*
* @param userId calling user
* @param elementGUID identifier for the entity that the feedback is attached to
* @param elementGUIDParameterName name of parameter supplying the GUID
* @param elementTypeName name of the type of object being attached to
* @param serviceSupportedZones supported zones for calling service
* @param startingFrom where to start from in the list
* @param pageSize maximum number of results that can be returned
* @param methodName calling method
* @return list of objects or null if none found
* @throws InvalidParameterException the input properties are invalid
* @throws UserNotAuthorizedException user not authorized to issue this request
* @throws PropertyServerException problem accessing the property server
*/
public List<B> getAttachedMeanings(String userId,
String elementGUID,
String elementGUIDParameterName,
String elementTypeName,
List<String> serviceSupportedZones,
int startingFrom,
int pageSize,
String methodName) throws InvalidParameterException,
PropertyServerException,
UserNotAuthorizedException
{
return this.getAttachedElements(userId,
elementGUID,
elementGUIDParameterName,
elementTypeName,
OpenMetadataAPIMapper.REFERENCEABLE_TO_MEANING_TYPE_GUID,
OpenMetadataAPIMapper.REFERENCEABLE_TO_MEANING_TYPE_NAME,
OpenMetadataAPIMapper.GLOSSARY_TERM_TYPE_NAME,
serviceSupportedZones,
startingFrom,
pageSize,
methodName);
}
}
|
<reponame>balankarthikeyan/pencil-art-porfolio
export * from './About'
export * from './Header'
export * from './Contact'
export * from './Portfolio'
export * from './GridLayout'
|
/**
* Copyright (c) 2012-2013 <NAME>
*
* This file is part of css.java.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jknack.css.expression;
import static org.apache.commons.lang3.Validate.notNull;
import com.github.jknack.css.Expression;
public abstract class AbstractExpression implements Expression {
@Override
public <E extends Expression> E adapt(final Class<E> expressionType) {
notNull(expressionType, "The expressionType is required.");
if (expressionType.isInstance(this)) {
return expressionType.cast(this);
}
return null;
}
}
|
var searchData=
[
['hash',['hash',['../structgeopm__region__info__s.html#a90c009e856bd20774b560ab279d35cce',1,'geopm_region_info_s']]],
['hint',['hint',['../structgeopm__region__info__s.html#ab558192f5e29fc7c5a63054bd7fc8d91',1,'geopm_region_info_s']]]
];
|
package com.sphereon.factom.identity.did.request;
import com.sphereon.factom.identity.did.DIDVersion;
import com.sphereon.factom.identity.did.IdentityFactory;
import com.sphereon.factom.identity.did.entry.CreateFactomDIDEntry;
import foundation.identity.did.DID;
import foundation.identity.did.DIDDocument;
import org.blockchain_innovation.factom.client.impl.Networks;
import org.factomprotocol.identity.did.model.DidMethodVersion;
import org.factomprotocol.identity.did.model.FactomDidContent;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static com.sphereon.factom.identity.did.Constants.DID.DID_FACTOM;
public class CreateFactomDidRequest {
private final DIDVersion didVersion;
private final List<CreateKeyRequest> managementKeys;
private final List<CreateKeyRequest> didKeys;
private final List<CreateServiceRequest> services;
private final String[] tags;
private final String nonce;
private final String networkName;
private CreateFactomDidRequest(DIDVersion didVersion, String networkName, List<CreateKeyRequest> managementKeys, List<CreateKeyRequest> didKey, List<CreateServiceRequest> service, String nonce, String... tags) {
this.didVersion = didVersion;
this.networkName = networkName;
this.managementKeys = managementKeys;
this.didKeys = didKey;
this.services = service;
this.nonce = nonce;
this.tags = tags;
}
public String getNonce() {
return this.nonce;
}
public String[] getTags() {
return this.tags;
}
public DIDVersion getDidVersion() {
return this.didVersion;
}
public FactomDidContent toFactomDidContent() {
final CreateFactomDIDEntry createFactomDIDEntry = toCreateFactomDIDEntry();
String chainId = createFactomDIDEntry.getChainId();
String did = getDidURL(chainId);
return new FactomDidContent()
.didMethodVersion(DidMethodVersion.fromValue(didVersion.getSchemaVersion()))
.didKey(this.didKeys.stream().map(key -> key.toDidKey(did))
.collect(Collectors.toList()))
.managementKey(this.managementKeys.stream().map(key -> key.toManagementKey(did))
.collect(Collectors.toList()))
.service(this.services.stream().map(didService -> didService.toService(did))
.collect(Collectors.toList()));
}
@NotNull
private CreateFactomDIDEntry toCreateFactomDIDEntry() {
return new CreateFactomDIDEntry(this.didVersion, null, this.nonce, this.tags);
}
public DIDDocument toDIDDocument() {
return new IdentityFactory().toDid(toCreateFactomDIDEntry().getChainId(), toFactomDidContent());
}
private String getDidURL(String chainId) {
if (networkName == null || Networks.MAINNET.equalsIgnoreCase(networkName)) {
return DID_FACTOM + chainId;
}
return DID_FACTOM + networkName + ':' + chainId;
}
public static final class Builder {
private DIDVersion didVersion;
private List<CreateKeyRequest> managementKeys;
private List<CreateKeyRequest> didKeys;
private List<CreateServiceRequest> services;
private List<String> tags;
private String nonce;
private String networkName;
public Builder() {
}
public Builder didVersion(DIDVersion didVersion) {
this.didVersion = didVersion;
return this;
}
public Builder managementKeys(List<CreateKeyRequest> managementKeys) {
this.managementKeys = managementKeys;
return this;
}
public Builder managementKey(CreateKeyRequest managementKey) {
if (this.managementKeys == null) {
this.managementKeys = new ArrayList<>(Arrays.asList(managementKey));
} else if (!managementKeys.contains(managementKey)){
this.managementKeys.add(managementKey);
}
return this;
}
public Builder didKeys(List<CreateKeyRequest> didKeys) {
this.didKeys = didKeys;
return this;
}
public Builder didKey(CreateKeyRequest didKey) {
if (this.didKeys == null) {
this.didKeys = new ArrayList<>(Arrays.asList(didKey));
} else if (!didKeys.contains(didKey)) {
this.didKeys.add(didKey);
}
return this;
}
public Builder services(List<CreateServiceRequest> services) {
this.services = services;
return this;
}
public Builder service(CreateServiceRequest service) {
if (this.services == null) {
this.services = new ArrayList<>(Arrays.asList(service));
} else if (!services.contains(service)) {
this.services.add(service);
}
return this;
}
public Builder tag(String tag) {
if (this.tags == null) {
this.tags = new ArrayList<>(Arrays.asList(tag));
} else {
this.tags.add(tag);
}
return this;
}
public Builder nonce(String nonce) {
this.nonce = nonce;
return this;
}
public Builder networkName(String networkName) {
this.networkName = networkName;
return this;
}
public CreateFactomDidRequest build() throws IncompleteRequestException {
if (this.services == null) {
this.services = Collections.emptyList();
}
this.assertComplete();
return new CreateFactomDidRequest(
didVersion,
networkName,
managementKeys,
didKeys,
services,
nonce,
tags.toArray(new String[0])
);
}
private void assertComplete() throws IncompleteRequestException {
if (this.didKeys == null || this.didKeys.size() == 0) {
throw new IncompleteRequestException("At least one DID key is required to create a new DID");
}
if (this.managementKeys == null || this.managementKeys.size() == 0) {
throw new IncompleteRequestException("At least one management key is required to create a new DID");
}
}
}
}
|
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/dlp/deid.py
# https://cloud.google.com/docs/authentication/production
# https://cloud.google.com/dlp/docs/infotypes-reference
pip install --upgrade google-cloud-dlp
python deidentify_with_mask.py deid_mask \
'[PROJECT_ID]' \
'My name is Alicia Abernathy, and my email address is aabernathy@example.com.' \
--info_types ALL_BASIC \
-m '#'
# where [PROJECT_ID] is your project ID.
# OUT: My name is ################, and my email address is ######################.
|
# routes.rb
Rails.application.routes.draw do
# posts resource
resources :posts
end
# posts_controller.rb
class PostsController < ApplicationController
# create post
def create
@post = Post.create(post_params)
render json: @post, status: :created
end
# show post
def show
render json: Post.find(params[:id])
end
# update post
def update
@post = Post.find(params[:id])
@post.update(post_params)
render json: @post, status: :ok
end
# delete post
def destroy
@post = Post.find(params[:id])
@post.destroy
render json: {message: 'Post deleted'}, status: :ok
end
private
def post_params
params.require(:post).permit(:title, :body, :author, :date)
end
end |
#pragma once
#include <typed-geometry/types/vec.hh>
namespace tg
{
namespace detail
{
template <class ScalarT>
constexpr ScalarT& csubscript(vec<1, ScalarT>& v, int)
{
return v.x;
}
template <class ScalarT>
constexpr ScalarT const& csubscript(vec<1, ScalarT> const& v, int)
{
return v.x;
}
template <class ScalarT>
constexpr ScalarT const& csubscript(vec<2, ScalarT> const& v, int i)
{
if (i == 0)
return v.x;
else
return v.y;
}
template <class ScalarT>
constexpr ScalarT& csubscript(vec<2, ScalarT>& v, int i)
{
if (i == 0)
return v.x;
else
return v.y;
}
template <class ScalarT>
constexpr ScalarT const& csubscript(vec<3, ScalarT> const& v, int i)
{
if (i == 0)
return v.x;
else if (i == 1)
return v.y;
else
return v.z;
}
template <class ScalarT>
constexpr ScalarT& csubscript(vec<3, ScalarT>& v, int i)
{
if (i == 0)
return v.x;
else if (i == 1)
return v.y;
else
return v.z;
}
template <class ScalarT>
constexpr ScalarT& csubscript(vec<4, ScalarT>& v, int i)
{
if (i == 0)
return v.x;
else if (i == 1)
return v.y;
else if (i == 2)
return v.z;
else
return v.w;
}
template <class ScalarT>
constexpr ScalarT const& csubscript(vec<4, ScalarT> const& v, int i)
{
if (i == 0)
return v.x;
else if (i == 1)
return v.y;
else if (i == 2)
return v.z;
else
return v.w;
}
} // namespace detail
} // namespace tg
|
<reponame>belliappa/promotego-org<filename>vendor/plugins/geokit/test/base_geocoder_test.rb
require 'test/unit'
require 'net/http'
require 'rubygems'
require 'mocha'
require File.join(File.dirname(__FILE__), '../../../../config/environment')
class MockSuccess < Net::HTTPSuccess #:nodoc: all
def initialize
end
end
class MockFailure < Net::HTTPServiceUnavailable #:nodoc: all
def initialize
end
end
# Base class for testing geocoders.
class BaseGeocoderTest < Test::Unit::TestCase #:nodoc: all
# Defines common test fixtures.
def setup
@address = 'San Francisco, CA'
@full_address = '100 Spear St, San Francisco, CA, 94105-1522, US'
@full_address_short_zip = '100 Spear St, San Francisco, CA, 94105, US'
@success = GeoKit::GeoLoc.new({:city=>"SAN FRANCISCO", :state=>"CA", :country_code=>"US", :lat=>37.7742, :lng=>-122.417068})
@success.success = true
end
def test_timeout_call_web_service
GeoKit::Geocoders::Geocoder.class_eval do
def self.do_get(url)
sleep(2)
end
end
url = "http://www.anything.com"
GeoKit::Geocoders::timeout = 1
assert_nil GeoKit::Geocoders::Geocoder.call_geocoder_service(url)
end
def test_successful_call_web_service
url = "http://www.anything.com"
GeoKit::Geocoders::Geocoder.expects(:do_get).with(url).returns("SUCCESS")
assert_equal "SUCCESS", GeoKit::Geocoders::Geocoder.call_geocoder_service(url)
end
def test_find_geocoder_methods
public_methods = GeoKit::Geocoders::Geocoder.public_methods
assert public_methods.include?("yahoo_geocoder")
assert public_methods.include?("google_geocoder")
assert public_methods.include?("ca_geocoder")
assert public_methods.include?("us_geocoder")
assert public_methods.include?("multi_geocoder")
assert public_methods.include?("ip_geocoder")
end
end |
#!/bin/bash
set -euo pipefail
anchor build
mkdir -p app/src/lib/idl
for file in todo; do
METADATA=$(solana address -k target/deploy/${file}-keypair.json)
jq --arg id ${METADATA} '. + {"metadata":{ "address": $id }}' target/idl/${file}.json > app/src/lib/idl/${file}.json
done
|
<gh_stars>1-10
package com.zutubi.android.ant;
import static java.util.Arrays.asList;
import static junit.framework.Assert.assertEquals;
import org.junit.Test;
import java.util.ArrayList;
/**
 * Unit tests for {@code Version}: parsing of dotted version strings,
 * rejection of malformed input, bumping of the last element, and
 * round-tripping back through {@code toString()}.
 */
public class VersionTest {
    // --- malformed input must be rejected with IllegalArgumentException ---

    @Test(expected = IllegalArgumentException.class)
    public void testParseEmpty() {
        new Version("");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseDot() {
        new Version(".");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseInvalidElement() {
        new Version("1.hello.2");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseEmptyElement() {
        new Version("1..2");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseEmptyElementFirst() {
        new Version(".1.2");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseEmptyElementLast() {
        new Version("1.2.");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoElements() {
        new Version(new ArrayList<Integer>());
    }

    // --- well-formed input parses into the expected element list ---

    @Test
    public void testParseSingleElement() {
        final Version parsed = new Version("42");
        assertEquals(asList(42), parsed.getElements());
    }

    @Test
    public void testParseMultipleElements() {
        final Version parsed = new Version("2.4.54");
        assertEquals(asList(2, 4, 54), parsed.getElements());
    }

    @Test
    public void testParseNegativeElement() {
        final Version parsed = new Version("22.0.-1");
        assertEquals(asList(22, 0, -1), parsed.getElements());
    }

    // --- bump() increments only the final element ---

    @Test
    public void testBumpSingleElement() {
        final Version original = new Version("3");
        assertEquals(new Version("4"), original.bump());
    }

    @Test
    public void testBumpMultipleElements() {
        final Version original = new Version("2.6.11");
        assertEquals(new Version("2.6.12"), original.bump());
    }

    // --- toString() reproduces the dotted form ---

    @Test
    public void testToStringSingleElement() {
        final Version single = new Version("11");
        assertEquals("11", single.toString());
    }

    @Test
    public void testToStringMultipleElements() {
        final Version multi = new Version("11.0.1");
        assertEquals("11.0.1", multi.toString());
    }
}
|
<reponame>lananh265/social-network<gh_stars>1-10
"use strict";
// Auto-generated icon module (Font Awesome "steam-square" glyph) compiled to
// CommonJS by a build tool - do not edit by hand.

Object.defineProperty(exports, "__esModule", {
  value: true
});
// Pre-declare the export so the live binding exists before initialization
// (standard output of the ESM-to-CJS transform).
exports.steamSquare = void 0;
// SVG description: viewBox plus a single <path> child carrying the glyph
// outline data.
var steamSquare = {
  "viewBox": "0 0 1536 1792",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M1242 647q0-80-57-136.5t-137-56.5-136.5 57-56.5 136q0 80 56.5 136.5t136.5 56.5 137-56.5 57-136.5zM632 1235q0 83-58 140.5t-140 57.5q-56 0-103-29t-72-77q52 20 98 40 60 24 120-1.5t85-86.5q24-60-1.5-120t-86.5-84l-82-33q22-5 42-5 82 0 140 57.5t58 140.5zM1536 416v960q0 119-84.5 203.5t-203.5 84.5h-960q-119 0-203.5-84.5t-84.5-203.5v-153l172 69q20 92 93.5 152t168.5 60q104 0 181-70t87-173l345-252q150 0 255.5-105.5t105.5-254.5q0-150-105.5-255.5t-255.5-105.5q-148 0-253 104.5t-107 252.5l-225 322q-9-1-28-1-75 0-137 37l-297-119v-468q0-119 84.5-203.5t203.5-84.5h960q119 0 203.5 84.5t84.5 203.5zM1289 649q0 100-71 170.5t-171 70.5-170.5-70.5-70.5-170.5 70.5-171 170.5-71q101 0 171.5 70.5t70.5 171.5z"
    }
  }]
};
exports.steamSquare = steamSquare;
#!/bin/bash
set -euo pipefail

# Build a gitconfig from simple aliases, config options, and "anonymous
# function" aliases generated from the shell scripts under
# anonymous_functions/. Pass -g to write to ~/.gitconfig instead of the local
# ./.gitconfig; an optional positional argument overrides the glob used to
# locate the anonymous-function scripts.
make-gitconfig() {
  local alias_body=''
  local alias_name=''
  local base_filename=''
  gitconfig_path=''  # intentionally not local: read by the EXIT trap below
  local -r script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
  local temp_alias_file=''
  local global_flg=0
  local -r red=$(tput setaf 1)
  local -r grn=$(tput setaf 2)
  local -r org=$(tput setaf 3)
  local -r rst=$(tput sgr0)
  # Remove the backup on exit. -f so an early failure (before the backup was
  # taken) does not raise a second error from inside the trap.
  finish() {
    printf "%s==> Cleaning up\n%s" "$(tput setaf 2)" "$(tput sgr0)"
    rm -f "${gitconfig_path}.bak"
  }
  trap finish EXIT
  cdToScriptPath() {
    cd "$1" || exit
  }
  while getopts ':g' flg; do
    case $flg in
      g)
        global_flg=1;;
      *)
        printf "%s\n" "${red}Invalid option expecting '<-g>'!${rst}";
        exit 1;;
    esac
  done
  shift $((OPTIND - 1))
  cdToScriptPath "$script_path"
  [[ $global_flg -eq 1 ]] && gitconfig_path="$HOME/.gitconfig" || gitconfig_path="${script_path}/.gitconfig"
  [[ $global_flg -eq 1 ]] && printf "%s==> Writing to global gitconfig!\n%s" "${red}" "${rst}"
  # Backup gitconfig
  cp "$gitconfig_path" "${gitconfig_path}.bak"
  printf "%s==> gitconfig backed up\n%s" "${grn}" "${rst}"
  # Make gitconfig
  printf "%s==> Making gitconfig\n%s" "${grn}" "${rst}"
  # Write gitconfig simple aliases
  printf "%s==> Adding simple aliases\n%s" "${grn}" "${rst}"
  chmod +x "${script_path}/config/alias.sh"
  . "${script_path}/config/alias.sh" "$gitconfig_path"
  # Write gitconfig configuration
  printf "%s==> Adding config options\n%s" "${grn}" "${rst}"
  chmod +x "${script_path}/config/config.sh"
  . "${script_path}/config/config.sh" "$gitconfig_path"
  printf "%s==> Adding anonymous function aliases\n%s" "${grn}" "${rst}"
  [[ -z ${1:-} ]] && FILE_PAT="${script_path}/anonymous_functions/**/*.sh" || FILE_PAT="$1"
  for filename in $FILE_PAT; do
    # SC2059 fix: filenames/alias names are printf *arguments*, never part of
    # the format string (a filename containing % previously broke the output).
    [[ ! -s $filename ]] && printf "%s==> %s is empty - skipping\n%s" "${org}" "$filename" "${rst}" && continue
    [[ $filename == *"~"* ]] && printf "%s==> %s is WIP - skipping\n%s" "${org}" "$filename" "${rst}" && continue
    temp_alias_file="${filename%.*}-alias.${filename##*.}"
    # Strip all comments and collapse all whitespace to a single space - leave explicit whitespace escape characters
    grep -E -v "^[[:blank:]]*#" "$filename" > "$temp_alias_file" && ex +%j -scwq "$temp_alias_file"
    alias_body=$(<"$temp_alias_file")
    rm "$temp_alias_file"
    # Build alias string - remove git- from base filename and concatenate
    base_filename=$(basename "${filename%.*}")
    alias_name=${base_filename#'git-'}
    printf "%s==> %s built\n%s" "${grn}" "$alias_name" "${rst}"
    # Let git write anonymous function aliases - will do a better job of escaping
    alias_body="!bash -c '$alias_body' -"
    git config --file "$gitconfig_path" "alias.${alias_name}" "$alias_body"
    printf "%s\t==> %s written\n%s" "${grn}" "$alias_name" "${rst}"
  done
}
make-gitconfig "$@"
|
#!/bin/bash
set -e

# Container entrypoint: when invoked as `gin ... run`, ensure Go dependencies
# are up to date (via dep) before handing control to the requested command.
INIT_SEM=/tmp/initialized.sem
PACKAGE_FILE=Gopkg.lock

log () {
  echo -e "\033[0;33m$(date "+%H:%M:%S")\033[0;37m ==> $1."
}

dependencies_up_to_date() {
  # It is up to date if the package file is older than
  # the last time the container was initialized
  [ ! "$PACKAGE_FILE" -nt "$INIT_SEM" ]
}

# POSIX `=` and separate bracket tests instead of the obsolescent `-a`.
if [ "$1" = "gin" ] && [ "$3" = "run" ]; then
  if ! dependencies_up_to_date; then
    log "Packages updating..."
    dep ensure
    log "Packages updated"
  fi
  touch "$INIT_SEM"
  # Set env vars if .env file exists (egrep is deprecated; use grep -E).
  # The command substitution is deliberately unquoted: each KEY=VALUE line
  # must word-split into a separate argument to export.
  if [ -f .env ]; then
    export $(grep -E -v '^#' .env | xargs)
  fi
fi

exec "$@"
|
"use strict";
// Empty compiled module stub; the source-map pragma below is emitted by the
// build tool and must remain the last line.
//# sourceMappingURL=my-component2.es5.js.map
#!/bin/bash
# Launch loop for an Arma 3 headless client inside the container: run the
# asset configuration, escape mod-list separators, print the effective
# settings, then (re)start the client command forever.
cd /home/steam

# perform config
/bin/bash /assets/config.sh

# Escape ';' separators in the mod list so they survive the command line.
MODS=$(echo "$MODS" | sed 's|;|\\;|g')

# check the setup
echo "My IP = $IP"
echo "Target ServerIP = $SERVER_IP"
echo "Target ServerPORT = $SERVER_PORT"
echo "MODS = $MODS"

#tail -f /var/steam/log/console/arma3-server-console.log

cd /home/steam/serverfiles

# Assemble the client command in a single assignment (trailing space kept to
# match the original argument string exactly).
cmd="./arma3server -client -connect=$SERVER_IP -ip=$IP -port=$SERVER_PORT -password=$GAME_PASSWORD -bepath=/home/steam/serverfiles/battleye -mod=$MODS -netlog "

# Restart loop: the actual launch is still commented out, so this currently
# only echoes the command on each cycle.
while true; do
    echo "CMD: $cmd"
    # eval $cmd
    sleep 600
    echo restarting
    sleep 3
done
|
<filename>cmd/applier/main.go
package main
import (
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"time"
"github.com/open-cluster-management/library-go/pkg/applier"
libgoclient "github.com/open-cluster-management/library-go/pkg/client"
"github.com/open-cluster-management/library-go/pkg/templateprocessor"
"gopkg.in/yaml.v1"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/klog"
crclient "sigs.k8s.io/controller-runtime/pkg/client"
)
// Values holds the template substitution values loaded from the values file.
type Values map[string]interface{}
// main parses the command-line flags and applies (or deletes) the templates
// found in the template directory, rendered with the values file, against the
// cluster selected by the kubeconfig. Exits non-zero on failure.
func main() {
	var dir string
	var valuesPath string
	var kubeconfigPath string
	var dryRun bool
	var prefix string
	var delete bool
	var timeout int
	var force bool
	klog.InitFlags(nil)
	flag.StringVar(&dir, "d", ".", "The directory containing the templates, default '.'")
	// Fixed copy-pasted usage text: -values takes the values *file*, not the
	// template directory.
	flag.StringVar(&valuesPath, "values", "values.yaml", "The file containing the values, default 'values.yaml'")
	flag.StringVar(&kubeconfigPath, "k", "", "The kubeconfig file")
	flag.BoolVar(&dryRun, "dry-run", false, "if set only the rendered yaml will be shown, default false")
	flag.StringVar(&prefix, "p", "", "The prefix to add to each value names, for example 'Values'")
	flag.BoolVar(&delete, "delete", false, "if set only the resource defined in the yamls will be deleted, default false")
	flag.IntVar(&timeout, "t", 5, "Timeout in second to apply one resource, default 5 sec")
	flag.BoolVar(&force, "force", false, "If set, the finalizers will be removed before delete")
	flag.Parse()
	err := apply(dir, valuesPath, kubeconfigPath, prefix, timeout, dryRun, delete, force)
	if err != nil {
		fmt.Printf("Failed to apply due to error: %s", err)
		os.Exit(1)
	}
	if dryRun {
		fmt.Println("Dryrun successfully executed")
	} else {
		fmt.Println("Successfully applied")
	}
}
// apply loads the values file, optionally nests the values under prefix,
// builds a client and an Applier with a capped backoff, and then either
// deletes or creates/updates the resources rendered from the templates in
// dir. In dry-run mode a dry-run client is substituted so nothing is written.
func apply(dir, valuesPath, kubeconfigPath, prefix string, timeout int, dryRun, delete, force bool) error {
	raw, err := ioutil.ReadFile(filepath.Clean(valuesPath))
	if err != nil {
		return err
	}
	parsed := &Values{}
	if err = yaml.Unmarshal(raw, parsed); err != nil {
		return err
	}
	// With a prefix, values become reachable as <prefix>.<key> in templates.
	values := Values{}
	if prefix == "" {
		values = *parsed
	} else {
		values[prefix] = *parsed
	}
	klog.V(5).Infof("values:\n%v", values)
	c, err := libgoclient.NewDefaultClient(kubeconfigPath, crclient.Options{})
	if err != nil {
		return err
	}
	// Exponential backoff per resource, capped at the user-supplied timeout.
	opts := &applier.Options{
		Backoff: &wait.Backoff{
			Steps:    4,
			Duration: 500 * time.Millisecond,
			Factor:   5.0,
			Jitter:   0.1,
			Cap:      time.Duration(timeout) * time.Second,
		},
		DryRun:      dryRun,
		ForceDelete: force,
	}
	if dryRun {
		c = crclient.NewDryRunClient(c)
	}
	a, err := applier.NewApplier(templateprocessor.NewYamlFileReader(dir),
		&templateprocessor.Options{},
		c,
		nil,
		nil,
		applier.DefaultKubernetesMerger,
		opts)
	if err != nil {
		return err
	}
	if delete {
		return a.DeleteInPath("", nil, true, values)
	}
	return a.CreateOrUpdateInPath("", nil, true, values)
}
|
#!/bin/bash
# Start a geth node on the private "devtest" chain (network id 1001201) with
# the HTTP-RPC server on port 8080 and an interactive console attached.
# NOTE(review): --rpcaddr "0.0.0.0" with --rpccorsdomain "*" exposes the RPC
# API to any host/origin - confirm this only runs in an isolated dev network.
geth --rpc --rpcaddr "0.0.0.0" --rpcport "8080" --rpccorsdomain "*" --datadir "chains/devtest" --port "2402" --ipcapi "admin,db,eth,debug,miner,net,shh,txpool,personal,web3" --rpcapi "db,eth,net,web3" --networkid 1001201 console
|
<reponame>lgoldstein/communitychest
/*
*
*/
package net.community.chest.ui.components.dialog.manifest;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import net.community.chest.CoVariantReturn;
import net.community.chest.ui.helpers.table.EnumTableColumn;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Sep 23, 2008 10:48:01 AM
*/
public class ManifestTableCol extends EnumTableColumn<ManifestTableColumns> {
    /**
     *
     */
    private static final long serialVersionUID = 1579586600078609215L;
    /**
     * Fully-specified constructor.
     *
     * @param colIndex the column identifier enum value
     * @param colWidth the initial column width in pixels
     * @param colRenderer cell renderer, or null for the table default
     * @param colEditor cell editor, or null for the table default
     * @throws IllegalArgumentException if rejected by the superclass
     */
    public ManifestTableCol (ManifestTableColumns colIndex, int colWidth,
            TableCellRenderer colRenderer, TableCellEditor colEditor)
        throws IllegalArgumentException
    {
        super(ManifestTableColumns.class, colIndex, colWidth, colRenderer, colEditor);
    }
    /** Convenience constructor using default renderer/editor. */
    public ManifestTableCol (ManifestTableColumns colIndex, int colWidth)
    {
        this(colIndex, colWidth, null, null);
    }
    /** Convenience constructor using a default width of 75 pixels. */
    public ManifestTableCol (ManifestTableColumns colIndex)
    {
        this(colIndex, 75);
    }
    /** Reconstructs a column from its XML representation. */
    public ManifestTableCol (Element elem) throws Exception
    {
        super(ManifestTableColumns.class, elem);
    }
    /*
     * @see net.community.chest.swing.component.table.BaseTableColumn#getColumnConverter(org.w3c.dom.Element)
     */
    @Override
    @CoVariantReturn
    protected ManifestTableColReflectiveProxy getColumnConverter (final Element elem) throws Exception
    {
        return (null == elem) ? null : ManifestTableColReflectiveProxy.DEFAULT;
    }
}
|
package animalfarm;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Main {
public static void main(String[] args) {
try (BufferedReader bfr = new BufferedReader(new InputStreamReader(System.in))) {
var name = bfr.readLine();
var age = Integer.parseInt(bfr.readLine());
Chicken chicken;
try{
chicken = new Chicken(name, age);
System.out.println(chicken);
} catch (IllegalArgumentException e) {
System.out.println(e.getMessage());
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
|
package hub
import (
"fmt"
"sort"
"strconv"
)
// Well-known hub metadata configuration keys.
const (
	ConfigHubName    = "hub.name"
	ConfigHubDesc    = "hub.desc"
	ConfigHubTopic   = "hub.topic"
	ConfigHubOwner   = "hub.owner"
	ConfigHubWebsite = "hub.website"
	ConfigHubEmail   = "hub.email"
	ConfigHubMOTD    = "hub.motd"
)

// Compression-related configuration keys.
const (
	ConfigZlibLevel = "zlib.level"
)

// configAliases maps short key names to their canonical "hub.*" keys.
var configAliases = map[string]string{
	"name":    ConfigHubName,
	"desc":    ConfigHubDesc,
	"topic":   ConfigHubTopic,
	"owner":   ConfigHubOwner,
	"website": ConfigHubWebsite,
	"email":   ConfigHubEmail,
	"motd":    ConfigHubMOTD,
}

// configIgnored is a list of ignored config keys that can only be set in the config file.
var configIgnored = map[string]struct{}{
	"chat.encoding":  {},
	"chat.log.join":  {},
	"chat.log.max":   {},
	"database.path":  {},
	"database.type":  {},
	"plugins.path":   {},
	"serve.host":     {},
	"serve.port":     {},
	"serve.tls.cert": {},
	"serve.tls.key":  {},
}
// MergeConfig merges the given configuration map into the hub configuration.
func (h *Hub) MergeConfig(m Map) {
	h.MergeConfigPath("", m)
}

// MergeConfigPath merges m into the hub configuration, prefixing every key
// with path (dot-separated). Nested maps are flattened recursively, so
// {"hub": {"name": v}} becomes the "hub.name" key. Values are applied via
// setConfig without being persisted (save=false), since this is used for
// loading configuration rather than changing it.
func (h *Hub) MergeConfigPath(path string, m Map) {
	for k, v := range m {
		if path != "" {
			k = path + "." + k
		}
		switch v := v.(type) {
		case Map:
			h.MergeConfigPath(k, v)
		case map[string]interface{}:
			h.MergeConfigPath(k, Map(v))
		default:
			h.setConfig(k, v, false)
		}
	}
}
// saveConfig persists a configuration value. Keys listed in configIgnored are
// never persisted. Persistence itself is not implemented yet (see TODO).
func (h *Hub) saveConfig(key string, val interface{}) {
	if _, ok := configIgnored[key]; ok {
		return
	}
	// TODO: persist config
}

// setConfigMap stores a value for a key that has no dedicated typed field,
// in the generic config map, creating the map lazily under the write lock.
func (h *Hub) setConfigMap(key string, val interface{}) {
	if _, ok := configIgnored[key]; ok {
		return
	}
	h.conf.Lock()
	if h.conf.m == nil {
		h.conf.m = make(Map)
	}
	h.conf.m[key] = val
	h.conf.Unlock()
}

// getConfigMap reads a value from the generic config map under the read lock.
// The second return value reports whether the key exists.
func (h *Hub) getConfigMap(key string) (interface{}, bool) {
	h.conf.RLock()
	val, ok := h.conf.m[key]
	h.conf.RUnlock()
	return val, ok
}
// setConfig routes val to the type-specific setter for key, normalizing
// integer and float widths to int64/uint64/float64. Keys listed in
// configIgnored are silently dropped. When save is true the value is also
// persisted via saveConfig. Panics on values of any other type, since that
// indicates a programming error in the caller.
func (h *Hub) setConfig(key string, val interface{}, save bool) {
	if _, ok := configIgnored[key]; ok {
		return
	}
	switch val := val.(type) {
	case bool:
		h.setConfigBool(key, val)
	case string:
		h.setConfigString(key, val)
	case int:
		h.setConfigInt(key, int64(val))
	case int64:
		h.setConfigInt(key, val)
	case int32:
		h.setConfigInt(key, int64(val))
	case uint:
		h.setConfigUint(key, uint64(val))
	case uint64:
		h.setConfigUint(key, val)
	case uint32:
		h.setConfigUint(key, uint64(val))
	case float64:
		h.setConfigFloat(key, val)
	case float32:
		h.setConfigFloat(key, float64(val))
	default:
		panic(fmt.Errorf("unsupported config type: %T", val))
	}
	if save {
		h.saveConfig(key, val)
	}
}

// SetConfig sets a configuration value and persists it.
func (h *Hub) SetConfig(key string, val interface{}) {
	h.setConfig(key, val, true)
}
// ConfigKeys returns the sorted list of configuration keys: the well-known
// hub keys plus every non-ignored key stored in the generic config map.
// NOTE(review): if the generic map ever contains one of the well-known keys,
// that key would appear twice in the result - confirm setConfigString/Int
// always route well-known keys away from the map.
func (h *Hub) ConfigKeys() []string {
	keys := []string{
		ConfigHubName,
		ConfigHubDesc,
		ConfigHubTopic,
		ConfigHubMOTD,
		ConfigHubOwner,
		ConfigHubWebsite,
		ConfigHubEmail,
		ConfigZlibLevel,
	}
	h.conf.RLock()
	for k := range h.conf.m {
		if _, ok := configIgnored[k]; ok {
			continue
		}
		keys = append(keys, k)
	}
	h.conf.RUnlock()
	sort.Strings(keys)
	return keys
}
// GetConfig returns the raw value of a configuration key. Short aliases are
// resolved first; well-known string keys delegate to GetConfigString and
// the zlib level to GetConfigInt, while any other key is read from the
// generic config map (nil values count as absent).
func (h *Hub) GetConfig(key string) (interface{}, bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	case ConfigHubName,
		ConfigHubDesc,
		ConfigHubTopic,
		ConfigHubMOTD,
		ConfigHubOwner,
		ConfigHubWebsite,
		ConfigHubEmail:
		v, ok := h.GetConfigString(key)
		if !ok {
			return nil, false
		}
		return v, true
	case ConfigZlibLevel:
		v, ok := h.GetConfigInt(key)
		if !ok {
			return nil, false
		}
		return v, true
	}
	h.conf.RLock()
	v, ok := h.conf.m[key]
	h.conf.RUnlock()
	return v, ok && v != nil
}
// setConfigString applies a string value: well-known hub keys go to their
// dedicated setters or typed conf fields (under the write lock); any other
// key falls through to the generic config map.
func (h *Hub) setConfigString(key string, val string) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	case ConfigHubName:
		h.setName(val)
	case ConfigHubDesc:
		h.setDesc(val)
	case ConfigHubTopic:
		h.setTopic(val)
	case ConfigHubMOTD:
		h.setMOTD(val)
	case ConfigHubOwner:
		h.conf.Lock()
		h.conf.Owner = val
		h.conf.Unlock()
	case ConfigHubWebsite:
		h.conf.Lock()
		h.conf.Website = val
		h.conf.Unlock()
	case ConfigHubEmail:
		h.conf.Lock()
		h.conf.Email = val
		h.conf.Unlock()
	default:
		h.setConfigMap(key, val)
	}
}

// SetConfigString sets a string configuration value and persists it.
func (h *Hub) SetConfigString(key string, val string) {
	h.setConfigString(key, val)
	h.saveConfig(key, val)
}
// GetConfigString returns the string value of a configuration key. Short
// aliases are resolved to canonical keys first. Well-known hub keys are read
// from the typed conf fields under the read lock; any other key is looked up
// in the generic config map and, if the stored value is not a string, it is
// formatted with fmt.Sprint. The second return value reports presence.
func (h *Hub) GetConfigString(key string) (string, bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	case ConfigHubName:
		return h.getName(), true
	case ConfigHubDesc:
		h.conf.RLock()
		// Bug fix: previously returned h.conf.Owner here (copy-paste from
		// the ConfigHubOwner case), so reading "hub.desc" yielded the owner.
		v := h.conf.Desc
		h.conf.RUnlock()
		return v, true
	case ConfigHubTopic:
		h.conf.RLock()
		v := h.conf.Topic
		h.conf.RUnlock()
		return v, true
	case ConfigHubMOTD:
		h.conf.RLock()
		v := h.conf.MOTD
		h.conf.RUnlock()
		return v, true
	case ConfigHubOwner:
		h.conf.RLock()
		v := h.conf.Owner
		h.conf.RUnlock()
		return v, true
	case ConfigHubWebsite:
		h.conf.RLock()
		v := h.conf.Website
		h.conf.RUnlock()
		return v, true
	case ConfigHubEmail:
		h.conf.RLock()
		v := h.conf.Email
		h.conf.RUnlock()
		return v, true
	default:
		v, ok := h.getConfigMap(key)
		if !ok || v == nil {
			return "", false
		}
		switch v := v.(type) {
		case string:
			return v, true
		default:
			return fmt.Sprint(v), true
		}
	}
}
// setConfigBool applies a boolean value. No well-known keys are boolean yet,
// so everything falls through to the generic config map.
func (h *Hub) setConfigBool(key string, val bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	if _, ok := configIgnored[key]; ok {
		return
	}
	switch key {
	default:
		h.setConfigMap(key, val)
	}
}

// SetConfigBool sets a boolean configuration value and persists it.
func (h *Hub) SetConfigBool(key string, val bool) {
	h.setConfigBool(key, val)
	h.saveConfig(key, val)
}

// GetConfigBool returns the boolean value of a configuration key, coercing
// stored numbers (non-zero = true) and strings (strconv.ParseBool; parse
// errors yield false). Unknown value types report (false, true) - present
// but not convertible.
func (h *Hub) GetConfigBool(key string) (bool, bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	default:
		v, ok := h.getConfigMap(key)
		if !ok || v == nil {
			return false, false
		}
		switch v := v.(type) {
		case bool:
			return v, true
		case int64:
			return v != 0, true
		case uint64:
			return v != 0, true
		case float64:
			return v != 0, true
		case string:
			b, _ := strconv.ParseBool(v)
			return b, true
		default:
			return false, true
		}
	}
}
// setConfigInt applies an integer value: the zlib level goes to its dedicated
// setter; any other key falls through to the generic config map.
func (h *Hub) setConfigInt(key string, val int64) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	if _, ok := configIgnored[key]; ok {
		return
	}
	switch key {
	case ConfigZlibLevel:
		h.setZlibLevel(int(val))
	default:
		h.setConfigMap(key, val)
	}
}

// SetConfigInt sets an integer configuration value and persists it.
func (h *Hub) SetConfigInt(key string, val int64) {
	h.setConfigInt(key, val)
	h.saveConfig(key, val)
}

// GetConfigInt returns the integer value of a configuration key, coercing
// stored uints/floats (truncated), bools (1/0) and strings (base-10 parse;
// errors yield 0). Unknown value types report (0, true) - present but not
// convertible.
func (h *Hub) GetConfigInt(key string) (int64, bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	case ConfigZlibLevel:
		return int64(h.zlibLevel()), true
	default:
		v, ok := h.getConfigMap(key)
		if !ok || v == nil {
			return 0, false
		}
		switch v := v.(type) {
		case int64:
			return v, true
		case uint64:
			return int64(v), true
		case float64:
			return int64(v), true
		case bool:
			if v {
				return 1, true
			}
			return 0, true
		case string:
			i, _ := strconv.ParseInt(v, 10, 64)
			return i, true
		default:
			return 0, true
		}
	}
}
// setConfigUint applies an unsigned integer value. No well-known keys are
// unsigned yet, so everything falls through to the generic config map.
func (h *Hub) setConfigUint(key string, val uint64) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	if _, ok := configIgnored[key]; ok {
		return
	}
	switch key {
	default:
		h.setConfigMap(key, val)
	}
}

// SetConfigUint sets an unsigned integer configuration value and persists it.
func (h *Hub) SetConfigUint(key string, val uint64) {
	h.setConfigUint(key, val)
	h.saveConfig(key, val)
}

// GetConfigUint returns the unsigned integer value of a configuration key,
// coercing stored ints/floats (truncated; negatives wrap), bools (1/0) and
// strings (base-10 parse; errors yield 0). Unknown value types report
// (0, true) - present but not convertible.
func (h *Hub) GetConfigUint(key string) (uint64, bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	default:
		v, ok := h.getConfigMap(key)
		if !ok || v == nil {
			return 0, false
		}
		switch v := v.(type) {
		case uint64:
			return v, true
		case int64:
			return uint64(v), true
		case float64:
			return uint64(v), true
		case bool:
			if v {
				return 1, true
			}
			return 0, true
		case string:
			i, _ := strconv.ParseUint(v, 10, 64)
			return i, true
		default:
			return 0, true
		}
	}
}
// setConfigFloat applies a floating-point value. No well-known keys are
// floats yet, so everything falls through to the generic config map.
func (h *Hub) setConfigFloat(key string, val float64) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	if _, ok := configIgnored[key]; ok {
		return
	}
	switch key {
	default:
		h.setConfigMap(key, val)
	}
}

// SetConfigFloat sets a floating-point configuration value and persists it.
func (h *Hub) SetConfigFloat(key string, val float64) {
	h.setConfigFloat(key, val)
	h.saveConfig(key, val)
}

// GetConfigFloat returns the floating-point value of a configuration key,
// coercing stored ints/uints, bools (1/0) and strings (strconv.ParseFloat;
// errors yield 0). Unknown value types report (0, true) - present but not
// convertible.
func (h *Hub) GetConfigFloat(key string) (float64, bool) {
	if alias, ok := configAliases[key]; ok {
		key = alias
	}
	switch key {
	default:
		v, ok := h.getConfigMap(key)
		if !ok || v == nil {
			return 0, false
		}
		switch v := v.(type) {
		case uint64:
			return float64(v), true
		case int64:
			return float64(v), true
		case float64:
			return v, true
		case bool:
			if v {
				return 1, true
			}
			return 0, true
		case string:
			f, _ := strconv.ParseFloat(v, 64)
			return f, true
		default:
			return 0, true
		}
	}
}
|
package net.ninjacat.omg.errors;
import net.ninjacat.omg.conditions.Condition;
import net.ninjacat.omg.conditions.Conditions;
import net.ninjacat.omg.patterns.CompilingStrategy;
import net.ninjacat.omg.patterns.PatternCompiler;
import net.ninjacat.omg.patterns.Patterns;
import org.immutables.value.Value;
import org.junit.Test;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
@RunWith(Theories.class)
/**
 * Verifies that pattern compilation fails with {@code TypeConversionException}
 * when a condition's literal value cannot be converted to the type of the
 * matched property (e.g. comparing a numeric field against the string "1",
 * or a String field against the int 1). Each case is a theory so it runs
 * once per {@link CompilingStrategy} supplied by the runner.
 *
 * NOTE(review): combining @Theory with @Test(expected = ...) is suspect -
 * the JUnit Theories runner executes @Theory methods and may ignore the
 * @Test annotation entirely, in which case the expected-exception check
 * would never be enforced. Confirm these tests actually fail when no
 * exception is thrown.
 */
@RunWith(Theories.class)
public class TestTypeConversionError {
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailIntTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("intField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailByteTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("byteField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailTypeCharConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("charField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailDoubleTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("doubleField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailFloatTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("floatField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailLongTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("longField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailShortTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("shortField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    // Inverse direction: a non-string literal against a String property.
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailStringTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("stringField").eq(1).build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    // Boxed-type variants of the primitive cases above.
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailBoxedIntTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedIntField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailBoxedByteTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedByteField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailTypeBoxedCharConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedCharField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailBoxedDoubleTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedDoubleField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailBoxedLongTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedLongField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailBoxedShortTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedShortField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    @Theory
    @Test(expected = TypeConversionException.class)
    public void shouldFailBoxedFloatTypeConversion(final CompilingStrategy strategy) {
        final Condition condition = Conditions.matcher().property("boxedFloatField").eq("1").build();
        Patterns.compile(condition, PatternCompiler.forClass(FieldTest.class, strategy));
    }
    /**
     * Immutables-generated value interface exposing one property of each
     * primitive, boxed and String type used by the theories above.
     */
    @Value.Immutable
    public interface FieldTest {
        int getIntField();
        long getLongField();
        short getShortField();
        byte getByteField();
        char getCharField();
        double getDoubleField();
        float getFloatField();
        String getStringField();
        Integer getBoxedIntField();
        Long getBoxedLongField();
        Short getBoxedShortField();
        Byte getBoxedByteField();
        Character getBoxedCharField();
        Double getBoxedDoubleField();
        Float getBoxedFloatField();
    }
}
|
<reponame>BrunoGrisci/EngineeringDesignusingMultiObjectiveEvolutionaryAlgorithms
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.analysis.sensitivity;
import java.io.File;
import java.io.IOException;
import java.util.Properties;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.moeaframework.TestUtils;
import org.moeaframework.core.NondominatedPopulation;
import org.moeaframework.core.Problem;
import org.moeaframework.core.Settings;
import org.moeaframework.core.Solution;
import org.moeaframework.core.Variable;
import org.moeaframework.core.variable.BinaryVariable;
import org.moeaframework.core.variable.Grammar;
import org.moeaframework.core.variable.Permutation;
import org.moeaframework.core.variable.RealVariable;
import org.moeaframework.problem.AbstractProblem;
import org.moeaframework.util.TypedProperties;
/**
* Tests the {@link ResultFileWriter} class.
*/
public class ResultFileWriterTest {
/**
* The problem used for testing.
*/
private Problem problem;
/**
* A feasible solution.
*/
private Solution solution1;
/**
* Another feasible solution.
*/
private Solution solution2;
/**
* A solution violating its constraints.
*/
private Solution solution3;
/**
* Creates the problem used for testing.
*/
@Before
public void setUp() {
problem = new AbstractProblem(3, 2, 1) {
@Override
public void evaluate(Solution solution) {
throw new UnsupportedOperationException();
}
@Override
public Solution newSolution() {
Solution solution = new Solution(3, 2, 1);
solution.setVariable(0, new RealVariable(0.0, 1.0));
solution.setVariable(1, new BinaryVariable(5));
solution.setVariable(2, new Permutation(3));
return solution;
}
};
solution1 = problem.newSolution();
((RealVariable)solution1.getVariable(0)).setValue(0.0);
((BinaryVariable)solution1.getVariable(1)).set(2, true);
((Permutation)solution1.getVariable(2)).swap(0, 2);
solution1.setObjectives(new double[] { 0.0, 1.0 });
solution2 = problem.newSolution();
((RealVariable)solution1.getVariable(0)).setValue(1.0);
((BinaryVariable)solution1.getVariable(1)).set(1, true);
((Permutation)solution1.getVariable(2)).swap(0, 1);
solution2.setObjectives(new double[] { 1.0, 0.0 });
solution3 = problem.newSolution();
((RealVariable)solution1.getVariable(0)).setValue(0.5);
((BinaryVariable)solution1.getVariable(1)).set(1, true);
((Permutation)solution1.getVariable(2)).swap(1, 2);
solution3.setObjectives(new double[] { 0.5, 0.5 });
solution3.setConstraints(new double[] { -1.0 });
}
/**
* Removes references to shared objects so they can be garbage collected.
*/
@After
public void tearDown() {
problem = null;
solution1 = null;
solution2 = null;
solution3 = null;
}
/**
* Tests if special characters are escaped correctly when writing property
* files.
*
* @throws IOException should not occur
*/
@Test
public void testSpecialCharactersInProperties() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Properties properties = new Properties();
properties.setProperty("\"'!@#$=:%^&*()\\\r\n//\t ", "\"'!@#$=:%^&*()\\\r\n//\t ");
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
Assert.assertEquals(properties, reader.next().getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
* Tests if {@code null} properties are written correctly.
*
* @throws IOException should not occur
*/
@Test
public void testNullProperties() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Properties properties = new Properties();
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, (TypedProperties)null));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
Assert.assertEquals(properties, reader.next().getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
* Tests if empty properties are written correctly.
*
* @throws IOException should not occur
*/
@Test
public void testEmptyProperties() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Properties properties = new Properties();
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
Assert.assertEquals(properties, reader.next().getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
* Tests if the population and properties are written correctly.
*
* @throws IOException should not occur
*/
@Test
public void testNormal() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
population.add(solution1);
population.add(solution2);
Properties properties = new Properties();
properties.setProperty("foo", "bar");
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
ResultEntry entry = reader.next();
TestUtils.assertEquals(population, entry.getPopulation());
Assert.assertEquals(properties, entry.getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if the population and properties are written correctly when
 * writing decision variables is disabled.
 *
 * @throws IOException should not occur
 */
@Test
public void testNoVariables() throws IOException {
    File file = TestUtils.createTempFile();
    ResultFileWriter writer = null;
    ResultFileReader reader = null;
    NondominatedPopulation population = new NondominatedPopulation();
    population.add(solution1);
    population.add(solution2);
    Properties properties = new Properties();
    properties.setProperty("foo", "bar");

    // Write with the third constructor argument (include variables) false.
    try {
        writer = new ResultFileWriter(problem, file, false);
        writer.append(new ResultEntry(population, properties));
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    // Since variables were not written, the expected population contains
    // objective-only copies of the original solutions.
    population.clear();
    population.add(new Solution(solution1.getObjectives()));
    population.add(new Solution(solution2.getObjectives()));

    try {
        reader = new ResultFileReader(problem, file);
        ResultEntry entry = reader.next();
        TestUtils.assertEquals(population, entry.getPopulation());
        Assert.assertEquals(properties, entry.getProperties());
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}
/**
 * Tests if constraint violating solutions are not written, and that
 * empty populations are written correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testConstrainedSolution() throws IOException {
    File file = TestUtils.createTempFile();
    ResultFileWriter writer = null;
    ResultFileReader reader = null;

    // solution3 is the constraint-violating fixture for this test class.
    NondominatedPopulation population = new NondominatedPopulation();
    population.add(solution3);
    Properties properties = new Properties();
    properties.setProperty("foo", "bar");

    try {
        writer = new ResultFileWriter(problem, file);
        writer.append(new ResultEntry(population, properties));
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    // The violating solution must be filtered out, leaving an empty
    // population, while the properties are still preserved.
    try {
        reader = new ResultFileReader(problem, file);
        ResultEntry entry = reader.next();
        Assert.assertEquals(0, entry.getPopulation().size());
        Assert.assertEquals(properties, entry.getProperties());
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}
/**
 * Tests if result files with multiple entries are written correctly, and
 * that writing can be resumed correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testResume() throws IOException {
    File file = TestUtils.createTempFile();
    ResultFileWriter writer = null;
    ResultFileReader reader = null;
    NondominatedPopulation population = new NondominatedPopulation();
    population.add(solution1);
    population.add(solution2);
    Properties properties = new Properties();
    properties.setProperty("foo", "bar");

    // First session: write two entries to a fresh file.
    try {
        writer = new ResultFileWriter(problem, file);
        Assert.assertEquals(0, writer.getNumberOfEntries());
        writer.append(new ResultEntry(population, properties));
        writer.append(new ResultEntry(population, properties));
        Assert.assertEquals(2, writer.getNumberOfEntries());
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    // Second session: reopening the same file must report the existing
    // entry count and allow appending a third entry.
    try {
        writer = new ResultFileWriter(problem, file);
        Assert.assertEquals(2, writer.getNumberOfEntries());
        writer.append(new ResultEntry(population, properties));
        Assert.assertEquals(3, writer.getNumberOfEntries());
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    // All three entries must read back identically, then the reader
    // must report exhaustion.
    try {
        reader = new ResultFileReader(problem, file);
        ResultEntry entry = null;

        Assert.assertTrue(reader.hasNext());
        entry = reader.next();
        TestUtils.assertEquals(population, entry.getPopulation());
        Assert.assertEquals(properties, entry.getProperties());

        Assert.assertTrue(reader.hasNext());
        entry = reader.next();
        TestUtils.assertEquals(population, entry.getPopulation());
        Assert.assertEquals(properties, entry.getProperties());

        Assert.assertTrue(reader.hasNext());
        entry = reader.next();
        TestUtils.assertEquals(population, entry.getPopulation());
        Assert.assertEquals(properties, entry.getProperties());

        Assert.assertFalse(reader.hasNext());
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}
/**
 * Tests that a decision variable type the writer cannot serialize does not
 * prevent the remaining data (objectives and supported variables) from
 * being written and read back.
 *
 * @throws IOException should not occur
 */
@Test
public void testUnsupportedDecisionVariable() throws IOException {
    File file = TestUtils.createTempFile();

    // A variable type with no serialization support in the writer.
    final Variable variable = new Variable() {

        private static final long serialVersionUID = -54413529004858950L;

        @Override
        public Variable copy() {
            return this;
        }

    };

    // Problem whose solutions mix a supported (real) and an unsupported
    // variable; evaluate() is never invoked by this test.
    problem = new AbstractProblem(2, 2, 1) {

        @Override
        public void evaluate(Solution solution) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Solution newSolution() {
            Solution solution = new Solution(2, 2, 1);
            solution.setVariable(0, new RealVariable(0.0, 1.0));
            solution.setVariable(1, variable);
            return solution;
        }

    };

    ResultFileWriter writer = null;
    ResultFileReader reader = null;
    NondominatedPopulation population = new NondominatedPopulation();

    Solution solution = problem.newSolution();
    ((RealVariable)solution.getVariable(0)).setValue(0.5);
    solution.setObjectives(new double[] { 0.0, 1.0 });
    population.add(solution);

    try {
        writer = new ResultFileWriter(problem, file);
        writer.append(new ResultEntry(population, (Properties)null));
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    // The objectives and the supported (real) variable must survive the
    // round trip even though the second variable could not be encoded.
    try {
        reader = new ResultFileReader(problem, file);
        ResultEntry entry = reader.next();
        Assert.assertEquals(1, entry.getPopulation().size());
        Assert.assertArrayEquals(solution.getObjectives(),
                entry.getPopulation().get(0).getObjectives(), Settings.EPS);
        Assert.assertEquals(solution.getVariable(0),
                entry.getPopulation().get(0).getVariable(0));
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}
/**
 * Tests that each supported decision variable type is encoded to a
 * compact, whitespace-free string representation.
 *
 * @throws IOException should not occur
 */
@Test
public void testEncode() throws IOException {
    File file = TestUtils.createTempFile();
    ResultFileWriter writer = null;

    try {
        writer = new ResultFileWriter(problem, file);

        // Real variables encode as their plain decimal value.
        RealVariable rv = new RealVariable(0.5, 0.0, 1.0);
        Assert.assertEquals("0.5", writer.encode(rv));
        Assert.assertFalse(writer.encode(rv).matches(".*\\s.*"));

        // Binary variables encode as a 0/1 bit string.
        BinaryVariable bv = new BinaryVariable(5);
        bv.set(2, true);
        Assert.assertEquals("00100", writer.encode(bv));
        Assert.assertFalse(writer.encode(bv).matches(".*\\s.*"));

        // Permutations encode as comma-separated indices.
        Permutation p = new Permutation(5);
        Assert.assertEquals("0,1,2,3,4", writer.encode(p));
        Assert.assertFalse(writer.encode(p).matches(".*\\s.*"));

        // Grammars have no exact-value assertion here; only verify the
        // encoding contains no whitespace.
        Grammar g = new Grammar(5);
        //Assert.assertEquals("-", writer.encode(g));
        Assert.assertFalse(writer.encode(g).matches(".*\\s.*"));
    } finally {
        if (writer != null) {
            writer.close();
        }
    }
}
}
|
<filename>server/src/main/java/com/decathlon/ara/repository/TeamRepository.java
package com.decathlon.ara.repository;
import com.decathlon.ara.domain.Team;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
/**
 * Spring Data JPA repository for the Team entity.
 */
@Repository
public interface TeamRepository extends JpaRepository<Team, Long> {

    /**
     * @param projectId the ID of the owning project
     * @return all teams of the given project, ordered by name
     */
    List<Team> findAllByProjectIdOrderByName(long projectId);

    /**
     * @param projectId the ID of the owning project
     * @param id the team ID
     * @return the matching team, or null if none exists
     */
    Team findByProjectIdAndId(long projectId, long id);

    /**
     * @param projectId the ID of the owning project
     * @param name the team name
     * @return the matching team, or null if none exists
     */
    Team findByProjectIdAndName(long projectId, String name);

}
|
# Sample usage of the FileMetadata class
import datetime  # needed below for building a custom creation timestamp

# Initialize file metadata with default creation time
file1 = FileMetadata('text', filepath='example.txt', comments=['Sample file'])
file1.addprop('size', '1024KB')

# Initialize file metadata with specified creation time
custom_time = datetime.datetime(2022, 1, 1, 12, 0, 0)
file2 = FileMetadata('image', creation_time=custom_time, comments=['Holiday photo'])
file2.addprop('resolution', '1920x1080')

# Accessing file metadata
print(file1.filetype)       # Output: text
print(file1.creation_time)  # Output: <creation time of example.txt>
print(file1.comments)       # Output: ['Sample file']
print(file1.props)          # Output: {'size': '1024KB'}

print(file2.filetype)       # Output: image
print(file2.creation_time)  # Output: 2022-01-01 12:00:00
print(file2.comments)       # Output: ['Holiday photo']
print(file2.props)          # Output: {'resolution': '1920x1080'}
<gh_stars>1-10
"""
WSGI config for swhweb project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
import sys
# import paths from settings.py
from .local_settings import DJANGO_PROJECT_PATH, VIRTUALENV_PACKAGES_PATH, MSWH_PROJECT_PATH
# add the path to this file
sys.path.append(DJANGO_PROJECT_PATH)
# add the virtualenv site-packages path
sys.path.append(VIRTUALENV_PACKAGES_PATH)
# add the path to the mswh Python package
sys.path.append(MSWH_PROJECT_PATH)
# import Django wsgi implementation only after appending all paths to SYS paths
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "swhweb.settings")
application = get_wsgi_application()
|
<gh_stars>0
import React from 'react';
import styled from 'styled-components';
import CurrentTime from './CurrentTime';
import Users from './Users';
const Dashboard = () => (
<>
<h1>Example React App</h1>
{window.env.ENABLE_DEBUG_MODE === 'true' && <div>debug mode enabled</div>}
<Container>
<CurrentTime />
<Users />
</Container>
</>
);
export default Dashboard;
const Container = styled.div`
display: flex;
flex-direction: column;
`;
|
<html>
<head>
<script>
/**
 * Reads the name/age inputs and appends them as a new row to the
 * results table.
 */
function submitForm(){
    // Get the values entered in the input fields
    let name = document.getElementById('name').value;
    let age = document.getElementById('age').value;
    let table = document.getElementById('results');

    // Append a new row with one cell per value.
    let row = table.insertRow();
    let cell1 = row.insertCell();
    let cell2 = row.insertCell();

    // Use textContent (not innerHTML) so the user-supplied text is rendered
    // literally and cannot inject markup or script into the page.
    cell1.textContent = name;
    cell2.textContent = age;
}
</script>
</head>
<body>
<h3>Enter Data:</h3>
<form>
Name: <input type="text" id="name"/> <br><br>
Age: <input type="text" id="age"/> <br><br>
<input type="button" value="Submit" onclick="submitForm()"/>
</form>
<h3>Results:</h3>
<table id="results">
<tr>
<th>Name</th>
<th>Age</th>
</tr>
</table>
</body>
</html> |
#!/bin/bash -e

# Installs the OS-level build and runtime dependencies for the application.
install_requirements() {
    echo "Installing requirements"

    # Ubuntu's cloud images has cloud-init doing an apt-get update / upgrade on first boot
    echo "Waiting for apt-get update, this may take a few minutes"
    # Poll until no process whose name matches "apt" remains, so we do not
    # race cloud-init for the dpkg/apt locks.
    while pgrep apt >/dev/null 2>&1 ; do
        sleep 0.5
    done

    sudo apt-get update
    # Upgrade unless explicitly disabled via APP_APT_UPGRADE=False.
    if [ "${APP_APT_UPGRADE}" != "False" ]; then
        sudo DEBIAN_FRONTEND=noninteractive apt-get -y upgrade
    fi

    # Ensure curl is installed for the next step
    #sudo DEBIAN_FRONTEND=noninteractive apt-get install -y curl

    # Not using Ubuntu distro node 4.x package due to intermittent bug cases
    #curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash -

    # We're mixing build with with runtime a bit here (git, npm). Consider separating build-deps (git npm)
    sudo DEBIAN_FRONTEND=noninteractive apt-get install -y python3-pip nginx libxml2-dev libxmlsec1-dev \
        python3-dev mysql-client libmysqlclient-dev libncurses5-dev swig \
        supervisor libffi-dev libsasl2-dev libldap2-dev git curl

    # Update pip to fix bug in 8.1.1
    sudo -H pip3 install --upgrade pip virtualenv
}

install_requirements
|
<filename>DeviceCode/Drivers/Display/TX09D71VM1CCA/TX09D71VM1CCA.h
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#include "tinyhal.h"
//--//
#ifndef _DRIVERS_DISPLAY_TX09D71VM1CCA_H_
#define _DRIVERS_DISPLAY_TX09D71VM1CCA_H_ 1
//////////////////////////////////////////////////////////////////////////////
// GPIO configuration for the LCD panel; only the enable line is needed.
struct TX09D71VM1CCA_LCD_CONFIG
{
    GPIO_FLAG LcdEnable;
};
// Aggregate driver configuration: the standard HAL config header followed
// by the LCD GPIO settings and the display-controller parameters.
struct TX09D71VM1CCA_CONFIG
{
    HAL_DRIVER_CONFIG_HEADER Header;

    //--//

    TX09D71VM1CCA_LCD_CONFIG LcdConfig;
    DISPLAY_CONTROLLER_CONFIG ControllerConfig;

    // Name used to locate this driver's configuration block.
    static LPCSTR GetDriverName() { return "TX09D71VM1CCA"; }
};

// Global configuration instance, defined by the platform/board code.
extern TX09D71VM1CCA_CONFIG g_TX09D71VM1CCA_Config;
//////////////////////////////////////////////////////////////////////////////
// Static display-driver interface for the TX09D71VM1CCA LCD panel:
// lifecycle, power management, raw blitting, and a simple text console
// (cursor state is kept in m_cursor).
struct TX09D71VM1CCA_Driver
{
    //--//

    // Current character-console cursor position.
    UINT32 m_cursor;

    //--//

    static BOOL Initialize();

    static BOOL Uninitialize();

    static void PowerSave( BOOL On );

    static void Clear();

    // Blit a sub-rectangle of the frame data at (x, y).
    static void BitBltEx( int x, int y, int width, int height, UINT32 data[] );

    // Blit a full frame; fUseDelta allows sending only changed content.
    static void BitBlt( int width, int height, int widthInWords, UINT32 data[], BOOL fUseDelta );

    // Draw a character at an explicit text row/column.
    static void WriteChar ( unsigned char c, int row, int col );

    // Draw a character at the current cursor, advancing m_cursor.
    static void WriteFormattedChar( unsigned char c );

private:
    // Geometry helpers derived from the controller configuration.
    static UINT32 PixelsPerWord();
    static UINT32 TextRows();
    static UINT32 TextColumns();
    static UINT32 WidthInWords();
    static UINT32 SizeInWords();
    static UINT32 SizeInBytes();
};

// Global driver instance, defined by the platform/board code.
extern TX09D71VM1CCA_Driver g_TX09D71VM1CCA_Driver;
#endif // _DRIVERS_DISPLAY_TX09D71VM1CCA_H_
|
#!/bin/sh
# Run the prometheus-agent container in the foreground (removed on exit),
# exposing ports 8083 and 8093. The agent config is fetched from the
# example in the prometheus-proxy repo; PROXY_HOSTNAME names this machine.
docker run --rm -p 8083:8083 -p 8093:8093 \
        --env AGENT_CONFIG='https://raw.githubusercontent.com/pambrose/prometheus-proxy/master/examples/simple.conf' \
        --env PROXY_HOSTNAME=mymachine.lan \
        pambrose/prometheus-agent:1.10.1
|
-- Count users who joined within the given December 2019 window (BETWEEN is
-- inclusive on both ends).
-- NOTE(review): the range starts on 2019-12-02, not 12-01 -- confirm this
-- is intentional and not an off-by-one on the month boundary.
SELECT COUNT(*)
FROM users
WHERE JOINED_DATE BETWEEN '2019-12-02' AND '2019-12-31'
<gh_stars>1-10
/*
* Copyright (c) 2019, FinancialForce.com, inc
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the FinancialForce.com, inc nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import { json, NextFunction, Request, Response, Server } from '@financialforcedev/orizuru';
import { flow } from '@financialforcedev/orizuru-auth';
import { Transport } from '@financialforcedev/orizuru-transport-rabbitmq';
import config from 'config';
import https from 'https';
import pem, { CertificateCreationResult } from 'pem';
// Define a function that creates a self-signed certificate.
// Wraps pem's callback API in a Promise (valid use of the Promise
// constructor for callback adaptation); the certificate is valid for 1 day.
function createCertificate(): Promise<CertificateCreationResult> {
    return new Promise((resolve, reject) => {
        pem.createCertificate({ days: 1, selfSigned: true }, (err, result) => {
            if (err) {
                return reject(err);
            }
            process.stdout.write('Created certificate\n');
            return resolve(result);
        });
    });
}
// Define a simple error middleware: log any error raised by earlier
// middleware through the server and answer 401; otherwise continue the chain.
// NOTE(review): every error maps to 401 Unauthorized, including non-auth
// failures -- confirm that is the intended behavior.
const errorMiddleware = (error: Error | undefined, req: Request, res: Response, next: NextFunction) => {
    if (error) {
        server.error(error);
        res.sendStatus(401);
    } else {
        next();
    }
};
// Create the server: Salesforce auth/openid settings come from the `config`
// package; messaging uses RabbitMQ on localhost with prefetch 1.
const server = new Server({
    authProvider: {
        salesforce: config.get('app.authProvider.salesforce')
    },
    openid: {
        salesforce: config.get('app.openid.salesforce'),
        salesforceConnection: config.get('app.openid.salesforceConnection')
    },
    port: 8080,
    transport: new Transport({
        prefetch: 1,
        url: 'amqp://localhost'
    })
});

// Add listeners for the server error and info events (both just logged).
server.on(Server.ERROR, (message) => {
    process.stdout.write(`${message}\n`);
});

server.on(Server.INFO, (message) => {
    process.stdout.write(`${message}\n`);
});

// Add the route to generate the authorization URL (in this case we use 'test' as the state parameter)
server.addRoute({
    method: 'get',
    middleware: [
        json(),
        errorMiddleware
    ],
    // Redirects the browser to the Salesforce authorization URL.
    // NOTE(review): both arguments below receive `openid.salesforce`;
    // confirm the second should not be `openid.salesforceConnection`.
    responseWriter: () => async (err: Error | undefined, req: Request, res: Response) => {
        const url = await flow.webServer.authorizationUrlGenerator(server.options.authProvider.salesforce)(server.options.openid.salesforce, server.options.openid.salesforce);
        res.redirect(url);
    },
    schema: {
        fields: [],
        name: 'auth',
        namespace: 'api.v1_0',
        type: 'record'
    },
    synchronous: true
});

// **All code specified in the rest of the readme should be added here**

// Create a self-signed certificate and then start the server listening to connections using HTTPS
createCertificate().then((certificate) => {
    const serverOptions: https.ServerOptions = {
        cert: certificate.certificate,
        key: certificate.clientKey
    };
    const httpsServer = https.createServer(serverOptions, server.serverImpl);
    httpsServer.listen(server.options.port);
    process.stdout.write('Started server\n');
});
|
# Install Node.js (via the NodeSource 8.x repo), yarn, and the pm2 process
# manager; then hand ownership of the explorer user's config dir back to it.
installNodeAndYarn () {
    echo "Installing nodejs and yarn..."
    # Register the NodeSource apt repository, then install node/npm.
    sudo curl -sL https://deb.nodesource.com/setup_8.x | sudo bash -
    sudo apt-get install -y nodejs npm
    # Add yarn's signing key and repository before installing it.
    sudo curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
    sudo echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
    sudo apt-get update -y
    sudo apt-get install -y yarn
    sudo npm install -g pm2
    # Some tooling expects the binary to be named "node" rather than "nodejs".
    sudo ln -s /usr/bin/nodejs /usr/bin/node
    sudo chown -R explorer:explorer /home/explorer/.config
    clear
}
# Install MongoDB 3.6 from the official mongodb.org apt repo, start/enable
# the service, and create per-database users for the explorers.
installMongo () {
    echo "Installing mongodb..."
    sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2930ADAE8CAF5059EE73BB4B58712A2291FA4AD5
    sudo echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
    sudo apt-get update -y
    sudo apt-get install -y --allow-unauthenticated mongodb-org
    # MongoDB's data directory must belong to the mongodb service account.
    sudo chown -R mongodb:mongodb /data/db
    sudo systemctl start mongod
    sudo systemctl enable mongod
    # Create readWrite users (credentials come from the global $rpcuser /
    # $rpcpassword variables set in the main script body).
    mongo bulwark --eval "db.createUser( { user: \"$rpcuser\", pwd: \"$rpcpassword\", roles: [ \"readWrite\" ] } )"
    mongo twins --eval "db.createUser( { user: \"$rpcuser\", pwd: \"$rpcpassword\", roles: [ \"readWrite\" ] } )"
    clear
}
# Download and install the Bulwark daemon, write its RPC config, register a
# systemd unit for it, then wait for the initial blockchain sync.
installBulwark () {
    echo "Installing Bulwark..."
    mkdir -p /tmp/bulwark
    cd /tmp/bulwark
    curl -Lo bulwark.tar.gz $bwklink
    tar -xzf bulwark.tar.gz
    sudo mv ./bin/* /usr/local/bin
    cd
    rm -rf /tmp/bulwark
    mkdir -p /home/explorer/.bulwark
    # Plain redirection suffices here (we own the directory); note that
    # "sudo cat > file" would not elevate the redirection anyway -- the
    # shell, not cat, opens the target file.
    cat > /home/explorer/.bulwark/bulwark.conf << EOL
rpcport=52544
rpcuser=$rpcuser
rpcpassword=$rpcpassword
daemon=1
txindex=1
EOL
    # Writing under /etc requires root for the *redirection*, so pipe the
    # heredoc through "sudo tee" instead of "sudo cat > file".
    sudo tee /etc/systemd/system/bulwarkd.service > /dev/null << EOL
[Unit]
Description=bulwarkd
After=network.target
[Service]
Type=forking
User=explorer
WorkingDirectory=/home/explorer
ExecStart=/usr/local/bin/bulwarkd -datadir=/home/explorer/.bulwark
ExecStop=/usr/local/bin/bulwark-cli -datadir=/home/explorer/.bulwark stop
Restart=on-abort
[Install]
WantedBy=multi-user.target
EOL
    # The unit points at /usr/local/bin, where the binaries were installed
    # above (the original referenced /home/explorer/bin, which is never
    # created by this script).
    sudo systemctl start bulwarkd
    sudo systemctl enable bulwarkd
    echo "Sleeping for 1 hour while node syncs blockchain..."
    sleep 1h
    clear
}
# Download and install the Twins daemon, write its RPC config, register a
# systemd unit for it, then wait for the initial blockchain sync.
installTwins() {
    echo "Installing Twins..."
    mkdir -p /tmp/twins
    cd /tmp/twins
    curl -Lo twins.tar.gz $twinslink
    tar -xzf twins.tar.gz
    sudo mv ./bin/* /usr/local/bin
    cd
    rm -rf /tmp/twins
    mkdir -p /home/explorer/.twins
    cat > /home/explorer/.twins/twins.conf << EOL
rpcport=37818
rpcuser=$rpcuser
rpcpassword=$rpcpassword
daemon=1
txindex=1
EOL
    # Writing under /etc requires root for the *redirection*, so pipe the
    # heredoc through "sudo tee" instead of "sudo cat > file" (with the
    # latter, the shell opens the file with the caller's privileges).
    sudo tee /etc/systemd/system/twinsd.service > /dev/null << EOL
[Unit]
Description=twinsd
After=network.target
[Service]
Type=forking
User=explorer
WorkingDirectory=/home/explorer
ExecStart=/usr/local/bin/twinsd -datadir=/home/explorer/.twins
ExecStop=/usr/local/bin/twins-cli -datadir=/home/explorer/.twins stop
Restart=on-abort
[Install]
WantedBy=multi-user.target
EOL
    # The unit points at /usr/local/bin, where the binaries were installed
    # above (the original referenced /home/explorer/bin, which is never
    # created by this script).
    sudo systemctl start twinsd
    sudo systemctl enable twinsd
    echo "Sleeping for 1 hour while node syncs blockchain..."
    sleep 1h
    clear
}
# Install the "forever" process-runner CLI globally.
installForever(){
    # "yarn install" does not accept package arguments ("yarn install forever -g"
    # is a no-op for the package); install the global CLI via npm, which is
    # already used for pm2 elsewhere in this script.
    sudo npm install -g forever
}
# Clone and bootstrap the BlockEx explorer, seed its collections once,
# schedule the periodic sync scripts via cron, and start the API server.
installBlockEx () {
    echo "Installing BlockEx..."
    git clone https://github.com/crypto-critic/Crypto-API.git /home/explorer/blockex
    cd /home/explorer/blockex
    yarn install
    # Initial one-shot population of each collection.
    nodejs ./scripts/block.js
    nodejs ./scripts/coin.js
    nodejs ./scripts/masternode.js
    nodejs ./scripts/peer.js
    nodejs ./scripts/rich.js
    clear
    # Periodic refresh: every minute for blocks/masternodes/peers/rich list,
    # every five minutes for coin stats.
    cat > mycron << EOL
*/1 * * * * cd /home/explorer/blockex && ./shell-scripts/cron_block.sh >> ./tmp/block.log 2>&1
*/1 * * * * cd /home/explorer/blockex && /usr/bin/nodejs ./scripts/masternode.js >> ./tmp/masternode.log 2>&1
*/1 * * * * cd /home/explorer/blockex && /usr/bin/nodejs ./scripts/peer.js >> ./tmp/peer.log 2>&1
*/1 * * * * cd /home/explorer/blockex && /usr/bin/nodejs ./scripts/rich.js >> ./tmp/rich.log 2>&1
*/5 * * * * cd /home/explorer/blockex && /usr/bin/nodejs ./scripts/coin.js >> ./tmp/coin.log 2>&1
EOL
    crontab mycron
    rm -f mycron
    # NOTE(review): the API is started with "forever" here while pm2 is also
    # installed and registered for startup below -- confirm which process
    # manager is intended to own the server.
    forever start ./server/index.js
    sudo pm2 startup ubuntu
}
# Setup
echo "Updating system..."
sudo apt-get update -y
sudo apt-get install -y apt-transport-https build-essential cron curl gcc git g++ make sudo vim wget
clear

# Variables
echo "Setting up variables..."
# Resolve the latest linux64 Bulwark release tarball URL via the GitHub API.
bwklink=`curl -s https://api.github.com/repos/bulwark-crypto/bulwark/releases/latest | grep browser_download_url | grep linux64 | cut -d '"' -f 4`
# The TWINS link is a fixed URL. The original wrapped it in `curl -s ...`,
# which stored the downloaded *file contents* in the variable instead of
# the URL that installTwins later passes to curl.
twinslink="https://github.com/NewCapital/TWINS-Core/releases/download/twins_v3.2.2.2/twins-3.2.2.2-x86_64-linux-gnu.tar.gz"
# NOTE(review): "sha1" is assumed to be a helper on PATH that emits a random
# hash used as RPC credentials -- confirm it exists on the target host.
rpcuser=`sha1`
rpcpassword=`sha1`
echo "BWk: $bwklink"
echo "User: $rpcuser"
echo "Pass: $rpcpassword"
sleep 5s
clear

installMongo
installBulwark
installTwins
installNodeAndYarn
installForever
installBlockEx
<gh_stars>1-10
package cim4j;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import cim4j.ExcitationSystemDynamics;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
import cim4j.PU;
import cim4j.Seconds;
import cim4j.Boolean;
/*
The class represents IEEE Std 421.5-2005 type ST2A model. Some static systems utilize both current and voltage sources (generator terminal quantities) to comprise the power source. The regulator controls the exciter output through controlled saturation of the power transformer components. These compound-source rectifier excitation systems are designated Type ST2A and are represented by ExcIEEEST2A. Reference: IEEE Standard 421.5-2005 Section 7.2.
*/
public class ExcIEEEST2A extends ExcitationSystemDynamics
{
    // Attribute storage, indexed by the ordinal of the corresponding enum:
    // one array for attributes whose values are other CIM objects, one for
    // attributes built from primitive (string) values.
    private BaseClass[] ExcIEEEST2A_class_attributes;
    private BaseClass[] ExcIEEEST2A_primitive_attributes;

    // RDF identifier of this model element.
    private java.lang.String rdfid;

    public void setRdfid(java.lang.String id) {
        rdfid = id;
    }

    // Strategy interface: turns a raw string into a typed attribute value.
    private abstract interface PrimitiveBuilder {
        public abstract BaseClass construct(java.lang.String value);
    };

    // One constant per primitive attribute; each constant knows which typed
    // wrapper (PU, Seconds, Boolean) to build from the raw string value.
    // LAST_ENUM is a sentinel and is never used as a real attribute.
    private enum ExcIEEEST2A_primitive_builder implements PrimitiveBuilder {
        ka(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        ta(){
            public BaseClass construct (java.lang.String value) {
                return new Seconds(value);
            }
        },
        vrmax(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        vrmin(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        ke(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        te(){
            public BaseClass construct (java.lang.String value) {
                return new Seconds(value);
            }
        },
        kf(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        tf(){
            public BaseClass construct (java.lang.String value) {
                return new Seconds(value);
            }
        },
        kp(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        ki(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        kc(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        efdmax(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        uelin(){
            public BaseClass construct (java.lang.String value) {
                return new Boolean(value);
            }
        },
        LAST_ENUM() {
            public BaseClass construct (java.lang.String value) {
                return new cim4j.Integer("0");
            }
        };
    }

    // Index enum for class-valued attributes; mirrors the primitive builder
    // constants, with LAST_ENUM again acting as a sentinel.
    private enum ExcIEEEST2A_class_attributes_enum {
        ka,
        ta,
        vrmax,
        vrmin,
        ke,
        te,
        kf,
        tf,
        kp,
        ki,
        kc,
        efdmax,
        uelin,
        LAST_ENUM;
    }

    public ExcIEEEST2A() {
        ExcIEEEST2A_primitive_attributes = new BaseClass[ExcIEEEST2A_primitive_builder.values().length];
        ExcIEEEST2A_class_attributes = new BaseClass[ExcIEEEST2A_class_attributes_enum.values().length];
    }

    // Store a class-valued attribute at the slot given by the enum ordinal.
    public void updateAttributeInArray(ExcIEEEST2A_class_attributes_enum attrEnum, BaseClass value) {
        try {
            ExcIEEEST2A_class_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Store a primitive-valued attribute at the slot given by the enum ordinal.
    public void updateAttributeInArray(ExcIEEEST2A_primitive_builder attrEnum, BaseClass value) {
        try {
            ExcIEEEST2A_primitive_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Set a class-valued attribute by name; unknown names are delegated to
    // the superclass (ExcitationSystemDynamics).
    public void setAttribute(java.lang.String attrName, BaseClass value) {
        try {
            ExcIEEEST2A_class_attributes_enum attrEnum = ExcIEEEST2A_class_attributes_enum.valueOf(attrName);
            updateAttributeInArray(attrEnum, value);
            System.out.println("Updated ExcIEEEST2A, setting " + attrName);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    /* If the attribute is a String, it is a primitive and we will make it into a BaseClass */
    public void setAttribute(java.lang.String attrName, java.lang.String value) {
        try {
            ExcIEEEST2A_primitive_builder attrEnum = ExcIEEEST2A_primitive_builder.valueOf(attrName);
            updateAttributeInArray(attrEnum, attrEnum.construct(value));
            System.out.println("Updated ExcIEEEST2A, setting " + attrName + " to: " + value);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    // Render this element: with topClass true, list all set attributes
    // (primitive then class-valued) plus the superclass output; otherwise
    // print only a short reference containing the RDF id.
    public java.lang.String toString(boolean topClass) {
        java.lang.String result = "";
        java.lang.String indent = "";
        if (topClass) {
            for (ExcIEEEST2A_primitive_builder attrEnum: ExcIEEEST2A_primitive_builder.values()) {
                BaseClass bc = ExcIEEEST2A_primitive_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += "    ExcIEEEST2A." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            for (ExcIEEEST2A_class_attributes_enum attrEnum: ExcIEEEST2A_class_attributes_enum.values()) {
                BaseClass bc = ExcIEEEST2A_class_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += "    ExcIEEEST2A." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            result += super.toString(true);
        }
        else {
            result += "(ExcIEEEST2A) RDFID: " + rdfid;
        }
        return result;
    }

    public final java.lang.String debugName = "ExcIEEEST2A";

    public java.lang.String debugString()
    {
        return debugName;
    }

    // Primitive fallback for value assignment; this class has no direct value.
    public void setValue(java.lang.String s) {
        System.out.println(debugString() + " is not sure what to do with " + s);
    }

    // Factory method used by the generic CIM reader.
    public BaseClass construct() {
        return new ExcIEEEST2A();
    }
};
|
# This script will use rsync to
# copy the files for the Jenkins job workspace to the provided target server:directory,
# excluding the files listed in exclude.txt (regular expression list)
# setting proper file owner and permissions
# Arguments:
# 1 - Location of the build checkout - double quoted, no trailing slash
# 2 - Relative path to be deployed - no trailing slash
# 3 - Target server IP
# 4 - Target server location
# 5 - Target server sudo user
# Each remote step is echoed first (for the Jenkins log) and then executed
# as the deploy user via "sudo -H -u $5 bash -c".

# building the unix path for the root directory
dirRoot=$1
echo -e "\n\n\e[0;34mUnix path for Jenkins Workspace working dir is:\e[0m" $dirRoot
echo -e "\n\e[0;34mUnix path for Deployable files dir is:\e[0m" $dirRoot/$2

echo -e "\n\n\e[0;34m********** Start Synchronising files with Rsync **********\e[0m"
echo /usr/bin/rsync -arivzt --delete --no-p --no-o --no-g --exclude-from=$dirRoot/build/exclude.txt --stats $dirRoot/$2/ -e \"/usr/bin/ssh\" --rsync-path=\"sudo /usr/bin/rsync\" $5@$3:$4
sudo -H -u $5 bash -c "/usr/bin/rsync -arivzt --delete --no-p --no-o --no-g --exclude-from=$dirRoot/build/exclude.txt --stats $dirRoot/$2/ -e '/usr/bin/ssh' --rsync-path='sudo /usr/bin/rsync' $5@$3:$4"

echo -e "\n\n\e[0;34m********** Set folder owners and permissions **********\e[0m"
echo /usr/bin/ssh $5@$3 "sudo chown -R www-data:www-data $4"
sudo -H -u $5 bash -c "/usr/bin/ssh $5@$3 'sudo chown -R www-data:www-data $4'"

echo -e "\n\n\e[0;34m********** Set permissions to folders **********\e[0m"
echo /usr/bin/ssh $5@$3 "sudo find $4 -type d -print0 | xargs -0 sudo chmod 0755"
sudo -H -u $5 bash -c "/usr/bin/ssh $5@$3 'sudo find $4 -type d -print0 | xargs -0 sudo chmod 0755'"

echo -e "\n\n\e[0;34m********** Set permissions to files **********\e[0m"
echo /usr/bin/ssh $5@$3 "sudo find $4 -type f -print0 | xargs -0 sudo chmod 0644"
sudo -H -u $5 bash -c "/usr/bin/ssh $5@$3 'sudo find $4 -type f -print0 | xargs -0 sudo chmod 0644'"

echo -e "\n\n\e[0;34m********** Set permissions to uploads folder **********\e[0m"
echo /usr/bin/ssh $5@$3 "sudo chmod 0755 -R $4/uploads"
sudo -H -u $5 bash -c "/usr/bin/ssh $5@$3 'sudo chmod 0755 -R $4/uploads'"

# Purge cache if you use a CDN
# echo -e "\n\n\e[0;34m********** Purge CDN Cache **********\e[0m"
# echo php curl_exec(curl_init("https://www.cloudflare.com/api_json.html?a=fpurge_ts&tkn=TOKEN&email=your@email.com&z=yourdomain.com&v=1"));
# php curl_exec(curl_init("https://www.cloudflare.com/api_json.html?a=fpurge_ts&tkn=TOKEN&email=your@email.com&z=yourdomain.com&v=1"));
import React from 'react';
import {FormGroup, Label, Col, Input } from 'reactstrap';
//renderField จะรับ props ต่างๆ ของ Field ที่ได้จาก redux-form
const renderFieldGroup = ({ input, label, type, holder, meta: { touched, error } }) => {
return (
<div>
<FormGroup row>
<Col md="2">
<Label htmlFor={input.name}>{label}</Label>
</Col>
<Col xs="12" md="10">
<Input {...input} placeholder={holder} type={type} id={input.name} disabled/>
</Col>
</FormGroup>
</div>
)
}
export default renderFieldGroup; |
# Copyright (c) 2013 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Shapes outbound traffic with tc/htb: p2p traffic on port 9222 leaving the
# local network is capped at $LIMIT; everything else runs at link speed.

#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound Brocoin protocol traffic to this rate
LIMIT="160kbit"
#defines the address space for which you wish to disable rate limiting
LOCALNET="192.168.0.0/16"

#delete existing rules
tc qdisc del dev ${IF} root

#add root class
tc qdisc add dev ${IF} root handle 1: htb default 10

#add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}

#add our two classes. one unlimited, another limited
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1

#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11

#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#	iptables -t mangle -D OUTPUT 1
#	ret=$?
#done

#limit outgoing traffic to and from port 9222. but not when dealing with a host on the local network
#   (defined by $LOCALNET)
#   --set-mark marks packages matching these criteria with the number "2"
#   these packages are filtered by the tc filter with "handle 2"
#   this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 9222 ! -d ${LOCALNET} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 9222 ! -d ${LOCALNET} -j MARK --set-mark 0x2
package auth
import java.net.URI
import com.gu.mediaservice.lib.argo.ArgoHelpers
import com.gu.mediaservice.lib.argo.model.Link
import com.gu.mediaservice.lib.auth.Authentication.{MachinePrincipal, UserPrincipal}
import com.gu.mediaservice.lib.auth.provider.AuthenticationProviders
import com.gu.mediaservice.lib.auth.{Authentication, Permissions, PermissionsHandler}
import play.api.libs.json.Json
import play.api.mvc.{BaseController, ControllerComponents, Result}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
class AuthController(auth: Authentication, providers: AuthenticationProviders, val config: AuthConfig,
override val controllerComponents: ControllerComponents)(implicit ec: ExecutionContext)
extends BaseController
with ArgoHelpers
with PermissionsHandler {
val indexResponse = {
val indexData = Map("description" -> "This is the Auth API")
val indexLinks = List(
Link("root", config.mediaApiUri),
Link("login", config.services.loginUriTemplate),
Link("ui:logout", s"${config.rootUri}/logout"),
Link("session", s"${config.rootUri}/session")
)
respond(indexData, indexLinks)
}
def index = auth { indexResponse }
def session = auth { request =>
val showPaid = hasPermission(request.user, Permissions.ShowPaid)
request.user match {
case UserPrincipal(firstName, lastName, email, _) =>
respond(
Json.obj("user" ->
Json.obj(
"name" -> s"$firstName $lastName",
"firstName" -> firstName,
"lastName" -> lastName,
"email" -> email,
"permissions" ->
Json.obj(
"showPaid" -> showPaid
)
)
)
)
case MachinePrincipal(accessor, _) => respond(
Json.obj("api-key" ->
Json.obj(
"name" -> accessor.identity,
"tier" -> accessor.tier.toString,
"permissions" ->
Json.obj(
"showPaid" -> showPaid
)
)
)
)
}
}
def isOwnDomainAndSecure(uri: URI): Boolean = {
uri.getHost.endsWith(config.domainRoot) && uri.getScheme == "https"
}
def isValidDomain(inputUri: String): Boolean = {
val success = Try(URI.create(inputUri)).filter(isOwnDomainAndSecure).isSuccess
if (!success) logger.warn(s"Provided login redirect URI is invalid: $inputUri")
success
}
// Play session key used to store the URI to redirect to during login
val REDIRECT_SESSION_KEY = "gridRedirectUri"
// Trigger the auth cycle
// If a redirectUri is provided, redirect the browser there once auth'd,
// else return a dummy page (e.g. for automatically re-auth'ing in the background)
def doLogin(redirectUri: Option[String] = None) = Action.async { implicit req =>
val checkedRedirectUri = redirectUri collect {
case uri if isValidDomain(uri) => uri
}
providers.userProvider.sendForAuthentication match {
case Some(authCallback) =>
authCallback(req).map(_.addingToSession(checkedRedirectUri.map(REDIRECT_SESSION_KEY -> _).toSeq:_*))
case None =>
Future.successful(InternalServerError("Login not supported by configured authentication provider"))
}
}
def oauthCallback = Action.async { implicit request =>
providers.userProvider.sendForAuthenticationCallback match {
case Some(callback) =>
val maybeRedirectUri = request.session.get(REDIRECT_SESSION_KEY)
callback(request, maybeRedirectUri).map(_.removingFromSession(REDIRECT_SESSION_KEY))
case None =>
Future.successful(InternalServerError("No callback for configured authentication provider"))
}
}
def logout = Action { implicit request =>
val result: Result = providers.userProvider.flushToken match {
case Some(callback) => callback(request, Ok("Logged out"))
case None => InternalServerError("Logout not supported by configured authentication provider")
}
result.withNewSession
}
}
|
package chylex.hee.entity.projectile;
import net.minecraft.client.entity.EntityClientPlayerMP;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityEnderPearl;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetHandlerPlayServer;
import net.minecraft.util.DamageSource;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.World;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.living.EnderTeleportEvent;
import chylex.hee.mechanics.enhancements.EnhancementList;
import chylex.hee.mechanics.enhancements.types.EnderPearlEnhancements;
import cpw.mods.fml.client.FMLClientHandler;
public class EntityProjectileEnhancedEnderPearl extends EntityEnderPearl{
private final EnhancementList<EnderPearlEnhancements> enhancements;
private EntityPlayer ride = null;
private short life = 0;
public EntityProjectileEnhancedEnderPearl(World world){
super(world);
this.enhancements = new EnhancementList<>(EnderPearlEnhancements.class);
}
public EntityProjectileEnhancedEnderPearl(World world, EntityLivingBase thrower, EnhancementList<EnderPearlEnhancements> enhancements){
super(world, thrower);
this.enhancements = enhancements;
if (thrower instanceof EntityPlayer){
/* TODO EntityPlayer player = (EntityPlayer)thrower;
if (pearlTypes.contains(EnderPearlEnhancements.RIDING)){
ride = (EntityPlayer)thrower;
for(Object o:world.loadedEntityList){
if (o instanceof EntityProjectileEnhancedEnderPearl){
EntityProjectileEnhancedEnderPearl pearl = (EntityProjectileEnhancedEnderPearl)o;
if (pearl.ride != null && pearl.ride.getCommandSenderName().equals(player.getCommandSenderName())){
pearl.ride = null;
if (!pearl.pearlTypes.contains(EnderPearlEnhancements.NO_FALL_DAMAGE))player.attackEntityFrom(DamageSource.fall, 5F);
pearl.setDead();
break;
}
}
}
}*/
}
}
@Override
public void entityInit(){
super.entityInit();
dataWatcher.addObject(16, ride == null ? "" : ride.getCommandSenderName());
}
@Override
public void onUpdate(){
super.onUpdate();
if (!worldObj.isRemote){
if (ride != null){
dataWatcher.updateObject(16, ride.getCommandSenderName());
updateRidePosition();
ride.fallDistance = 0F;
ride.setPosition(posX, posY+ride.height, posZ);
}
/* TODO if (pearlTypes.contains(EnderPearlEnhancements.DOUBLE_SPEED)){
if (inGround){
motionX *= 0.2D;
motionY *= 0.2D;
motionZ *= 0.2D;
}
else{
super.onUpdate();
if (ride != null)updateRidePosition();
}
}*/
if (++life > 200)setDead();
}
else{
EntityClientPlayerMP clientPlayer = FMLClientHandler.instance().getClient().thePlayer;
if (dataWatcher.getWatchableObjectString(16).equals(clientPlayer.getCommandSenderName()))clientPlayer.setPosition(posX, posY+clientPlayer.height, posZ);
}
}
private void updateRidePosition(){
ride.lastTickPosX = ride.prevPosX = ride.posX = posX;
ride.lastTickPosY = ride.prevPosY = ride.posY = posY+ride.height+yOffset;
ride.lastTickPosZ = ride.prevPosZ = ride.posZ = posZ;
NetHandlerPlayServer serverHandler = ((EntityPlayerMP)ride).playerNetServerHandler;
serverHandler.floatingTickCount = 0;
serverHandler.lastPosX = ride.posX;
serverHandler.lastPosY = ride.posY;
serverHandler.lastPosZ = ride.posZ;
}
@Override
protected void onImpact(MovingObjectPosition mop){
if (mop.entityHit != null){
if (ride != null && mop.entityHit.equals(ride))return;
mop.entityHit.attackEntityFrom(DamageSource.causeThrownDamage(this, getThrower()), 0F);
}
for(int i = 0; i < 32; ++i){
worldObj.spawnParticle("portal", posX, posY+rand.nextDouble()*2D, posZ, rand.nextGaussian(), 0D, rand.nextGaussian());
}
if (!worldObj.isRemote){
if (getThrower() != null && getThrower() instanceof EntityPlayerMP){
EntityPlayerMP player = (EntityPlayerMP)getThrower();
if (player.playerNetServerHandler.func_147362_b().isChannelOpen() && player.worldObj == worldObj){ // OBFUSCATED get network manager
EnderTeleportEvent event = new EnderTeleportEvent(player, posX, posY, posZ, 5F);
if (!MinecraftForge.EVENT_BUS.post(event)){
// TODO if (pearlTypes.contains(EnderPearlEnhancements.EXPLOSIVE))worldObj.newExplosion(this, posX, posY, posZ, 2.7F, false, true);
/* TODO if (pearlTypes.contains(EnderPearlEnhancements.FREEZE)){
for(EntityLivingBase entity:(List<EntityLivingBase>)worldObj.getEntitiesWithinAABB(EntityLivingBase.class, boundingBox.expand(5D, 3D, 5D))){
double dist = entity.getDistanceSqToEntity(this);
if (dist <= 5D)entity.addPotionEffect(new PotionEffect(Potion.moveSlowdown.id, 80+(int)(10D*(6D-dist)), 3, true));
}
}*/
if (player.isRiding())player.mountEntity((Entity)null);
player.setPositionAndUpdate(event.targetX, event.targetY, event.targetZ);
player.fallDistance = 0F;
// TODO if (!pearlTypes.contains(EnderPearlEnhancements.NO_FALL_DAMAGE))player.attackEntityFrom(DamageSource.fall, event.attackDamage);
// TODO if (pearlTypes.contains(EnderPearlEnhancements.FREEZE))PacketPipeline.sendToAllAround(this, 64D, new C20Effect(FXType.Basic.ENDER_PEARL_FREEZE, this));
}
}
}
setDead();
}
}
@Override
protected float getGravityVelocity(){
return 1F; // TODO pearlTypes.contains(EnderPearlEnhancements.NO_GRAVITY) ? 0F : (super.getGravityVelocity()*(pearlTypes.contains(EnderPearlEnhancements.INCREASED_RANGE) ? 0.75F : 1F));
}
@Override
public void writeToNBT(NBTTagCompound nbt){
super.writeToNBT(nbt);
nbt.setString("enhancements2", enhancements.serialize());
nbt.setShort("life", life);
}
@Override
public void readFromNBT(NBTTagCompound nbt){
super.readFromNBT(nbt);
enhancements.deserialize(nbt.getString("enhancements2"));
life = nbt.getShort("life");
}
}
|
<reponame>linuzri/samourai-wallet-android<filename>app/src/main/java/com/samourai/codescanner/CodeScannerView.java
/*
* MIT License
*
* Copyright (c) 2017 <NAME> [<EMAIL>]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.samourai.codescanner;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.os.Build;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.samourai.wallet.R;
import androidx.annotation.AttrRes;
import androidx.annotation.ColorInt;
import androidx.annotation.FloatRange;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.Px;
import androidx.annotation.RequiresApi;
import androidx.annotation.StyleRes;
/**
* A view to display code scanner preview
*
* @see CodeScanner
*/
public final class CodeScannerView extends ViewGroup {
private static final boolean DEFAULT_AUTO_FOCUS_BUTTON_VISIBLE = true;
private static final boolean DEFAULT_FLASH_BUTTON_VISIBLE = true;
private static final int DEFAULT_AUTO_FOCUS_BUTTON_VISIBILITY = VISIBLE;
private static final int DEFAULT_FLASH_BUTTON_VISIBILITY = VISIBLE;
private static final int DEFAULT_MASK_COLOR = 0x77000000;
private static final int DEFAULT_FRAME_COLOR = Color.WHITE;
private static final int DEFAULT_AUTO_FOCUS_BUTTON_COLOR = Color.WHITE;
private static final int DEFAULT_FLASH_BUTTON_COLOR = Color.WHITE;
private static final float DEFAULT_FRAME_THICKNESS_DP = 2f;
private static final float DEFAULT_FRAME_ASPECT_RATIO_WIDTH = 1f;
private static final float DEFAULT_FRAME_ASPECT_RATIO_HEIGHT = 1f;
private static final float DEFAULT_FRAME_CORNER_SIZE_DP = 50f;
private static final float DEFAULT_FRAME_CORNERS_RADIUS_DP = 0f;
private static final float DEFAULT_FRAME_SIZE = 0.75f;
private static final float BUTTON_SIZE_DP = 56f;
private static final float FOCUS_AREA_SIZE_DP = 20f;
private SurfaceView mPreviewView;
private ViewFinderView mViewFinderView;
private ImageView mAutoFocusButton;
private ImageView mFlashButton;
private Point mPreviewSize;
private SizeListener mSizeListener;
private CodeScanner mCodeScanner;
private int mButtonSize;
private int mAutoFocusButtonColor;
private int mFlashButtonColor;
private int mFocusAreaSize;
/**
* A view to display code scanner preview
*
* @see CodeScanner
*/
public CodeScannerView(@NonNull final Context context) {
super(context);
initialize(context, null, 0, 0);
}
/**
* A view to display code scanner preview
*
* @see CodeScanner
*/
public CodeScannerView(@NonNull final Context context, @Nullable final AttributeSet attrs) {
super(context, attrs);
initialize(context, attrs, 0, 0);
}
/**
* A view to display code scanner preview
*
* @see CodeScanner
*/
public CodeScannerView(@NonNull final Context context, @Nullable final AttributeSet attrs,
@AttrRes final int defStyleAttr) {
super(context, attrs, defStyleAttr);
initialize(context, attrs, defStyleAttr, 0);
}
/**
* A view to display code scanner preview
*
* @see CodeScanner
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public CodeScannerView(final Context context, final AttributeSet attrs,
@AttrRes final int defStyleAttr, @StyleRes final int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
initialize(context, attrs, defStyleAttr, defStyleRes);
}
private void initialize(@NonNull final Context context, @Nullable final AttributeSet attrs,
@AttrRes final int defStyleAttr, @StyleRes final int defStyleRes) {
mPreviewView = new SurfaceView(context);
mPreviewView.setLayoutParams(
new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
mViewFinderView = new ViewFinderView(context);
mViewFinderView.setLayoutParams(
new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
final float density = context.getResources().getDisplayMetrics().density;
mButtonSize = Math.round(density * BUTTON_SIZE_DP);
mFocusAreaSize = Math.round(density * FOCUS_AREA_SIZE_DP);
mAutoFocusButton = new ImageView(context);
mAutoFocusButton.setLayoutParams(new LayoutParams(mButtonSize, 400));
mAutoFocusButton.setScaleType(ImageView.ScaleType.CENTER);
mAutoFocusButton.setImageResource(R.drawable.ic_code_scanner_auto_focus_on);
mAutoFocusButton.setOnClickListener(new AutoFocusClickListener());
mFlashButton = new ImageView(context);
mFlashButton.setLayoutParams(new LayoutParams(mButtonSize, mButtonSize) );
mFlashButton.setScaleType(ImageView.ScaleType.CENTER);
mFlashButton.setImageResource(R.drawable.ic_code_scanner_flash_on);
mFlashButton.setOnClickListener(new FlashClickListener());
if (attrs == null) {
mViewFinderView.setFrameAspectRatio(DEFAULT_FRAME_ASPECT_RATIO_WIDTH,
DEFAULT_FRAME_ASPECT_RATIO_HEIGHT);
mViewFinderView.setMaskColor(DEFAULT_MASK_COLOR);
mViewFinderView.setFrameColor(DEFAULT_FRAME_COLOR);
mViewFinderView.setFrameThickness(Math.round(DEFAULT_FRAME_THICKNESS_DP * density));
mViewFinderView.setFrameCornersSize(Math.round(DEFAULT_FRAME_CORNER_SIZE_DP * density));
mViewFinderView
.setFrameCornersRadius(Math.round(DEFAULT_FRAME_CORNERS_RADIUS_DP * density));
mViewFinderView.setFrameSize(DEFAULT_FRAME_SIZE);
mAutoFocusButton.setColorFilter(DEFAULT_AUTO_FOCUS_BUTTON_COLOR);
mFlashButton.setColorFilter(DEFAULT_FLASH_BUTTON_COLOR);
mAutoFocusButton.setVisibility(DEFAULT_AUTO_FOCUS_BUTTON_VISIBILITY);
mFlashButton.setVisibility(DEFAULT_FLASH_BUTTON_VISIBILITY);
} else {
TypedArray a = null;
try {
a = context.getTheme()
.obtainStyledAttributes(attrs, R.styleable.CodeScannerView, defStyleAttr,
defStyleRes);
setMaskColor(a.getColor(R.styleable.CodeScannerView_maskColor, DEFAULT_MASK_COLOR));
setFrameColor(
a.getColor(R.styleable.CodeScannerView_frameColor, DEFAULT_FRAME_COLOR));
setFrameThickness(
a.getDimensionPixelOffset(R.styleable.CodeScannerView_frameThickness,
Math.round(DEFAULT_FRAME_THICKNESS_DP * density)));
setFrameCornersSize(
a.getDimensionPixelOffset(R.styleable.CodeScannerView_frameCornersSize,
Math.round(DEFAULT_FRAME_CORNER_SIZE_DP * density)));
setFrameCornersRadius(
a.getDimensionPixelOffset(R.styleable.CodeScannerView_frameCornersRadius,
Math.round(DEFAULT_FRAME_CORNERS_RADIUS_DP * density)));
setFrameAspectRatio(a.getFloat(R.styleable.CodeScannerView_frameAspectRatioWidth,
DEFAULT_FRAME_ASPECT_RATIO_WIDTH),
a.getFloat(R.styleable.CodeScannerView_frameAspectRatioHeight,
DEFAULT_FRAME_ASPECT_RATIO_HEIGHT));
setFrameSize(a.getFloat(R.styleable.CodeScannerView_frameSize, DEFAULT_FRAME_SIZE));
setAutoFocusButtonVisible(
a.getBoolean(R.styleable.CodeScannerView_autoFocusButtonVisible,
DEFAULT_AUTO_FOCUS_BUTTON_VISIBLE));
setFlashButtonVisible(a.getBoolean(R.styleable.CodeScannerView_flashButtonVisible,
DEFAULT_FLASH_BUTTON_VISIBLE));
setAutoFocusButtonColor(a.getColor(R.styleable.CodeScannerView_autoFocusButtonColor,
DEFAULT_AUTO_FOCUS_BUTTON_COLOR));
setFlashButtonColor(a.getColor(R.styleable.CodeScannerView_flashButtonColor,
DEFAULT_FLASH_BUTTON_COLOR));
} finally {
if (a != null) {
a.recycle();
}
}
}
addView(mPreviewView);
addView(mViewFinderView);
addView(mAutoFocusButton);
addView(mFlashButton);
// enable autofocus by default
final CodeScanner scanner = mCodeScanner;
if (scanner == null || !scanner.isAutoFocusSupportedOrUnknown()) {
return;
}
final boolean enabled = !scanner.isAutoFocusEnabled();
scanner.setAutoFocusEnabled(enabled);
setAutoFocusEnabled(enabled);
}
@Override
protected void onLayout(final boolean changed, final int left, final int top, final int right,
final int bottom) {
performLayout(right - left, bottom - top);
}
@Override
protected void onSizeChanged(final int width, final int height, final int oldWidth,
final int oldHeight) {
performLayout(width, height);
final SizeListener listener = mSizeListener;
if (listener != null) {
listener.onSizeChanged(width, height);
}
}
@Override
@SuppressLint("ClickableViewAccessibility")
public boolean onTouchEvent(@NonNull final MotionEvent event) {
final CodeScanner codeScanner = mCodeScanner;
final Rect frameRect = getFrameRect();
final int x = (int) event.getX();
final int y = (int) event.getY();
if (codeScanner != null && frameRect != null &&
codeScanner.isAutoFocusSupportedOrUnknown() && codeScanner.isTouchFocusEnabled() &&
event.getAction() == MotionEvent.ACTION_DOWN && frameRect.isPointInside(x, y)) {
final int areaSize = mFocusAreaSize;
codeScanner.performTouchFocus(
new Rect(x - areaSize, y - areaSize, x + areaSize, y + areaSize)
.fitIn(frameRect));
}
return super.onTouchEvent(event);
}
/**
* Get current mask color
*
* @see #setMaskColor
*/
@ColorInt
public int getMaskColor() {
return mViewFinderView.getMaskColor();
}
/**
* Set color of the space outside of the framing rect
*
* @param color Mask color
*/
public void setMaskColor(@ColorInt final int color) {
mViewFinderView.setMaskColor(color);
}
/**
* Get current frame color
*
* @see #setFrameColor
*/
@ColorInt
public int getFrameColor() {
return mViewFinderView.getFrameColor();
}
/**
* Set color of the frame
*
* @param color Frame color
*/
public void setFrameColor(@ColorInt final int color) {
mViewFinderView.setFrameColor(color);
}
/**
* Get current frame thickness
*
* @see #setFrameThickness
*/
@Px
public int getFrameThickness() {
return mViewFinderView.getFrameThickness();
}
/**
* Set frame thickness
*
* @param thickness Frame thickness in pixels
*/
public void setFrameThickness(@Px final int thickness) {
if (thickness < 0) {
throw new IllegalArgumentException("Frame thickness can't be negative");
}
mViewFinderView.setFrameThickness(thickness);
}
/**
* Get current frame corners size
*
* @see #setFrameCornersSize
*/
@Px
public int getFrameCornersSize() {
return mViewFinderView.getFrameCornersSize();
}
/**
* Set size of the frame corners
*
* @param size Size in pixels
*/
public void setFrameCornersSize(@Px final int size) {
if (size < 0) {
throw new IllegalArgumentException("Frame corners size can't be negative");
}
mViewFinderView.setFrameCornersSize(size);
}
/**
* Get current frame corners radius
*
* @see #setFrameCornersRadius
*/
@Px
public int getFrameCornersRadius() {
return mViewFinderView.getFrameCornersRadius();
}
/**
* Set current frame corners radius
*
* @param radius Frame corners radius in pixels
*/
public void setFrameCornersRadius(@Px final int radius) {
if (radius < 0) {
throw new IllegalArgumentException("Frame corners radius can't be negative");
}
mViewFinderView.setFrameCornersRadius(radius);
}
/**
* Get current frame size
*
* @see #setFrameSize
*/
@FloatRange(from = 0.1, to = 1.0)
public float getFrameSize() {
return mViewFinderView.getFrameSize();
}
/**
* Set relative frame size where 1.0 means full size
*
* @param size Relative frame size between 0.1 and 1.0
*/
public void setFrameSize(@FloatRange(from = 0.1, to = 1) final float size) {
if (size < 0.1 || size > 1) {
throw new IllegalArgumentException(
"Max frame size value should be between 0.1 and 1, inclusive");
}
mViewFinderView.setFrameSize(size);
}
/**
* Get current frame aspect ratio width
*
* @see #setFrameAspectRatioWidth
* @see #setFrameAspectRatio
*/
@FloatRange(from = 0, fromInclusive = false)
public float getFrameAspectRatioWidth() {
return mViewFinderView.getFrameAspectRatioWidth();
}
/**
* Set frame aspect ratio width
*
* @param ratioWidth Frame aspect ratio width
* @see #setFrameAspectRatio
*/
public void setFrameAspectRatioWidth(
@FloatRange(from = 0, fromInclusive = false) final float ratioWidth) {
if (ratioWidth <= 0) {
throw new IllegalArgumentException(
"Frame aspect ratio values should be greater than zero");
}
mViewFinderView.setFrameAspectRatioWidth(ratioWidth);
}
/**
* Get current frame aspect ratio height
*
* @see #setFrameAspectRatioHeight
* @see #setFrameAspectRatio
*/
@FloatRange(from = 0, fromInclusive = false)
public float getFrameAspectRatioHeight() {
return mViewFinderView.getFrameAspectRatioHeight();
}
/**
* Set frame aspect ratio height
*
* @param ratioHeight Frame aspect ratio width
* @see #setFrameAspectRatio
*/
public void setFrameAspectRatioHeight(
@FloatRange(from = 0, fromInclusive = false) final float ratioHeight) {
if (ratioHeight <= 0) {
throw new IllegalArgumentException(
"Frame aspect ratio values should be greater than zero");
}
mViewFinderView.setFrameAspectRatioHeight(ratioHeight);
}
/**
* Set frame aspect ratio (ex. 1:1, 15:10, 16:9, 4:3)
*
* @param ratioWidth Frame aspect ratio width
* @param ratioHeight Frame aspect ratio height
*/
public void setFrameAspectRatio(
@FloatRange(from = 0, fromInclusive = false) final float ratioWidth,
@FloatRange(from = 0, fromInclusive = false) final float ratioHeight) {
if (ratioWidth <= 0 || ratioHeight <= 0) {
throw new IllegalArgumentException(
"Frame aspect ratio values should be greater than zero");
}
mViewFinderView.setFrameAspectRatio(ratioWidth, ratioHeight);
}
/**
* Whether if auto focus button is currently visible
*
* @see #setAutoFocusButtonVisible
*/
public boolean isAutoFocusButtonVisible() {
return mAutoFocusButton.getVisibility() == VISIBLE;
}
/**
* Set whether auto focus button is visible or not
*
* @param visible Visibility
*/
public void setAutoFocusButtonVisible(final boolean visible) {
mAutoFocusButton.setVisibility(visible ? VISIBLE : INVISIBLE);
}
/**
* Whether if flash button is currently visible
*
* @see #setFlashButtonVisible
*/
public boolean isFlashButtonVisible() {
return mFlashButton.getVisibility() == VISIBLE;
}
/**
* Set whether flash button is visible or not
*
* @param visible Visibility
*/
public void setFlashButtonVisible(final boolean visible) {
mFlashButton.setVisibility(visible ? VISIBLE : INVISIBLE);
}
/**
* Get current auto focus button color
*
* @see #setAutoFocusButtonColor
*/
@ColorInt
public int getAutoFocusButtonColor() {
return mAutoFocusButtonColor;
}
/**
* Set auto focus button color
*
* @param color Color
*/
public void setAutoFocusButtonColor(@ColorInt final int color) {
mAutoFocusButtonColor = color;
mAutoFocusButton.setColorFilter(color);
}
/**
* Get current flash button color
*
* @see #setFlashButtonColor
*/
@ColorInt
public int getFlashButtonColor() {
return mFlashButtonColor;
}
/**
* Set flash button color
*
* @param color Color
*/
public void setFlashButtonColor(@ColorInt final int color) {
mFlashButtonColor = color;
mFlashButton.setColorFilter(color);
}
@NonNull
SurfaceView getPreviewView() {
return mPreviewView;
}
@NonNull
ViewFinderView getViewFinderView() {
return mViewFinderView;
}
@Nullable
Rect getFrameRect() {
return mViewFinderView.getFrameRect();
}
void setPreviewSize(@Nullable final Point previewSize) {
mPreviewSize = previewSize;
requestLayout();
}
void setSizeListener(@Nullable final SizeListener sizeListener) {
mSizeListener = sizeListener;
}
void setCodeScanner(@NonNull final CodeScanner codeScanner) {
if (mCodeScanner != null) {
throw new IllegalStateException("Code scanner has already been set");
}
mCodeScanner = codeScanner;
setAutoFocusEnabled(codeScanner.isAutoFocusEnabled());
setFlashEnabled(codeScanner.isFlashEnabled());
}
void setAutoFocusEnabled(final boolean enabled) {
mAutoFocusButton.setImageResource(enabled ? R.drawable.ic_code_scanner_auto_focus_on :
R.drawable.ic_code_scanner_auto_focus_off);
}
void setFlashEnabled(final boolean enabled) {
mFlashButton.setImageResource(enabled ? R.drawable.ic_code_scanner_flash_on :
R.drawable.ic_code_scanner_flash_off);
}
private void performLayout(final int width, final int height) {
final Point previewSize = mPreviewSize;
if (previewSize == null) {
mPreviewView.layout(0, 0, width, height);
} else {
int frameLeft = 0;
int frameTop = 0;
int frameRight = width;
int frameBottom = height;
final int previewWidth = previewSize.getX();
if (previewWidth > width) {
final int d = (previewWidth - width) / 2;
frameLeft -= d;
frameRight += d;
}
final int previewHeight = previewSize.getY();
if (previewHeight > height) {
final int d = (previewHeight - height) / 2;
frameTop -= d;
frameBottom += d;
}
mPreviewView.layout(frameLeft, frameTop, frameRight, frameBottom);
}
mViewFinderView.layout(0, 0, width, height);
final int buttonSize = mButtonSize;
mAutoFocusButton.layout(0, 0, buttonSize, buttonSize);
mFlashButton.layout(width - buttonSize, 0, width, buttonSize);
}
interface SizeListener {
void onSizeChanged(int width, int height);
}
private final class AutoFocusClickListener implements OnClickListener {
@Override
public void onClick(final View view) {
final CodeScanner scanner = mCodeScanner;
if (scanner == null || !scanner.isAutoFocusSupportedOrUnknown()) {
return;
}
final boolean enabled = !scanner.isAutoFocusEnabled();
scanner.setAutoFocusEnabled(enabled);
setAutoFocusEnabled(enabled);
}
}
private final class FlashClickListener implements OnClickListener {
@Override
public void onClick(final View view) {
final CodeScanner scanner = mCodeScanner;
if (scanner == null || !scanner.isFlashSupportedOrUnknown()) {
return;
}
final boolean enabled = !scanner.isFlashEnabled();
scanner.setFlashEnabled(enabled);
setFlashEnabled(enabled);
}
}
}
|
class MappedFeatureCreatorId:
    """Creates or loads feature data keyed by a dataset identifier."""

    def __init__(self, dataset_id):
        # Identifier of the dataset the feature data belongs to.
        self.dataset_id = dataset_id

    def load_or_create(self):
        """Load feature data for ``dataset_id`` if it exists, else create it.

        Currently a placeholder: no loading/creation logic is implemented
        yet, so ``None`` is always returned.
        """
        feature_data = None  # placeholder until real load/create logic lands
        # Logic to load or create the feature data based on the dataset_id
        # would go here.
        return feature_data
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_marshmallow import Marshmallow
from .config import config_by_name
from flask.app import Flask
db = SQLAlchemy()
ma = Marshmallow()
flask_bcrypt = Bcrypt()
def create_app(config_name: str) -> Flask:
    """Application factory: build and configure a Flask app.

    Args:
        config_name: Key into ``config_by_name`` selecting the configuration
            object (e.g. dev/test/prod).

    Returns:
        A Flask application with the shared extensions bound to it.
    """
    app = Flask(__name__)
    app.config.from_object(config_by_name[config_name])
    db.init_app(app)
    # Bug fix: the module-level Marshmallow instance (``ma``) was created but
    # never bound to the app, unlike ``db`` and ``flask_bcrypt``; bind it here
    # (after db, since flask-marshmallow integrates with flask-sqlalchemy).
    ma.init_app(app)
    flask_bcrypt.init_app(app)
    # @app.before_request
    # def before_request() -> None:
    #     """ Get server_name from http_host """
    #     http_host = request.environ.get('HTTP_HOST')
    #     app.config['SERVER_NAME'] = http_host
    return app
|
<gh_stars>0
import React, { useState } from 'react';
import styled, { css, withTheme } from 'styled-components';
import { useDispatch } from 'react-redux';
import { View, Text } from 'react-native';
import { TextInput } from 'react-native-gesture-handler';
import LayoutWrapper from 'sharedUI/LayoutWrapper';
import Icon from 'sharedUI/Icon';
import RectButton from 'sharedUI/Button/RectButton';
import useKeyBoard from 'hooks/useKeyboard';
import { rgba } from 'utils';
import { EMAIL_ACCOUNT, KEY_PUZZLE_D, SCREENS } from 'configs';
import { unlockEmail, incrementChanges } from 'states/actions/gameActions';
import { resetEmailsNotifs } from 'states/actions/mergedDataActions';
// Shared dimensions for the login inputs and buttons.
const COMMON_SIZES = {
  w: '64%',
  h: '28px',
  hButton: '40px',
};

// Bug fix: the repository sanitizer stripped the secret here and left
// syntactically invalid code (`const PASSWORD = <PASSWORD>_<PASSWORD>;`),
// which breaks the whole module. Restored as a quoted placeholder so the
// file parses; replace with the real expected password (ideally loaded from
// `configs` like EMAIL_ACCOUNT) before shipping.
const PASSWORD = '<PASSWORD>_<PASSWORD>';
const EMAIL = EMAIL_ACCOUNT;
const LogoContainer = styled.View`
height: 28%;
background-color: ${({ theme }) => theme.colors.ghostWhite};
${({ theme }) => theme.styles.flex(null, null, null, true)};
`;
const ContentContainer = styled.View`
flex: 1;
top: ${({ keyboardShown }) => (keyboardShown ? 0 : -8)}%;
${({ theme }) => theme.styles.flex(null, null, null, true)};
`;
const StyledText = styled.Text`
color: ${({ theme }) => theme.colors.charcoal};
font-family: ${({ theme }) => theme.fonts.cairo.regular};
font-size: ${({ size }) => size}px;
letter-spacing: ${({ letterSpacing }) => letterSpacing || 0}px;
`;
const Input = styled.TextInput`
width: ${COMMON_SIZES.w};
height: ${COMMON_SIZES.h};
${({ theme }) => theme.styles.flexWithoutSize()};
background-color: ${({ theme }) => rgba(theme.colors.persianRedAlpha, 0.2)};
color: ${({ theme }) => theme.colors.charcoal};
${({ theme }) => theme.styles.os.input}
letter-spacing: 0.21px;
padding: 0 14px;
margin-bottom: 18px;
`;
const FailedText = styled.Text`
${({ theme }) => theme.styles.flex()};
${({ theme }) => theme.styles.os.inputItalic}
color: ${({ theme }) => theme.colors.persianRed};
letter-spacing: 0.21px;
margin-bottom: 16px;
`;
const SeparatorContainer = styled.View`
margin-top: 18px;
margin-bottom: 28px;
width: ${COMMON_SIZES.w};
${({ theme }) => theme.styles.flexWithoutSize()};
`;
const Separator = styled.View`
background-color: ${({ theme }) => theme.colors.charcoal};
height: 1px;
width: 100%;
margin-top: 14px;
`;
const EmailLoginScreen = ({ route, navigation, theme }) => {
const dispatch = useDispatch();
const { keyboardShown } = useKeyBoard();
const [emailInput, setEmailInput] = useState(EMAIL);
const [passwordInput, setPasswordInput] = useState('');
const [failed, setFailed] = useState(false);
const onSubmit = () => {
if (passwordInput !== PASSWORD || emailInput !== EMAIL) {
setFailed(true);
} else {
navigation.navigate(SCREENS.EMAIL);
unlockEmail(dispatch);
resetEmailsNotifs(dispatch);
incrementChanges(dispatch);
}
};
const rectButtonTextStyle = css`
font-family: ${theme.fonts.cairo.regular};
font-size: ${theme.typo.sizes.h3};
color: ${theme.colors.ghostWhite};
letter-spacing: 0.28px;
`;
return (
<LayoutWrapper screenName={route.name}>
<LogoContainer>
<Icon type="EMAIL_XL" />
</LogoContainer>
<ContentContainer keyboardShown={keyboardShown}>
<StyledText
size={19}
letterSpacing={0.38}
css={`
${css`
${theme.styles.flexWithoutSize()}
width: ${COMMON_SIZES.w};
margin-bottom: 32px;
`}
`}>
Connexion
</StyledText>
<Input value={emailInput} onChangeText={(text) => setEmailInput(text)} />
{failed && <FailedText>Email ou mot de passe incorrect.</FailedText>}
<Input
value={passwordInput}
secureTextEntry
blurOnSubmit
onSubmitEditing={onSubmit}
onChangeText={(text) => setPasswordInput(text)}
/>
<RectButton
width={COMMON_SIZES.w}
height={COMMON_SIZES.hButton}
text="connexion"
pressHandler={onSubmit}
additionalTextStyle={rectButtonTextStyle}
backgroundColor={theme.colors.persianRed}
/>
<SeparatorContainer>
<StyledText size={12} letterSpacing={0.23}>
Mot de passe oublié ?
</StyledText>
<Separator />
</SeparatorContainer>
<RectButton
width={COMMON_SIZES.w}
height={COMMON_SIZES.hButton}
text="se créer un compte"
pressHandler={onSubmit}
additionalTextStyle={rectButtonTextStyle}
backgroundColor={theme.colors.persianRed}
/>
</ContentContainer>
</LayoutWrapper>
);
};
export default withTheme(EmailLoginScreen);
|
<gh_stars>1-10
package malte0811.controlengineering.blockentity.base;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import javax.annotation.Nullable;
/**
 * Implemented by block entities that delegate to a "master" block entity of
 * type {@code T}, resolving it lazily from the block state when no cached
 * master is available.
 *
 * @param <T> concrete type of the master block entity
 */
public interface IHasMaster<T extends BlockEntity> extends IHasMasterBase {

    /** Computes the master block entity from the given state, or {@code null} if none exists. */
    @Nullable
    T computeMasterBE(BlockState stateHere);

    /** Returns the cached master when present, otherwise computes it from the state. */
    @Nullable
    @SuppressWarnings("unchecked")
    default T getOrComputeMasterBE(BlockState stateHere) {
        return getCachedMaster() != null ? (T) getCachedMaster() : computeMasterBE(stateHere);
    }
}
|
#!/bin/bash
# Copyright 2022 The BladeDISC Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script copies headers and libraries of cuDNN from /usr to /usr/local/cuda in
# nvidia:cuda-10.x images. This is needed because tensorflow assumes that.
# Target CUDA variant, e.g. "cu102" (first CLI argument).
device=$1
echo "Patching ${device} ..."
# Copy cudnn headers and lib for cuda
# NOTE(review): ${headers[@]} is a plain string here, not an array; the loop
# relies on shell word splitting, so this breaks on paths containing spaces
# (acceptable for these fixed system paths, but worth confirming).
headers=$(ls /usr/include/*.h | grep -E 'cublas|cudnn|nvblas')
for hdr in ${headers[@]}; do
echo "Copy ${hdr} to cuda home."
cp ${hdr} /usr/local/cuda/include/
done
# Same pattern for the shared/static libraries (.so / .a).
libs=$(ls /usr/lib/x86_64-linux-gnu/ | grep -E 'cublas|cudnn|nvblas' | grep -E '\.so|\.a')
for lib in ${libs[@]}; do
echo "Copy /usr/lib/x86_64-linux-gnu/${lib} to cuda home."
cp /usr/lib/x86_64-linux-gnu/${lib} /usr/local/cuda/lib64/
done
# CUDA 10.2 additionally needs the two official toolkit patch installers.
if [[ ${device} == "cu102" ]]; then
echo "Installing cuda 10.2 patches ..."
# patch 1
curl -sL https://developer.download.nvidia.com/compute/cuda/10.2/Prod/patches/1/cuda_10.2.1_linux.run -o cuda_10.2.1_linux.run
bash ./cuda_10.2.1_linux.run --silent --toolkit
rm -f ./cuda_10.2.1_linux.run
# patch 2
curl -sL https://developer.download.nvidia.com/compute/cuda/10.2/Prod/patches/2/cuda_10.2.2_linux.run -o cuda_10.2.2_linux.run
bash ./cuda_10.2.2_linux.run --silent --toolkit
rm -f ./cuda_10.2.2_linux.run
fi
# Smoke-test request: create a food record via the local API.
# POST /foods with a JSON payload; --include prints the response headers so
# the status code is visible. All numeric fields are sent as strings —
# presumably the server casts them; confirm against the API schema.
curl --include --request POST http://localhost:3000/foods \
--header "Content-Type: application/json" \
--data '{
"food" : {
"desc" : "new food",
"calories" : "1",
"grams_per_serving" : "1",
"fat_sat" : "1",
"fat_mono" : "1",
"fat_poly" : "1",
"carbs" : "1",
"sugar" : "1",
"protein" : "1",
"fiber" : "1",
"sodium" : "1",
"choles" : "1"
}
}'
|
#!/usr/bin/env bash
# persist.sh
# run container without making it a daemon - useful to see logging output
# we are adding a named volume for /data in the container so the
# counter persists between runs.
#
# The current directory is bind-mounted into the container; "$(pwd)" is
# quoted (and the deprecated backticks replaced with $()) so that working
# directories containing spaces don't split the -v argument.
docker run \
  --rm \
  -p8086:80 \
  --name="chapter2" \
  -v "$(pwd)":/home/app \
  -v name:/data \
  chapter2
|
class ModelProcessor:
    """Holds the I/O paths and configuration for a model-processing step.

    NOTE(review): this class declares no explicit base, yet ``__init__`` ends
    with ``super().__init__(self.properties)``.  That only works when the
    class participates in cooperative multiple inheritance (a sibling base
    class later in the MRO accepts ``properties``); instantiating
    ``ModelProcessor`` directly would raise ``TypeError`` because
    ``object.__init__`` takes no arguments — confirm the intended usage.
    """

    def __init__(self, input_model_path, output_results_path,
                 input_dataset_path=None, properties=None, **kwargs) -> None:
        # Path to the serialized input model.
        self.input_model_path = input_model_path
        # Destination path for the produced results.
        self.output_results_path = output_results_path
        # Optional dataset path; may remain None.
        self.input_dataset_path = input_dataset_path
        # Free-form configuration dict; defaults to empty when None is passed.
        self.properties = properties or {}
        # Call parent class constructor
        super().__init__(self.properties)
The best approach for this query is to use an INNER JOIN to combine the Employees table with the Departments table on the employee's department_id. This returns each employee's name and salary together with the name of the department they belong to.
<filename>app/components/callout/callout.rb
# frozen_string_literal: true
# Namespaced view component used to render callout boxes; all behaviour is
# inherited unchanged from ApplicationComponent.
module Callout
  class Callout < ApplicationComponent
  end
end
|
from pioneer.das.api.interpolators import nearest_interpolator
from pioneer.das.api.samples.annotations.box_3d import Box3d
from pioneer.das.api.samples.annotations.box_2d import Box2d
from pioneer.das.api.samples.annotations.poly_2d import Poly2d
from pioneer.das.api.samples.annotations.seg_2d import Seg2d
from pioneer.das.api.samples.annotations.seg_2d_image import Seg2dImage
from pioneer.das.api.samples.annotations.seg_3d import Seg3d
from pioneer.das.api.samples.annotations.lane import Lane
# Registry mapping annotation type names (as they appear in the datasets) to a
# tuple of (annotation sample class, interpolator used to align samples in
# time). Every annotation type currently uses nearest-neighbour interpolation.
ANNOTATIONS_FACTORY = {
    'box3d': (Box3d, nearest_interpolator),
    'box2d': (Box2d, nearest_interpolator),
    'poly2d': (Poly2d, nearest_interpolator),
    'seg2d': (Seg2d, nearest_interpolator),
    'seg2dimg': (Seg2dImage, nearest_interpolator),
    'seg3d': (Seg3d, nearest_interpolator),
    'lane': (Lane, nearest_interpolator),
}
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the bin/rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
# --- Researchers ------------------------------------------------------------
# Two admin accounts and one regular member. The <NAME>/<EMAIL>/<PASSWORD>
# placeholders are scrubbed values — replace them before seeding a real DB.
Researcher.create(
  name: "<NAME>",
  email: "<EMAIL>",
  address: "5000 Mission Oaks, #41, Austin, Texas 78735",
  phone_number: "(512) 358-0014",
  organization_name: "<NAME>'s Bat Conservation",
  password: "<PASSWORD>",
  access: "admin"
)
Researcher.create(
  name: "Bat Conservation International",
  email: "<EMAIL>",
  address: "500 N Capital of TX Hwy. Bldg. 1Austin, TX 78746",
  phone_number: "1.800.538.BATS",
  organization_name: "Bat Conservation International",
  password: "<PASSWORD>",
  access: "admin"
)
Researcher.create(
  name: "<NAME>",
  email: "<EMAIL>",
  address: "500 N Capital of TX Hwy. Bldg. 1Austin, TX 78746",
  phone_number: "1.800.538.BATS",
  organization_name: "Bat Conservation International",
  password: "<PASSWORD>",
  access: "member"
)
# --- Bats -------------------------------------------------------------------
# discoverer_id references the Researcher rows created above (ids 1 and 3),
# so ordering matters within this file.
Bat.create(
  tag_number: "PL24568",
  nickname: "Fox",
  species: "Pteropus Livingstonii",
  date_found: "2020-07-23",
  location: "Taolagnaro, Madagascar",
  date_last_seen: "2020-10-31",
  weight: 850,
  age: 10,
  sex: "F",
  wing_span: 101,
  colony_size: 98,
  conservation_status: "Endangered",
  white_nose_syndrome: false,
  discoverer_id: 1
)
Bat.create(
  tag_number: "EA78990",
  nickname: "Ghost",
  species: "Ectophylla Alba",
  date_found: "2020-09-02",
  location: "Choloma, Honduras",
  date_last_seen: "2021-03-14",
  weight: 5,
  age: 7,
  sex: "M",
  wing_span: 7,
  colony_size: 8,
  conservation_status: "Near Threatened",
  white_nose_syndrome: true,
  discoverer_id: 3
)
# --- Notes ------------------------------------------------------------------
# Field observations linking researchers to the bats above.
Note.create(
  content: "Middle aged female found with two pups. No signs of disease. Rearing a pup.",
  researcher_id: 1,
  bat_id: 1
)
Note.create(
  content: "Male found roosting under a leaf with minor tear in wing membrane.",
  researcher_id: 3,
  bat_id: 2
)
|
#!/bin/bash
# Refresh the GeoLite2 country database, then run the PHP script that uses it.
#
# NOTE(review): MaxMind retired this unauthenticated download endpoint in
# 2019; current GeoLite2 downloads require a (free) license key — confirm the
# URL still works in this deployment.
# -f: don't error out when no previous archive exists (first run).
rm -f GeoLite2-Country.mmdb.gz
# Fetch over https so the database isn't downloaded on an unauthenticated
# plain-http channel.
wget https://geolite.maxmind.com/download/geoip/database/GeoLite2-Country.mmdb.gz
gunzip GeoLite2-Country.mmdb.gz
php script.php
package com.ibm.streamsx.objectstorage.writer;
import static com.ibm.streamsx.objectstorage.Utils.getParamSingleBoolValue;
import static com.ibm.streamsx.objectstorage.Utils.getParamSingleIntValue;
import static com.ibm.streamsx.objectstorage.Utils.getParamSingleStringValue;
import org.apache.hadoop.fs.Path;
import java.util.logging.Logger;
import org.apache.parquet.column.ParquetProperties.WriterVersion;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import com.ibm.streams.operator.OperatorContext;
import com.ibm.streams.operator.Tuple;
import com.ibm.streams.operator.Type.MetaType;
import com.ibm.streams.operator.logging.TraceLevel;
import com.ibm.streamsx.objectstorage.BaseObjectStorageSink;
import com.ibm.streamsx.objectstorage.IObjectStorageConstants;
import com.ibm.streamsx.objectstorage.client.IObjectStorageClient;
import com.ibm.streamsx.objectstorage.internal.sink.StorageFormat;
import com.ibm.streamsx.objectstorage.writer.parquet.ParquetOSWriter;
import com.ibm.streamsx.objectstorage.writer.parquet.ParquetWriterConfig;
import com.ibm.streamsx.objectstorage.writer.raw.RawAsyncWriter;
import com.ibm.streamsx.objectstorage.writer.raw.RawSyncWriter;
/**
 * Singleton factory that creates the appropriate {@link IWriter} for an
 * output object based on the configured storage format: synchronous raw
 * writing, parquet writing, or (default) asynchronous raw writing.
 */
public class WriterFactory {

	private static final String CLASS_NAME = WriterFactory.class.getName();
	private static Logger TRACE = Logger.getLogger(CLASS_NAME);

	// Lazily-created singleton; creation is serialized by getInstance().
	private static WriterFactory instance = null;

	/** Returns the process-wide factory instance, creating it on first use. */
	public synchronized static WriterFactory getInstance() {
		if (instance == null) {
			instance = new WriterFactory();
		}
		return instance;
	}

	/**
	 * Creates a writer for the object at {@code path}.
	 *
	 * @param path            object path (relative to the storage URI for parquet)
	 * @param opContext       operator context used to read writer parameters
	 * @param dataAttrIndex   index of the data attribute, or negative if none
	 * @param storageClient   client providing the storage URI and connection config
	 * @param fStorageFormat  storage format selecting the writer implementation
	 * @param newLine         record separator for raw writers (suppressed for blobs)
	 * @return a writer matching the requested storage format
	 * @throws Exception if the underlying writer construction fails
	 */
	public IWriter getWriter(String path,
			OperatorContext opContext,
			int dataAttrIndex,
			IObjectStorageClient storageClient,
			StorageFormat fStorageFormat,
			byte[] newLine) throws Exception {
		IWriter res = null;
		// Blob attributes carry their own framing, so no newline is appended.
		boolean isBlob = dataAttrIndex >=0 ? com.ibm.streamsx.objectstorage.Utils.getAttrMetaType(opContext, dataAttrIndex) == MetaType.BLOB : false;
		switch (fStorageFormat) {
		case raw:
			if (TRACE.isLoggable(TraceLevel.TRACE)) {
				TRACE.log(TraceLevel.TRACE, "Creating raw sync writer for object with path '" + path + "'");
			}
			// NOTE(review): the raw case deliberately uses the synchronous
			// writer; the async variant below is kept commented out.
			//res = new RawAsyncWriter(path, opContext, storageClient, isBlob ? new byte[0] : newLine);
			res = new RawSyncWriter(path, opContext, storageClient, isBlob ? new byte[0] : newLine);
			break;
		case parquet:
			// container for default parquet options
			ParquetWriterConfig defaultParquetWriterConfig = ParquetOSWriter.getDefaultPWConfig();
			// initialize parquet related parameters (if exists) from the
			// context
			CompressionCodecName compressionType = CompressionCodecName
					.valueOf(getParamSingleStringValue(opContext, IObjectStorageConstants.PARAM_PARQUET_COMPRESSION,
							defaultParquetWriterConfig.getCompressionType().name()));
			int blockSize = getParamSingleIntValue(opContext, IObjectStorageConstants.PARAM_PARQUET_BLOCK_SIZE,
					defaultParquetWriterConfig.getBlockSize());
			int pageSize = getParamSingleIntValue(opContext, IObjectStorageConstants.PARAM_PARQUET_PAGE_SIZE,
					defaultParquetWriterConfig.getPageSize());
			int dictPageSize = getParamSingleIntValue(opContext, IObjectStorageConstants.PARAM_PARQUET_DICT_PAGE_SIZE,
					defaultParquetWriterConfig.getDictPageSize());
			boolean enableDictionary = getParamSingleBoolValue(opContext,
					IObjectStorageConstants.PARAM_PARQUET_ENABLE_DICT, defaultParquetWriterConfig.isEnableDictionary());
			boolean enableSchemaValidation = getParamSingleBoolValue(opContext,
					IObjectStorageConstants.PARAM_PARQUET_ENABLE_SCHEMA_VALIDATION,
					defaultParquetWriterConfig.isEnableSchemaValidation());
			WriterVersion parquetWriterVersion = WriterVersion.fromString(
					getParamSingleStringValue(opContext, IObjectStorageConstants.PARAM_PARQUET_WRITER_VERSION,
							defaultParquetWriterConfig.getParquetWriterVersion().name()));
			// Bundle the resolved settings (operator parameters override defaults).
			ParquetWriterConfig parquetWriterConfig = new ParquetWriterConfig(compressionType, blockSize, pageSize,
					dictPageSize, enableDictionary, enableSchemaValidation, parquetWriterVersion);
			if (TRACE.isLoggable(TraceLevel.TRACE)) {
				TRACE.log(TraceLevel.TRACE,
						"Creating parquet writer for object with parent path '"
								+ storageClient.getObjectStorageURI()
								+ "' and child path '" + path + "'");
			}
			// Parquet writers address the object by its full storage URI.
			res = new ParquetOSWriter(
					new Path(storageClient.getObjectStorageURI() + path),
					opContext,
					storageClient.getConnectionConfiguration(), parquetWriterConfig);
			break;
		default:
			if (TRACE.isLoggable(TraceLevel.TRACE)) {
				TRACE.log(TraceLevel.TRACE, "Creating raw async writer for object with path '" + path + "'");
			}
			res = new RawAsyncWriter(path,
					opContext,
					storageClient,
					isBlob ? new byte[0] : newLine);
			break;
		}
		return res;
	}
}
|
package com.infamous.zod.media.streaming.endpoint.impl;
import com.infamous.zod.base.rest.RestEndPoint;
import com.infamous.zod.media.streaming.controller.MediaStreamingController;
import com.infamous.zod.media.streaming.endpoint.MediaStreamingEndPointV1;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
@Path("/view/v1")
public class MediaStreamingEndPointV1Impl implements MediaStreamingEndPointV1 {
private final MediaStreamingController m_controller;
public MediaStreamingEndPointV1Impl(MediaStreamingController controller) {
m_controller = controller;
}
@Path("/{fileId}")
@GET
@Produces("audio/mp3")
@RestEndPoint
public Response view(@PathParam("fileId") String fileId, @HeaderParam("Range") String range) {
String newFileId = URLDecoder.decode(fileId, StandardCharsets.UTF_8);
return m_controller.view(newFileId, range);
}
} |
import pkg from '../package.json';
// Styleguide/build configuration: deploys to GitHub Pages with default
// options and stamps the generated template with the package version.
export default {
  deploy: {
    ghPages: {
      /* none */
    }
  },
  template: {
    // Keep the displayed version in sync with package.json.
    version: pkg.version,
    title: 'FANTOM CAT',
    webcomponents: false
  }
};
|
// Yearly counts of recorded sexism incidents, 1999–2017, shaped for the
// charting code ({id, x: year, and, overall}). For this series `and` and
// `overall` always carry the same value — presumably other series in the
// file diverge; confirm before collapsing the two fields.
// `const` replaces `var`: the binding is never reassigned (the array itself
// stays mutable).
const dataSexism = [
  {id: 'sexism', x: 1999, and:77, overall:77},
  {id: 'sexism', x: 2000, and:109, overall:109},
  {id: 'sexism', x: 2001, and:120, overall:120},
  {id: 'sexism', x: 2002, and:114, overall:114},
  {id: 'sexism', x: 2003, and:119, overall:119},
  {id: 'sexism', x: 2004, and:159, overall:159},
  {id: 'sexism', x: 2005, and:131, overall:131},
  {id: 'sexism', x: 2006, and:170, overall:170},
  {id: 'sexism', x: 2007, and:196, overall:196},
  {id: 'sexism', x: 2008, and:310, overall:310},
  {id: 'sexism', x: 2009, and:222, overall:222},
  {id: 'sexism', x: 2010, and:251, overall:251},
  {id: 'sexism', x: 2011, and:395, overall:395},
  {id: 'sexism', x: 2012, and:436, overall:436},
  {id: 'sexism', x: 2013, and:677, overall:677},
  {id: 'sexism', x: 2014, and:863, overall:863},
  {id: 'sexism', x: 2015, and:993, overall:993},
  {id: 'sexism', x: 2016, and:1187, overall:1187},
  {id: 'sexism', x: 2017, and:903, overall:903},
];
|
const db = require('../data')
const { AP } = require('../ledgers')

// Resolves the ledger-specific account code for every invoice line on the
// payment request, mutating each line's accountCode in place.
// The look-ups are independent per line, so they run in parallel via
// Promise.all instead of awaiting one database query per loop iteration.
const mapAccountCodes = async (paymentRequest) => {
  await Promise.all(paymentRequest.invoiceLines.map(async (invoiceLine) => {
    const accountCode = await db.accountCode.findOne({
      include: [{
        model: db.schemeCode,
        as: 'schemeCode'
      }],
      where: {
        '$schemeCode.schemeCode$': invoiceLine.schemeCode,
        lineDescription: invoiceLine.description
      }
    })
    // findOne returns null when no mapping row matches; fail with a
    // descriptive error instead of a TypeError on the property access below.
    if (!accountCode) {
      throw new Error(`No account code mapping for scheme ${invoiceLine.schemeCode}, line "${invoiceLine.description}"`)
    }
    invoiceLine.accountCode = paymentRequest.ledger === AP ? accountCode.accountCodeAP : accountCode.accountCodeAR
  }))
}

module.exports = mapAccountCodes
|
<reponame>DeIaube/YiXing<filename>lib_base/src/main/java/baselib/base2/IRepository.java
package baselib.base2;
/**
 * Marker interface identifying repository classes in the base layer.
 * NOTE(review): declares no methods — presumably implementations are
 * discovered or typed via this marker; confirm against the wiring that
 * consumes it.
 */
public interface IRepository {
}
|
<filename>addon/extensions/route.js
import Ember from 'ember';
import {
DS_ROUTE_ACTIVATED,
DS_ROUTE_DEACTIVATED,
DS_ROUTE_PARAMS_LOADED
} from '../constants/actions';
// Route extension that mirrors Ember route lifecycle events into the redux
// store: dispatches the loaded params before the model hook resolves, and
// activation/deactivation actions around the route's own hooks. Each hook
// delegates to the host route's implementation via this._super.
export default {
  redux: Ember.inject.service('redux'),
  // Publish the route params, then defer to the normal model hook.
  model(params, transition) {
    const redux = this.get('redux');
    redux.dispatch({
      type: DS_ROUTE_PARAMS_LOADED,
      routeName: this.routeName,
      params
    });
    return this._super(params, transition);
  },
  // Run the host route's activation first, then record it in the store.
  activate() {
    this._super();
    const redux = this.get('redux');
    redux.dispatch({
      type: DS_ROUTE_ACTIVATED,
      routeName: this.routeName
    });
  },
  // Mirror of activate() for teardown.
  deactivate() {
    this._super();
    const redux = this.get('redux');
    redux.dispatch({
      type: DS_ROUTE_DEACTIVATED,
      routeName: this.routeName
    });
  }
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.