repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
heatd/edk2-platforms
|
Silicon/Intel/TigerlakeSiliconPkg/Include/Library/SataLib.h
|
/** @file
  Header file for SataLib - query routines for per-controller SATA
  configuration: maximum port count, port presence, function-disable state,
  ABAR size and AHCI base address, RST remapping and power-gating support.

  Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
  SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#ifndef _SATA_LIB_H_
#define _SATA_LIB_H_

//
// Zero-based controller indices; pass one of these as the SataCtrlIndex
// argument of the functions below.
//
#define SATA_1_CONTROLLER_INDEX 0
#define SATA_2_CONTROLLER_INDEX 1
#define SATA_3_CONTROLLER_INDEX 2

/**
  Get Maximum Sata Port Number

  @param[in]  SataCtrlIndex       SATA controller index

  @retval Maximum Sata Port Number
**/
UINT8
MaxSataPortNum (
  IN UINT32  SataCtrlIndex
  );

/**
  Get Maximum Sata Controller Number

  @retval Maximum Sata Controller Number
**/
UINT8
MaxSataControllerNum (
  VOID
  );

/**
  Get SATA controller's Port Present Status

  @param[in]  SataCtrlIndex       SATA controller index

  @retval Port Present Status (presumably one bit per port - confirm with
          the library implementation)
**/
UINT8
GetSataPortPresentStatus (
  IN UINT32  SataCtrlIndex
  );

/**
  Get SATA controller Function Disable Status

  @param[in]  SataCtrlIndex       SATA controller index

  @retval 0 SATA Controller is not Function Disabled
  @retval 1 SATA Controller is Function Disabled
**/
BOOLEAN
SataControllerFunctionDisableStatus (
  IN UINT32  SataCtrlIndex
  );

/**
  Get SATA controller ABAR (AHCI Base Address Register) size

  @param[in]  SataCtrlIndex       SATA controller index

  @retval SATA controller ABAR size
**/
UINT32
GetSataAbarSize (
  IN UINT32  SataCtrlIndex
  );

/**
  Get SATA controller AHCI base address

  @param[in]  SataCtrlIndex       SATA controller index

  @retval SATA controller AHCI base address
**/
UINT32
GetSataAhciBase (
  IN UINT32  SataCtrlIndex
  );

/**
  Check if SATA controller supports RST remapping

  @param[in]  SataCtrlIndex       SATA controller index

  @retval TRUE  Controller supports remapping
  @retval FALSE Controller does not support remapping
**/
BOOLEAN
IsRemappingSupportedOnSata (
  IN UINT32  SataCtrlIndex
  );

/**
  Checks if SoC supports the SATA PGD (power-gating domain) power down on
  the given SATA controller.

  @param[in]  SataCtrlIndex       SATA controller index

  @retval TRUE  SATA PGD power down supported
  @retval FALSE SATA PGD power down not supported
**/
BOOLEAN
IsSataPowerGatingSupported (
  IN UINT32  SataCtrlIndex
  );

#endif // _SATA_LIB_H_
|
pengzhang/hm-framework
|
app/controllers/wechat/model/WechatRedpack.java
|
package controllers.wechat.model;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * Request model for the WeChat Pay "send red packet" API.
 *
 * <p>Marshalled to/from the {@code <xml>} payload via JAXB; each getter is
 * bound to its XML element with {@code @XmlElement}. Field names deliberately
 * mirror the snake_case names of the wire protocol rather than Java
 * camelCase, so the accessor names must not be changed.</p>
 */
@XmlRootElement(name="xml")
public class WechatRedpack implements Serializable {

    // Pins the serialized form. Previously missing, so the id was silently
    // recomputed from the class shape on every recompile.
    private static final long serialVersionUID = 1L;

    /**
     * Random string, no longer than 32 characters.
     * The WeChat Pay API protocol includes the nonce_str field mainly to keep
     * the signature unpredictable. Recommended approach: call a random number
     * generator and convert the resulting value to a string.
     * --- required parameter
     */
    private String nonce_str;
    // Merchant order number
    private String mch_billno;
    // Merchant id
    private String mch_id;
    // Public-account appid under which the red packet is sent
    private String wxappid;
    private String nick_name;
    // Sender name shown to the recipient
    private String send_name;
    // openid of the recipient
    private String re_openid;
    // Total amount - presumably in cents, per WeChat Pay convention (confirm)
    private int total_amount;
    private String min_value;
    private String max_value;
    // Number of red packets / recipients
    private int total_num;
    // Wishing message shown on the red packet
    private String wishing;
    // IP address of the calling server
    private String client_ip;
    // Activity name
    private String act_name;
    private String remark;
    // Request signature over the other fields
    private String sign;

    @XmlElement(name="sign")
    public String getSign() {
        return sign;
    }

    public void setSign(String sign) {
        this.sign = sign;
    }

    @XmlElement(name="nonce_str")
    public String getNonce_str() {
        return nonce_str;
    }

    public void setNonce_str(String nonce_str) {
        this.nonce_str = nonce_str;
    }

    @XmlElement(name="mch_billno")
    public String getMch_billno() {
        return mch_billno;
    }

    public void setMch_billno(String mch_billno) {
        this.mch_billno = mch_billno;
    }

    @XmlElement(name="mch_id")
    public String getMch_id() {
        return mch_id;
    }

    public void setMch_id(String mch_id) {
        this.mch_id = mch_id;
    }

    @XmlElement(name="wxappid")
    public String getWxappid() {
        return wxappid;
    }

    public void setWxappid(String wxappid) {
        this.wxappid = wxappid;
    }

    @XmlElement(name="nick_name")
    public String getNick_name() {
        return nick_name;
    }

    public void setNick_name(String nick_name) {
        this.nick_name = nick_name;
    }

    @XmlElement(name="send_name")
    public String getSend_name() {
        return send_name;
    }

    public void setSend_name(String send_name) {
        this.send_name = send_name;
    }

    @XmlElement(name="re_openid")
    public String getRe_openid() {
        return re_openid;
    }

    public void setRe_openid(String re_openid) {
        this.re_openid = re_openid;
    }

    @XmlElement(name="total_amount")
    public int getTotal_amount() {
        return total_amount;
    }

    public void setTotal_amount(int total_amount) {
        this.total_amount = total_amount;
    }

    @XmlElement(name="min_value")
    public String getMin_value() {
        return min_value;
    }

    public void setMin_value(String min_value) {
        this.min_value = min_value;
    }

    @XmlElement(name="max_value")
    public String getMax_value() {
        return max_value;
    }

    public void setMax_value(String max_value) {
        this.max_value = max_value;
    }

    @XmlElement(name="total_num")
    public int getTotal_num() {
        return total_num;
    }

    // Parameter renamed from the uninformative `i` for consistency with the
    // other setters; callers are unaffected (positional in Java).
    public void setTotal_num(int total_num) {
        this.total_num = total_num;
    }

    @XmlElement(name="wishing")
    public String getWishing() {
        return wishing;
    }

    public void setWishing(String wishing) {
        this.wishing = wishing;
    }

    @XmlElement(name="client_ip")
    public String getClient_ip() {
        return client_ip;
    }

    public void setClient_ip(String client_ip) {
        this.client_ip = client_ip;
    }

    @XmlElement(name="act_name")
    public String getAct_name() {
        return act_name;
    }

    public void setAct_name(String act_name) {
        this.act_name = act_name;
    }

    @XmlElement(name="remark")
    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }
}
|
franklin18ru/Indigo
|
aboutUs/forms.py
|
<reponame>franklin18ru/Indigo
from django import forms
class ContactForm(forms.Form):
    """Simple contact form with name, e-mail address and free-text message.

    Every field is required; placeholder texts are in Icelandic.
    """

    name = forms.CharField(
        max_length=100,
        required=True,
        widget=forms.TextInput(attrs={'class': 'form-control validate', 'id': 'orangeForm-pass',
                                      'placeholder': 'Vinsamlega skráið nafn hér.'}))
    email = forms.EmailField(
        required=True,
        widget=forms.EmailInput(attrs={'class': 'form-control validate', 'id': 'orangeForm-pass',
                                       'placeholder': 'Vinsamlega skráið eigin netfang hér.'}))
    message = forms.CharField(
        required=True,
        widget=forms.Textarea(attrs={'class': 'form-control validate', 'id': 'orangeForm-pass',
                                     'placeholder': 'Vinsamlega skráið fyrirspurn hér.'}))
|
RohitAthithya/learntosolveit
|
languages/python/software_engineering_exceptions_testing.py
|
# Demonstrates the urllib.error exception hierarchy:
# HTTPError is a subclass of URLError, not the other way around.
from urllib.error import URLError, HTTPError
from io import StringIO

print(isinstance(URLError("foo"), HTTPError))  # False: URLError is the base class
print(isinstance(HTTPError("foo", "bar", "baz", "zap", StringIO()), URLError))  # True

try:
    # HTTPError(url, code, msg, hdrs, fp) -- raised unconditionally here.
    raise HTTPError("foo", "bar", "baz", "zap", StringIO())
except URLError:
    # Caught because an HTTPError *is a* URLError.
    print("caught this exception")
else:
    # NOTE(review): `else` runs only when the try block raises NO exception,
    # so this branch is unreachable here and its message text is misleading.
    print("this exception escaped.")
|
wateret/ONE_private
|
compiler/moco/service/src/Service/TFTypeInferenceRule.cpp
|
<gh_stars>0
/*
* Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "moco/Service/TFTypeInferenceRule.h"
#include "moco/IR/TFDialect.h"
#include "moco/IR/TFNodeVisitor.h"
#include "moco/IR/TFNodes.h"
#include "moco/IR/TFNodeImpl.h"
#include <cassert>
namespace
{

using namespace moco;

/**
 * @brief Determines the output data type of each TF node.
 *
 * For most operations the type is forwarded from a designated input
 * (via dtype_get); source-like nodes (TFConst, TFPlaceholder, TFShape)
 * report their own dtype attribute instead.
 */
struct TypeForwardAlgorithm final : public moco::TFNodeVisitor<loco::DataType>
{
  // Type follows the primary operand.
  loco::DataType visit(const TFAdd *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFAvgPool *node) { return dtype_get(node->value()); }
  loco::DataType visit(const TFBiasAdd *node) { return dtype_get(node->value()); }
  loco::DataType visit(const TFConcatV2 *node) { return dtype_get(node->values(0)); }
  // Source nodes: the node itself carries the dtype.
  loco::DataType visit(const TFConst *node) { return node->dtype(); }
  loco::DataType visit(const TFConv2D *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFConv2DBackpropInput *node)
  {
    return dtype_get(node->out_backprop());
  }
  loco::DataType visit(const TFDepthwiseConv2dNative *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFFakeQuantWithMinMaxVars *node) { return dtype_get(node->inputs()); }
  loco::DataType visit(const TFFusedBatchNorm *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFIdentity *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFMaximum *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFMaxPool *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFMean *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFMul *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFPack *node) { return dtype_get(node->values(0)); }
  loco::DataType visit(const TFPad *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFPlaceholder *node) { return node->dtype(); }
  loco::DataType visit(const TFRealDiv *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFRelu *node) { return dtype_get(node->features()); }
  loco::DataType visit(const TFRelu6 *node) { return dtype_get(node->features()); }
  loco::DataType visit(const TFReshape *node) { return dtype_get(node->tensor()); }
  loco::DataType visit(const TFRsqrt *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFShape *node) { return node->dtype(); }
  loco::DataType visit(const TFSoftmax *node) { return dtype_get(node->logits()); }
  loco::DataType visit(const TFSqrt *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFSquaredDifference *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFSqueeze *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFStopGradient *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFStridedSlice *node) { return dtype_get(node->input()); }
  loco::DataType visit(const TFSub *node) { return dtype_get(node->x()); }
  loco::DataType visit(const TFTanh *node) { return dtype_get(node->x()); }

  // For virtual nodes
  loco::DataType visit(const TFPush *node) { return dtype_get(node->from()); }
};

} // namespace
namespace moco
{
bool TFTypeInferenceRule::recognize(const loco::Dialect *d) const
{
  // Only nodes belonging to the TensorFlow dialect are handled by this rule.
  return d == TFDialect::get();
}
bool TFTypeInferenceRule::infer(const loco::Node *node, loco::DataType &dtype) const
{
  assert(node->dialect() == TFDialect::get());

  TypeForwardAlgorithm alg;

// clang-format off
  // Dispatch over every node class listed in TFNodes.lst. A single
  // dynamic_cast per class is performed and its result reused; the previous
  // form repeated the same cast twice (once as a test, once for the value).
#define TENSORFLOW_NODE(OPCODE,CLASS)                                  \
  if (auto tfnode = dynamic_cast<const moco::CLASS *>(node))           \
  {                                                                    \
    dtype = tfnode->accept(&alg);                                      \
    assert(dtype != loco::DataType::Unknown);                          \
    return true;                                                       \
  }
#include "moco/IR/TFNodes.lst"
#undef TENSORFLOW_NODE
// clang-format on

  // Not a node class this rule knows about.
  return false;
}
} // namespace moco
|
npocmaka/Windows-Server-2003
|
windows/advcore/gdiplus/test/gpcfm/inc/environ.hxx
|
<filename>windows/advcore/gdiplus/test/gpcfm/inc/environ.hxx
#ifndef _ENVIRON_HXX
#define _ENVIRON_HXX
#include <windows.h>
#include <debug.hxx>
#include <report.hxx>
#include <common.hxx>
#include <ntlog.h>
//
// Captures host environment facts (OS version, MMX availability) at
// construction time and exposes helpers to query and log them.
//
class Environment
{
public:
    Environment(VOID)
    {
        // vGetDCInfo() tests and frees hInstance even on paths that never
        // call LoadLibrary, so it must start out NULL. It was previously
        // left uninitialized, making that check read an indeterminate value.
        hInstance = NULL;

        // Determine if MMX is available. Note, Win95 does not support
        // IsProcessorFeaturePresent(PF_MMX_INSTRUCTIONS_AVAILABLE), so probe
        // by executing an MMX instruction and trapping the fault if absent.
        __try
        {
            _asm emms
            bIsMmx = TRUE;
        }
        __except(EXCEPTION_EXECUTE_HANDLER)
        {
            bIsMmx = FALSE;
        }

        // Get platform specific information
        osVersion.dwOSVersionInfoSize = sizeof(osVersion);
        GetVersionEx(&osVersion);
    };

    ~Environment(VOID)
    {
    };

    // Logs the Windows flavor, minor version and MMX availability.
    VOID vStatusReport(Report *rpt)
    {
        TCHAR strVersion[MAX_STRING];

        // NOTE(review): strcpy/%s with a TCHAR buffer assumes an ANSI build;
        // a UNICODE build would need _tcscpy/%ls -- confirm build settings.
        if (bIsWinNT())
        {
            strcpy(strVersion, TEXT("NT"));
        }
        else if (bIsWin9x())
        {
            if (osVersion.dwMinorVersion > 0)
            {
                strcpy(strVersion, TEXT("98"));
            }
            else if (osVersion.dwMinorVersion == 0)
            {
                strcpy(strVersion, TEXT("95"));
            }
        }
        else if (bIsWin32s())
        {
            strcpy(strVersion, TEXT("32s"));
        }
        else
        {
            strcpy(strVersion, TEXT("3.x"));
        }

        rpt->vLog(TLS_LOG, "OS Version: Win %s", strVersion);
        rpt->vLog(TLS_LOG, "OS Minor Version: %d",
                  osVersion.dwMinorVersion);
        rpt->vLog(TLS_LOG, "MMX: %s\n", ((bIsMmx) ? "Yes" : "No"));
    };

    // Queries the DC's first pixel format and caches the RGBA bit counts.
    VOID vGetDCInfo(HDC hdc)
    {
        // retrieving the dc specific info like number of rgb bits.
        // if we are on winnt or win98, use DescribePixelFormat
        // actually, if we can load opengl32...
        if (!bIsWinNT())
        {
            hInstance = LoadLibrary("OPENGL32.DLL");
        }

        //
        // hmm, win3.x may not support describepixelformat!
        //
        if (DescribePixelFormat(hdc, 1, sizeof(PIXELFORMATDESCRIPTOR), &pfd))
        {
            iRedBits = pfd.cRedBits;
            iGrnBits = pfd.cGreenBits;
            iBluBits = pfd.cBlueBits;
            iAxpBits = pfd.cAlphaBits;
        }
        else
        {
            // use the long method
        }

        if (hInstance != NULL)
        {
            FreeLibrary(hInstance);
            // Reset so a later call cannot FreeLibrary a stale handle.
            hInstance = NULL;
        }
    };

    BOOL bIsWinNT(VOID)
    {
        return (osVersion.dwPlatformId == VER_PLATFORM_WIN32_NT);
    };

    BOOL bIsWin9x(VOID)
    {
        return (osVersion.dwPlatformId == VER_PLATFORM_WIN32_WINDOWS);
    };

    BOOL bIsWin98(VOID)
    {
        return (bIsWin9x() && (osVersion.dwMinorVersion > 0));
    };

    BOOL bIsWin95(VOID)
    {
        return (bIsWin9x() && (osVersion.dwMinorVersion == 0));
    };

    BOOL bIsWin32s(VOID)
    {
        return (osVersion.dwPlatformId == VER_PLATFORM_WIN32s);
    };

public:
    // Bits per channel of the DC's pixel format; filled by vGetDCInfo().
    INT iRedBits;
    INT iGrnBits;
    INT iBluBits;
    INT iAxpBits;

private:
    PIXELFORMATDESCRIPTOR pfd;
    BOOL bIsMmx;                 // MMX probe result from the constructor
    OSVERSIONINFO osVersion;     // filled once by GetVersionEx in the ctor
    HINSTANCE hInstance;         // opengl32 handle; NULL when not loaded
};
#endif
|
bunniescc/PocketSnacks
|
src/main/java/cc/bunnies/pocketsnacks/dao/FlavorDao.java
|
package cc.bunnies.pocketsnacks.dao;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis DAO for snack flavors.
 *
 * <p>No query methods are declared yet; any SQL mappings presumably live in a
 * corresponding mapper XML file -- TODO confirm.</p>
 */
@Mapper
public interface FlavorDao {
}
|
seasonl2014/xueden-cloud-edu
|
moyu-common/moyu-common-core/src/main/java/cn/xueden/common/core/edu/domain/EduEnvironmenParam.java
|
package cn.xueden.common.core.edu.domain;
import cn.xueden.common.core.web.domain.DataEntity;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Entity for a course's development-environment parameter, mapped to the
 * {@code edu_environmen_param} table via MyBatis-Plus.
 *
 * @author 梁志杰 (Liang Zhijie)
 * @version 1.0
 * @since 2021/2/11
 */
@Data
@TableName("edu_environmen_param")
public class EduEnvironmenParam extends DataEntity<EduEnvironmenParam> {

    // ID of the course this parameter belongs to
    @ApiModelProperty(value = "课程ID")
    private Long courseId;

    // Parameter name
    @ApiModelProperty(value = "参数名称")
    private String name;

    // Parameter value
    @ApiModelProperty(value = "参数值")
    private String value;
}
|
wiltonlazary/snappydata
|
core/src/test/scala/org/apache/spark/sql/streaming/SnappyStoreSinkProviderSuite.scala
|
<filename>core/src/test/scala/org/apache/spark/sql/streaming/SnappyStoreSinkProviderSuite.scala
/*
* Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
import java.sql.SQLException
import java.util.concurrent.atomic.AtomicInteger
import scala.reflect.io.Path
import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState.SNAPPY_CATALOG_SCHEMA_VERSION_MISMATCH
import io.snappydata.SnappyFunSuite
import org.junit.Assert
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
import org.apache.spark.sql.execution.CatalogStaleException
import org.apache.spark.sql.kafka010.KafkaTestUtils
import org.apache.spark.sql.streaming.SnappyStoreSinkProvider.{ATTEMPTS, TEST_FAILBATCH_OPTION}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{AnalysisException, Dataset, Row, SnappyContext, SnappySession}
class SnappyStoreSinkProviderSuite extends SnappyFunSuite
with BeforeAndAfter with BeforeAndAfterAll {
  // Shared SnappySession used by every test in this suite.
  private val session = snc.sparkSession

  import session.implicits._

  // Embedded Kafka harness; created in beforeAll(), torn down in afterAll().
  private var kafkaTestUtils: KafkaTestUtils = _
  // Monotonic counter so each test gets its own topic / query name.
  private val testIdGenerator = new AtomicInteger(0)
  private val tableName = "APP.USERS"
  // NOTE: deleted recursively in after() -- keep pointed at a throwaway dir.
  private val checkpointDirectory = "/tmp/SnappyStoreSinkProviderSuite"

  private def getTopic(id: Int) = s"topic-$id"
  override def beforeAll() {
    super.beforeAll()
    // Spin up the embedded Kafka broker once for the whole suite.
    kafkaTestUtils = new KafkaTestUtils
    kafkaTestUtils.setup()
  }
  override def afterAll() {
    super.afterAll()
    // Tear down the embedded Kafka broker; guard in case setup() failed.
    if (kafkaTestUtils != null) {
      kafkaTestUtils.teardown()
      kafkaTestUtils = null
    }
  }
  after {
    baseCleanup(false)
    // CAUTION!! - recursively deleting checkpoint directory. handle with care.
    Path(checkpointDirectory).deleteRecursively()
  }
  // No _eventType, key columns defined: rows behave as upserts -- id 1 is
  // updated by the second batch and id 4 inserted (see expected rows).
  test("_eventType column: absent, key columns: defined, table type: column") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val dataBatch1 = Seq(Seq(1, "name1", 30, "lname1"), Seq(2, "name2", 10, "lname2"),
      Seq(3, "name3", 30, "lname3"))
    kafkaTestUtils.sendMessages(topic, dataBatch1.map(r => r.mkString(",")).toArray)
    val streamingQuery = createAndStartStreamingQuery(topic, testId, withEventTypeColumn = false)
    waitTillTheBatchIsPickedForProcessing(0, testId)
    val dataBatch2 = Seq(Seq(1, "name11", 40, "lname1"), Seq(4, "name4", 50, "lname4"))
    kafkaTestUtils.sendMessages(topic, dataBatch2.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    val rows = Array(Row(1, "name11", 40, "lname1"), Row(2, "name2", 10, "lname2"),
      Row(3, "name3", 30, "lname3"), Row(4, "name4", 50, "lname4"))
    assertData(rows)
  }

  // Without key columns every record is a plain insert, so the duplicate
  // (1, name1, ...) row shows up twice in the expected data.
  test("_eventType column: absent, key columns: undefined, table type: column") {
    val testId = testIdGenerator.getAndIncrement()
    createTable(withKeyColumn = false)()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val streamingQuery = createAndStartStreamingQuery(topic, testId, withEventTypeColumn = false)
    val dataBatch = Seq(Seq(1, "name1", 30, "lname1"), Seq(2, "name2", 10, "lname2"),
      Seq(3, "name3", 30, "lname3"), Seq(1, "name1", 30, "lname1"))
    kafkaTestUtils.sendMessages(topic, dataBatch.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    val rows = Array(Row(1, "name1", 30, "lname1"), Row(1, "name1", 30, "lname1"),
      Row(2, "name2", 10, "lname2"), Row(3, "name3", 30, "lname3"))
    assertData(rows)
  }

  // Trailing _eventType value: presumably 0=insert, 1=update, 2=delete --
  // inferred from the expected rows (ids 2 and 4 removed, id 1 updated).
  test("_eventType column: present, key columns: defined, table type: column") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val dataBatch1 = Seq(Seq(1, "name1", 20, "lname1", 0), Seq(2, "name2", 10, "lname2", 0))
    kafkaTestUtils.sendMessages(topic, dataBatch1.map(r => r.mkString(",")).toArray)
    val streamingQuery: StreamingQuery = createAndStartStreamingQuery(topic, testId)
    waitTillTheBatchIsPickedForProcessing(0, testId)
    val dataBatch2 = Seq(Seq(1, "name11", 30, "lname1", 1), Seq(2, "name2", 13, "lname2", 2),
      Seq(3, "name3", 30, "lname3", 0), Seq(4, "name4", 10, "lname4", 2))
    kafkaTestUtils.sendMessages(topic, dataBatch2.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    assertData(Array(Row(1, "name11", 30, "lname1"), Row(3, "name3", 30, "lname3")))
  }

  // _eventType present without key columns must fail the streaming query.
  test("_eventType column: present, key columns: undefined, table type: column") {
    val testId = testIdGenerator.getAndIncrement()
    createTable(withKeyColumn = false)()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val dataBatch = Seq(Seq(1, "name1", 20, "lname1", 0), Seq(2, "name2", 10, "lname2", 0))
    kafkaTestUtils.sendMessages(topic, dataBatch.map(r => r.mkString(",")).toArray)
    val thrown = intercept[StreamingQueryException] {
      val streamingQuery = createAndStartStreamingQuery(topic, testId)
      streamingQuery.processAllAvailable()
      streamingQuery.stop()
    }
    val errorMessage = "_eventType is present in data but key columns are not defined on table."
    assert(thrown.getCause.getMessage == errorMessage)
  }

  // Row-table variant of the upsert scenario (primary key instead of
  // key_columns option).
  test("_eventType column: absent, key columns: defined, table type: row") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()(isRowTable = true)
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val dataBatch1 = Seq(Seq(1, "name1", 30, "lname1"), Seq(2, "name2", 10, "lname2"),
      Seq(3, "name3", 30, "lname3"))
    kafkaTestUtils.sendMessages(topic, dataBatch1.map(r => r.mkString(",")).toArray)
    val streamingQuery = createAndStartStreamingQuery(topic, testId, withEventTypeColumn = false)
    waitTillTheBatchIsPickedForProcessing(0, testId)
    val dataBatch2 = Seq(Seq(1, "name11", 40, "lname1"), Seq(4, "name4", 50, "lname4"))
    kafkaTestUtils.sendMessages(topic, dataBatch2.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    val rows = Array(Row(1, "name11", 40, "lname1"), Row(2, "name2", 10, "lname2"),
      Row(3, "name3", 30, "lname3"), Row(4, "name4", 50, "lname4"))
    assertData(rows)
  }

  // Row-table variant of the plain-insert scenario.
  test("_eventType column: absent, key columns: undefined, table type: row") {
    val testId = testIdGenerator.getAndIncrement()
    createTable(withKeyColumn = false)(isRowTable = true)
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val streamingQuery = createAndStartStreamingQuery(topic, testId, withEventTypeColumn = false)
    val dataBatch = Seq(Seq(1, "name1", 30, "lname1"), Seq(2, "name2", 10, "lname2"),
      Seq(3, "name3", 30, "lname3"), Seq(1, "name1", 30, "lname1"))
    kafkaTestUtils.sendMessages(topic, dataBatch.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    val rows = Array(Row(1, "name1", 30, "lname1"), Row(1, "name1", 30, "lname1"),
      Row(2, "name2", 10, "lname2"), Row(3, "name3", 30, "lname3"))
    assertData(rows)
  }

  // Row-table variant of the insert/update/delete scenario.
  test("_eventType column: present, key columns: defined, table type: row") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()(isRowTable = true)
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val dataBatch1 = Seq(Seq(1, "name1", 20, "lname1", 0), Seq(2, "name2", 10, "lname2", 0))
    kafkaTestUtils.sendMessages(topic, dataBatch1.map(r => r.mkString(",")).toArray)
    val streamingQuery: StreamingQuery = createAndStartStreamingQuery(topic, testId)
    waitTillTheBatchIsPickedForProcessing(0, testId)
    val dataBatch2 = Seq(Seq(1, "name11", 30, "lname1", 1), Seq(2, "name2", 13, "lname2", 2),
      Seq(3, "name3", 30, "lname3", 0), Seq(4, "name4", 10, "lname4", 2))
    kafkaTestUtils.sendMessages(topic, dataBatch2.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    assertData(Array(Row(1, "name11", 30, "lname1"), Row(3, "name3", 30, "lname3")))
  }

  // Row-table variant: _eventType without a primary key must fail.
  test("_eventType column: present, key columns: undefined, table type: row") {
    val testId = testIdGenerator.getAndIncrement()
    createTable(withKeyColumn = false)(isRowTable = true)
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    val dataBatch = Seq(Seq(1, "name1", 20, "lname1", 0), Seq(2, "name2", 10, "lname2", 0))
    kafkaTestUtils.sendMessages(topic, dataBatch.map(r => r.mkString(",")).toArray)
    val streamingQuery = createAndStartStreamingQuery(topic, testId)
    val thrown = intercept[StreamingQueryException] {
      streamingQuery.processAllAvailable()
    }
    val errorMessage = "_eventType is present in data but key columns are not defined on table."
    assert(thrown.getCause.getMessage == errorMessage)
    streamingQuery.stop()
  }
  // A batch that fails mid-stream (TEST_FAILBATCH_OPTION) must be reprocessed
  // by the next query without duplicating batches that already committed.
  test("idempotency") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 3)
    kafkaTestUtils.sendMessages(topic, (0 to 10).map(i => s"$i,name$i,$i,lname$i,0").toArray)
    val streamingQuery: StreamingQuery = createAndStartStreamingQuery(topic, testId)
    waitTillTheBatchIsPickedForProcessing(0, testId)
    streamingQuery.stop()
    val streamingQuery1: StreamingQuery = createAndStartStreamingQuery(topic, testId
      , options = Map(TEST_FAILBATCH_OPTION -> "true"))
    kafkaTestUtils.sendMessages(topic, (11 to 20).map(i => s"$i,name$i,$i,lname$i,0").toArray)
    try {
      streamingQuery1.processAllAvailable()
      fail("StreamingQueryException expected.")
    } catch {
      case ex: StreamingQueryException if ex.cause.getMessage == "dummy failure for test" =>
        streamingQuery1.stop()
    }
    val streamingQuery2: StreamingQuery = createAndStartStreamingQuery(topic, testId)
    kafkaTestUtils.sendMessages(topic, (21 to 30).map(i => s"$i,name$i,$i,lname$i,0").toArray)
    waitTillTheBatchIsPickedForProcessing(1, testId)
    streamingQuery2.processAllAvailable()
    streamingQuery2.stop()
    // Every record 0..30 must land exactly once despite the failed query.
    assertData((0 to 30).map(i => Row(i, s"name$i", i, s"lname$i")).toArray)
  }

  // All 1000 records share key (1, lname1) on one partition; with conflation
  // only the last event for the key survives.
  test("conflation enabled") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 1)
    // producing all records with same key `1` on partition 0.
    kafkaTestUtils.sendMessages(topic, (0 to 999)
      .map(i => s"1,name$i,$i,lname1,${i % 3}").toArray, Some(0))
    // producing records with key `1` on multiple partitions. This may not lead to expected result
    // kafkaTestUtils.sendMessages(topic, (0 to 999).map(i => s"1,name$i,$i,${i%3}").toArray)
    val streamingQuery = createAndStartStreamingQuery(topic, testId,
      options = Map("conflation" -> "true"))
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    assertData(Array(Row(1, "name999", 999, "lname1")))
  }

  // Conflation also applies when records carry no _eventType column.
  test("conflation enabled, _eventType column: absent") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 1)
    val batch2 = Seq(Seq(1, "name2", 30, "lname1"), Seq(1, "name3", 30, "lname1"))
    kafkaTestUtils.sendMessages(topic, batch2.map(r => r.mkString(",")).toArray)
    val streamingQuery = createAndStartStreamingQuery(topic, testId,
      withEventTypeColumn = false, options = Map("conflation" -> "true"))
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    assertData(Array(Row(1, "name3", 30, "lname1")))
  }

  // Conflation requires key columns (or a primary key); otherwise the query
  // must fail with an IllegalStateException.
  test("conflation enabled, key columns : undefined") {
    val testId = testIdGenerator.getAndIncrement()
    createTable(withKeyColumn = false)()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 1)
    val batch2 = Seq(Seq(1, "name2", 30, "lname1"), Seq(1, "name3", 30, "lname1"))
    kafkaTestUtils.sendMessages(topic, batch2.map(r => r.mkString(",")).toArray)
    val thrown = intercept[StreamingQueryException] {
      val streamingQuery = createAndStartStreamingQuery(topic, testId,
        withEventTypeColumn = false, options = Map("conflation" -> "true"))
      streamingQuery.processAllAvailable()
      streamingQuery.stop()
    }
    val errorMessage = "Key column(s) or primary key must be defined on table in order " +
      "to perform conflation."
    assert(thrown.getCause.isInstanceOf[IllegalStateException])
    assert(thrown.getCause.getMessage == errorMessage)
  }

  // Regression for SNAP-2745: a delete followed by an insert of the same key
  // within one conflated batch must leave the row present.
  test("[SNAP-2745]-conflation: delete,insert") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 1)
    val batch1 = Seq(Seq(1, "name1", 30, "lname1", 0))
    kafkaTestUtils.sendMessages(topic, batch1.map(r => r.mkString(",")).toArray)
    val streamingQuery = createAndStartStreamingQuery(topic, testId,
      options = Map("conflation" -> "true"))
    waitTillTheBatchIsPickedForProcessing(0, testId)
    val batch2 = Seq(Seq(1, "name1", 30, "lname1", 2), Seq(1, "name1", 30, "lname1", 0))
    kafkaTestUtils.sendMessages(topic, batch2.map(r => r.mkString(",")).toArray)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    assertData(Array(Row(1, "name1", 30, "lname1")))
  }

  test("conflation disabled") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    kafkaTestUtils.createTopic(topic, partitions = 1)
    val dataBatch = Seq(Seq(1, "name1", 1, "lname1", 0), Seq(1, "name1", 1, "lname1", 2))
    kafkaTestUtils.sendMessages(topic, dataBatch.map(r => r.mkString(",")).toArray, Some(0))
    val streamingQuery: StreamingQuery = createAndStartStreamingQuery(topic, testId)
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
    // The delete will be processed prior to insert event irrespective of their order or arrival.
    // Hence when conflation is disabled, both the events are processed resulting into one record.
    assertData(Array(Row(1, "name1", 1, "lname1")))
  }
  // The snappy sink mandates a queryName; starting without one must fail.
  test("queryName not specified") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    val streamingQuery = createAndStartStreamingQuery(topic, testId, withQueryName = false)
    try {
      streamingQuery.processAllAvailable()
      fail("StreamingQueryException expected.")
    } catch {
      case x: StreamingQueryException =>
        val expectedMessage = s"queryName must be specified for ${SnappyContext.SNAPPY_SINK_NAME}."
        assert(x.getCause.isInstanceOf[IllegalStateException])
        assert(x.getCause.getMessage.equals(expectedMessage))
    }
  }

  // TestSinkCallback fails 4 times while only 3 attempts are allowed, so the
  // query must surface the underlying SQL/stale-catalog failure.
  test("Streaming query fails after attempts exhausted") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    val streamingQuery = createAndStartStreamingQuery(topic, testId,
      options = Map("withQueryName" -> "false",
        "sinkCallback" -> "org.apache.spark.sql.streaming.TestSinkCallback",
        ATTEMPTS -> "3", "attempts" -> "4"))
    try {
      streamingQuery.processAllAvailable()
      fail("StreamingQueryException expected.")
    } catch {
      case x: StreamingQueryException =>
        assert(x.getCause.getCause.isInstanceOf[SQLException] ||
          x.getCause.getCause.isInstanceOf[CatalogStaleException])
    }
  }

  // 3 failures within 3 allowed attempts: the retry loop absorbs them and the
  // query completes normally.
  test("Streaming query passes is attempts are not exhausted") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    val streamingQuery = createAndStartStreamingQuery(topic, testId,
      options = Map("withQueryName" -> "false",
        "sinkCallback" -> "org.apache.spark.sql.streaming.TestSinkCallback",
        ATTEMPTS -> "3", "attempts" -> "3"))
    streamingQuery.processAllAvailable()
    streamingQuery.stop()
  }

  // Non-stale-catalog failures must not be retried at all.
  test("Streaming query fails on the first attempt itself when failure is not due" +
    " to stale catalog") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    val streamingQuery = createAndStartStreamingQuery(topic, testId,
      options = Map("withQueryName" -> "false",
        "sinkCallback" -> "org.apache.spark.sql.streaming.TestSinkCallback",
        "attempts" -> "1", "catalogNotStale" -> ""))
    try {
      streamingQuery.processAllAvailable()
      fail("StreamingQueryException expected.")
    } catch {
      case x: StreamingQueryException =>
        assert(x.getCause.isInstanceOf[RuntimeException]
          && x.getCause.getMessage.equals("catalogNotStale"))
    }
  }

  // A second snappy-sink query in the same session must be rejected; a
  // memory-sink query running concurrently does not count.
  test("At max only one streaming query with snappy sink allowed to run in single session") {
    val testId = testIdGenerator.getAndIncrement()
    createTable()()
    val topic = getTopic(testId)
    val memorySinkQuery = session
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
      .option("subscribe", topic)
      .option("startingOffsets", "earliest")
      .load().writeStream
      .queryName("memorysink")
      .option("checkpointLocation", checkpointDirectory + "/memorysink")
      .format("memory")
      .start()
    try {
      // This query should start successfully as earlier started query is using memory sink
      val streamingQuery = createAndStartStreamingQuery(topic, testId * 100)
      try {
        val streamingQuery2 = createAndStartStreamingQuery(topic, testId * 200)
        fail("StreamingQueryException expected.")
      } catch {
        case x: AnalysisException =>
          val expectedMessage = "A streaming query with snappy sink is already running with" +
            " current session. Please start query with new SnappySession.;"
          Assert.assertEquals(expectedMessage, x.getMessage)
      } finally {
        streamingQuery.stop()
      }
    } finally {
      memorySinkQuery.stop()
    }
  }
private def waitTillTheBatchIsPickedForProcessing(batchId: Int, testId: Int,
retries: Int = 15): Unit = {
if (retries == 0) {
throw new RuntimeException(s"Batch id $batchId not found in sink status table")
}
val sqlString = s"select batch_id from APP.${SnappyStoreSinkProvider.SINK_STATE_TABLE} " +
s"where stream_query_id = '${streamName(testId)}'"
val batchIdFromTable = session.sql(sqlString).collect()
if (batchIdFromTable.isEmpty || batchIdFromTable(0)(0) != batchId) {
Thread.sleep(1000)
waitTillTheBatchIsPickedForProcessing(batchId, testId, retries - 1)
}
}
// Asserts that the target table's full contents (ordered deterministically by
// id and last_name) match the expected rows exactly.
private def assertData(expectedData: Array[Row]): Unit = {
  val query = s"select * from $tableName order by id, last_name"
  assertResult(expectedData)(session.sql(query).collect())
}
/**
 * Creates (if absent) and truncates the target table.
 *
 * @param withKeyColumn when true, declares (id, last_name) as key columns
 *                      (column table) or primary key (row table)
 * @param isRowTable    selects the "row" provider instead of "column"
 */
private def createTable(withKeyColumn: Boolean = true)(isRowTable: Boolean = false) = {
  val provider = if (isRowTable) "row" else "column"
  val options = if (!isRowTable && withKeyColumn) "options(key_columns 'id,last_name')" else ""
  val primaryKey = if (isRowTable && withKeyColumn) ", primary key (id,last_name)" else ""
  val ddl = s"create table IF NOT EXISTS $tableName (id long , first_name varchar(40), age int, " +
      s"last_name varchar(40) $primaryKey) using $provider $options "
  session.sql(ddl)
  session.sql(s"truncate table $tableName")
}
/**
 * Builds and starts a streaming query that reads CSV-encoded records from the
 * given Kafka topic and writes them to the snappy sink.
 *
 * Records are comma-split; a 5-field record carries a trailing `_eventType`
 * column, a 4-field record does not.
 *
 * Fix: the original discarded the results of `streamWriter.trigger(...)
 * .option(...)` and `streamWriter.options(options)`, silently relying on
 * DataStreamWriter mutating in place. The builder is now reassigned on every
 * chain so the configuration flow is explicit.
 *
 * @param topic               Kafka topic to subscribe to
 * @param testId              used to derive a unique query name
 * @param withEventTypeColumn include the `_eventType` column in the schema
 * @param withQueryName       attach a query name derived from testId
 * @param options             extra sink options passed through verbatim
 * @return the started StreamingQuery
 */
private def createAndStartStreamingQuery(topic: String, testId: Int,
    withEventTypeColumn: Boolean = true, withQueryName: Boolean = true,
    options: Map[String, String] = Map.empty) = {
  val streamingDF = session
    .readStream
    .format("kafka")
    .option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
    .option("subscribe", topic)
    .option("startingOffsets", "earliest")
    .load()

  // Schema of the parsed records; `_eventType` is only present when requested.
  def structFields() = {
    StructField("id", LongType, nullable = false) ::
        StructField("firstName", StringType) ::
        StructField("age", IntegerType) ::
        StructField("last_name", StringType) ::
        (if (withEventTypeColumn) {
          StructField("_eventType", IntegerType, nullable = false) :: Nil
        }
        else {
          Nil
        })
  }

  val schema = StructType(structFields())
  implicit val encoder: ExpressionEncoder[Row] = RowEncoder(schema)
  var streamWriter = streamingDF.selectExpr("CAST(value AS STRING)")
      .as[String]
      .map(_.split(","))
      .map(r => {
        if (r.length == 5) {
          Row(r(0).toLong, r(1), r(2).toInt, r(3), r(4).toInt)
        } else {
          Row(r(0).toLong, r(1), r(2).toInt, r(3))
        }
      })
      .writeStream
      .format("snappySink")
  if (withQueryName) {
    streamWriter = streamWriter.queryName(streamName(testId))
  }
  streamWriter = streamWriter.trigger(ProcessingTime("1 seconds"))
      .option("tableName", tableName)
      .option("checkpointLocation", checkpointDirectory)
  streamWriter = streamWriter.options(options)
  streamWriter.start()
}
// Derives the unique stream-query name used for the given test id.
private def streamName(testId: Int) = "users_" + testId
}
/**
 * Sink callback that simulates catalog-stale failures for a configurable
 * number of attempts. On each call it throws (alternating between a
 * CatalogStaleException-wrapping and a SQLException-wrapping RuntimeException)
 * until the attempt counter reaches zero, at which point it succeeds.
 * Retry calls are expected to carry possibleDuplicate = true.
 */
class TestSinkCallback extends SnappySinkCallback {
  private var attempt = -1

  override def process(snappySession: SnappySession, sinkProps: Map[String, String], batchId: Long,
      df: Dataset[Row], possibleDuplicate: Boolean): Unit = {
    val configuredAttempts = sinkProps("attempts").toInt
    if (attempt == -1) attempt = configuredAttempts
    // Any call after the first one is a retry and must be flagged as a
    // possible duplicate by the sink machinery.
    if (attempt < configuredAttempts) {
      assert(possibleDuplicate, "Value of possibleDuplicate should be true for retry attempts")
    }
    attempt -= 1
    if (sinkProps.contains("catalogNotStale")) {
      throw new RuntimeException("catalogNotStale")
    }
    // attempt == 0 means success; otherwise keep failing, alternating the
    // wrapped cause between catalog-stale and SQL schema-mismatch errors.
    if (attempt != 0) {
      if (attempt % 2 == 0) {
        throw new RuntimeException(new CatalogStaleException("dummy", null))
      } else {
        throw new RuntimeException(new SQLException("dummy", SNAPPY_CATALOG_SCHEMA_VERSION_MISMATCH))
      }
    }
  }
}
|
molicode/TextileChallenge
|
src/main/java/com/textile/challenge/pricecatalog/util/exceptions/ProductNotFoundException.java
|
package com.textile.challenge.pricecatalog.util.exceptions;
/**
* @author <NAME>
*/
/**
 * Thrown when a product lookup in the price catalog does not match any known
 * product.
 */
public class ProductNotFoundException extends Exception {

    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception with a human-readable description of the failed
     * lookup.
     *
     * @param errorMessage description of the product that was not found
     */
    public ProductNotFoundException(String errorMessage) {
        super(errorMessage);
    }

    /**
     * Creates the exception with a description and the underlying cause, so
     * callers wrapping lower-level failures do not lose the stack trace.
     *
     * @param errorMessage description of the product that was not found
     * @param cause        the lower-level failure that triggered this exception
     */
    public ProductNotFoundException(String errorMessage, Throwable cause) {
        super(errorMessage, cause);
    }
}
|
1aerostorm/golos
|
plugins/json_rpc/include/golos/plugins/json_rpc/plugin.hpp
|
#pragma once
#include <appbase/application.hpp>
#include <golos/plugins/json_rpc/utility.hpp>
#include <fc/variant.hpp>
#include <fc/io/json.hpp>
#include <fc/reflect/variant.hpp>
#include <fc/exception/exception.hpp>
#include <boost/config.hpp>
#include <boost/any.hpp>
/**
* This plugin holds bindings for all APIs and their methods
* and can dispatch JSONRPC requests to the appropriate API.
*
* For a plugin to use the API Register, it needs to specify
* the register as a dependency. Then, during initializtion,
* register itself using add_api.
*
* Ex.
* appbase::app().get_plugin< json_rpc_plugin >().add_api(
* name(),
* {
* API_METHOD( method_1 ),
* API_METHOD( method_2 ),
* API_METHOD( method_3 )
* });
*
* All method should take a single struct as an argument called
* method_1_args, method_2_args, method_3_args, etc. and should
* return a single struct as a return type.
*
* For methods that do not require arguments, use api_void_args
* as the argument type.
*/
#define STEEM_JSON_RPC_PLUGIN_NAME "json_rpc"
#define JSON_RPC_REGISTER_API(API_NAME) \
{ \
golos::plugins::json_rpc::detail::register_api_method_visitor vtor( API_NAME ); \
for_each_api( vtor ); \
}
#define JSON_RPC_PARSE_ERROR (-32700)
#define JSON_RPC_INVALID_REQUEST (-32600)
#define JSON_RPC_METHOD_NOT_FOUND (-32601)
#define JSON_RPC_INVALID_PARAMS (-32602)
#define JSON_RPC_INTERNAL_ERROR (-32603)
#define SERVER_INTERNAL_ERROR (-32000)
#define SERVER_UNSUPPORTED_OPERATION (-32001) // unsupported_operation
#define SERVER_INVALID_PARAMETER (-32002) // parameter_exception (invalid_arguments_count, missing_object, object_already_exist)
#define SERVER_BUSINESS_LOGIC_ERROR (-32003) // business_exception (bandwidth_exception, insufficient_funds, logic_exception)
#define SERVER_MISSING_AUTHORITY (-32004) // tx_missing_authority
#define SERVER_INVALID_OPERATION (-32005) // tx_invalid_operation (client must check inner exception)
#define SERVER_INVALID_TRANSACTION (-32006) // transaction_exception
namespace golos {
namespace plugins {
namespace json_rpc {
using namespace appbase;
/**
* @brief Internal type used to bind api methods
* to names.
*
* Arguments: Variant object of propert arg type
*/
using api_method = std::function<fc::variant(msg_pack &)>;
/**
* @brief An API, containing APIs and Methods
*
* An API is composed of several calls, where each call has a
* name defined by the API class. The api_call functions
* are compile time bindings of names to methods.
*/
using api_description = std::map<string, api_method>;
struct api_method_signature {
fc::variant args;
fc::variant ret;
};
/**
 * JSON-RPC dispatcher plugin. Other plugins register their API methods via
 * add_api_method (usually through JSON_RPC_REGISTER_API) and incoming request
 * bodies are dispatched through call().
 *
 * Implementation lives behind a pimpl to keep this header light.
 */
class plugin final : public appbase::plugin<plugin> {
public:
    // Callback invoked with the serialized JSON-RPC response body.
    using response_handler_type = std::function<void (const std::string &)>;

    plugin();

    ~plugin();

    APPBASE_PLUGIN_REQUIRES();

    void set_program_options(boost::program_options::options_description &,
                             boost::program_options::options_description &);

    // Stable plugin name used by appbase for lookup/registration.
    static const std::string &name() {
        static std::string name = STEEM_JSON_RPC_PLUGIN_NAME;
        return name;
    }

    void plugin_initialize(const boost::program_options::variables_map &options) override;

    void plugin_startup() override;

    void plugin_shutdown() override;

    // Binds api_name.method_name to the given handler. The signature parameter
    // is currently unused (commented out in the declaration).
    void add_api_method(const string &api_name, const string &method_name,
                        const api_method &api/*, const api_method_signature& sig */);

    // Parses a raw JSON-RPC request body, dispatches it, and delivers the
    // response through the supplied handler.
    void call(const string &body, response_handler_type);

private:
    class impl;

    std::unique_ptr<impl> pimpl;
};
namespace detail {
/**
 * Visitor passed to a plugin's for_each_api() to register every API method
 * with the json_rpc plugin under a fixed API name (see
 * JSON_RPC_REGISTER_API).
 */
class register_api_method_visitor {
public:
    register_api_method_visitor(const std::string &api_name) : _api_name(api_name),
            _json_rpc_plugin(appbase::app().get_plugin< json_rpc::plugin >()) {
    }

    // Registers one method: the stored lambda forwards the unpacked request
    // args to (plugin.*method) and wraps the result in an fc::variant.
    // NOTE(review): the lambda captures `plugin` by reference — this assumes
    // the owning plugin outlives the json_rpc registry (true for appbase
    // plugins, which live for the application's lifetime).
    template<typename Plugin, typename Method, typename Args, typename Ret>
    void operator()(Plugin &plugin, const std::string &method_name, Method method, Args *args,
                    Ret *ret) {
        _json_rpc_plugin.add_api_method(_api_name, method_name,
                [&plugin, method](msg_pack &args) -> fc::variant {
                    return fc::variant((plugin.*method)(args));
                });
        /*api_method_signature{ fc::variant( Args() ), fc::variant( Ret() ) }*/ //);
    }

private:
    std::string _api_name;
    json_rpc::plugin &_json_rpc_plugin;
};
}
}
}
} // steem::plugins::json_rpc
FC_REFLECT((golos::plugins::json_rpc::api_method_signature), (args)(ret))
|
mdonahue-godaddy/Go-for-DevOps
|
chapter/16/workflow/internal/service/executor/executor.go
|
<reponame>mdonahue-godaddy/Go-for-DevOps<filename>chapter/16/workflow/internal/service/executor/executor.go
/*
Package executor provides the Work type which is used to execute a pb.WorkReq.
This package is the meat of the engine.
To create a Work object, simply:
work := executor.New(req, status}
After creating a Work object, validate it:
if err := work.Validate(); err !=nil {
// Do something
}
To run the Work object, do:
ch := work.Run()
Once Run() returns, the pb.Status object passed will contain the results of running the WorkReq.
*/
package executor
import (
"context"
"fmt"
"log"
"sync"
"time"
"github.com/PacktPublishing/Go-for-DevOps/chapter/16/workflow/internal/es"
"github.com/PacktPublishing/Go-for-DevOps/chapter/16/workflow/internal/policy"
"github.com/PacktPublishing/Go-for-DevOps/chapter/16/workflow/internal/policy/config"
"github.com/PacktPublishing/Go-for-DevOps/chapter/16/workflow/internal/service/jobs"
"google.golang.org/protobuf/proto"
pb "github.com/PacktPublishing/Go-for-DevOps/chapter/16/workflow/proto"
)
// Work is an executor for executing a WorkReq received by the server.
type Work struct {
	// req is the work request being executed.
	req *pb.WorkReq

	// mu guards status; all writes go through the set*Status helpers.
	mu     sync.Mutex
	status *pb.StatusResp

	// ch streams status snapshots to the caller; buffered (size 1) so
	// sendStatus can replace a stale, unconsumed update with a newer one.
	ch chan *pb.StatusResp
}
// New is the constructor for Work. The caller supplies both the request to
// execute and the StatusResp that will accumulate its results.
func New(req *pb.WorkReq, status *pb.StatusResp) *Work {
	w := &Work{req: req, status: status}
	w.ch = make(chan *pb.StatusResp, 1)
	return w
}
// Run validates that a WorkReq is correct and passed policy, then executes it.
// It returns immediately; progress is reported as *pb.StatusResp snapshots on
// the returned channel, which is closed once the work finishes or is stopped.
func (w *Work) Run(ctx context.Context) chan *pb.StatusResp {
	w.setWorkStatus(pb.Status_StatusRunning, false)

	go func() {
		defer close(w.ch)

		// Wait for the emergency-stop subsystem to green-light this workflow
		// type. Anything other than es.Go means we must not run at all.
		esCh, cancelES := es.Data.Subscribe(w.req.Name)
		defer cancelES()
		if <-esCh != es.Go {
			w.setWorkStatus(pb.Status_StatusFailed, true)
			return
		}

		var cancel context.CancelFunc
		ctx, cancel = context.WithCancel(ctx)
		defer cancel()

		// If we get an emergency stop, cancel our context.
		// If the context gets cancelled, then just exit.
		go func() {
			select {
			case <-ctx.Done():
				return
			case <-esCh:
				log.Println("Emergency Stop called on running workflow type ", w.req.Name)
				w.setWorkStatus(pb.Status_StatusFailed, true)
				cancel()
			}
		}()

		// Loop through each block one at a time and execute the Jobs located in them
		// at the rate limit defined for the block.
		for i, block := range w.req.Blocks {
			if ctx.Err() != nil {
				break
			}
			stat := w.status.Blocks[i]
			if err := w.runJobs(ctx, block, stat); err != nil {
				break
			}
		}

		// Record our final state based on if any of our blocks failed.
		completed := true
		for _, block := range w.status.Blocks {
			if block.Status == pb.Status_StatusFailed {
				completed = false
				w.setWorkStatus(pb.Status_StatusFailed, false)
			}
		}
		if completed {
			w.setWorkStatus(pb.Status_StatusCompleted, false)
		}
	}()
	return w.ch
}
// setWorkStatus records the overall workflow status (and whether an emergency
// stop caused it) under the lock, then pushes a snapshot to the caller.
func (w *Work) setWorkStatus(status pb.Status, esStopped bool) {
	w.mu.Lock()
	defer w.mu.Unlock()

	w.status.Status = status
	w.status.WasEsStopped = esStopped
	w.sendStatus(w.status)
}
// setBlockStatus records a single block's status under the lock and pushes a
// snapshot of the whole work status to the caller.
func (w *Work) setBlockStatus(block *pb.BlockStatus, status pb.Status) {
	w.mu.Lock()
	defer w.mu.Unlock()

	block.Status = status
	w.sendStatus(w.status)
}
// setJobStatus records a single job's status and error text under the lock and
// pushes a snapshot of the whole work status to the caller.
func (w *Work) setJobStatus(job *pb.JobStatus, status pb.Status, err string) {
	w.mu.Lock()
	defer w.mu.Unlock()

	job.Status = status
	job.Error = err
	w.sendStatus(w.status)
}
// sendStatus sends the status of the WorkReq on our output channel. If the channel
// is currently blocked with another status update, it removes that update for the newer one.
// Callers must hold w.mu; the clone below is what makes the queued value safe
// to read after the lock is released.
func (w *Work) sendStatus(status *pb.StatusResp) {
	// We clone our status to prevent any concurrent access issues once the lock around
	// sendStatus is released.
	status = proto.Clone(status).(*pb.StatusResp)
	for {
		select {
		case w.ch <- status:
			return
		default:
			// Channel full: drain the stale update and retry. The inner
			// default covers the race where a reader consumed it first.
			select {
			case <-w.ch:
			default:
			}
		}
	}
}
// runJobs runs every Job in block, at most block.RateLimit at a time,
// recording per-job and per-block status as it goes. A fatal job error (or a
// missing job implementation) cancels the remaining jobs. It returns ctx.Err()
// so the caller can distinguish cancellation from normal completion.
func (w *Work) runJobs(ctx context.Context, block *pb.Block, blockStatus *pb.BlockStatus) error {
	ctx, cancel := context.WithCancel(ctx)
	// Bug fix: cancel was previously invoked only on failure paths inside the
	// job goroutines, so the derived context leaked whenever every job
	// succeeded (go vet's lostcancel check). Deferring it here is safe: by the
	// time we return, wg.Wait() guarantees no job is still using ctx.
	defer cancel()

	// Setup our rate limiter.
	limit := block.RateLimit
	if limit < 1 {
		limit = 1
	}
	rateLimiter := make(chan struct{}, int(limit))

	w.setBlockStatus(blockStatus, pb.Status_StatusRunning)

	// Execute our Jobs.
	wg := sync.WaitGroup{}
	for i, job := range block.Jobs {
		i := i
		job := job

		// Acquire a rate-limiter slot, or fall through on cancellation; the
		// ctx.Err() check below breaks out of the loop in that case.
		select {
		case rateLimiter <- struct{}{}:
		case <-ctx.Done():
		}
		if ctx.Err() != nil {
			break
		}

		wg.Add(1)
		go func() {
			defer wg.Done()
			defer func() { <-rateLimiter }()

			js := blockStatus.Jobs[i]

			j, err := jobs.GetJob(job.Name)
			if err != nil {
				// Validation accepted this job name but the registry no
				// longer has it — treat as fatal and stop the block.
				cancel()
				w.setJobStatus(js, pb.Status_StatusFailed, fmt.Sprintf("a Job(%s) passed validation but when ran could not be found, bug?", job.Name))
				return
			}

			w.setJobStatus(js, pb.Status_StatusRunning, "")
			err = j.Run(ctx, job)
			if err != nil {
				// Fatal errors stop the whole block; non-fatal ones fail
				// only this job.
				if jobs.IsFatal(err) {
					cancel()
				}
				w.setJobStatus(js, pb.Status_StatusFailed, err.Error())
				return
			}
			w.setJobStatus(js, pb.Status_StatusCompleted, "")
		}()
	}
	wg.Wait()

	// If any Job failed, the block failed.
	for _, js := range blockStatus.Jobs {
		if js.Status == pb.Status_StatusFailed {
			w.setBlockStatus(blockStatus, pb.Status_StatusFailed)
			return ctx.Err()
		}
	}
	w.setBlockStatus(blockStatus, pb.Status_StatusCompleted)
	return ctx.Err()
}
// Validate validates that a WorkReq is valid. This will check that basic values are set correctly
// and run all policies for this Workflow.
func Validate(ctx context.Context, req *pb.WorkReq) error {
	// Structural checks first: every block needs at least one job, every job
	// must map to a registered implementation and pass its own validation.
	for blockNum, b := range req.Blocks {
		if len(b.Jobs) == 0 {
			return fmt.Errorf("Block(%d) had 0 jobs", blockNum)
		}
		for jobNum, j := range b.Jobs {
			job, err := jobs.GetJob(j.Name)
			if err != nil {
				return fmt.Errorf("Block(%d) Job(%d) had a invalid Type(%s)", blockNum, jobNum, j.Name)
			}
			if err := job.Validate(j); err != nil {
				return fmt.Errorf("Block(%d) Job(%d)(%s) did not validate: %s)", blockNum, jobNum, j.Name, err)
			}
		}
	}

	// Policy checks: the workflow must have an entry in the policy config, and
	// every configured policy must pass within a 30s budget.
	conf, err := config.Policies.Read()
	if err != nil {
		log.Println("policy config could not be read: ", err)
		return fmt.Errorf("cannot read our policies config: %s", err)
	}

	workConf, ok := conf.Workflows[req.Name]
	if !ok {
		return fmt.Errorf("Workflow does not have an associated policy in the policy configuration file")
	}

	args := make([]policy.PolicyArgs, 0, len(workConf.Policies))
	for _, p := range workConf.Policies {
		args = append(args, policy.PolicyArgs{Name: p.Name, Settings: p.SettingsTyped})
	}

	policyContext, cancel := context.WithTimeout(ctx, 30*time.Second)
	defer cancel()

	if err := policy.Run(policyContext, req, args...); err != nil {
		return err
	}
	return nil
}
|
camsys/transam_trans
|
db/data_migrations/20180404185731_cleanup_fta_funding_types.rb
|
class CleanupFtaFundingTypes < ActiveRecord::DataMigration
  # Seeds the canonical set of FTA funding types, skipping any code already
  # present, so the migration is idempotent.
  #
  # Fix: the original assigned the return value of `.each` to
  # `fta_funding_types`, a variable that was never used afterwards — the data
  # literal and the iteration are now separated for clarity.
  def up
    fta_funding_types = [
      {:active => 1, :name => 'Urbanized Area Formula Program', :code => 'UA', :description => 'UA -Urbanized Area Formula Program.'},
      {:active => 1, :name => 'Other Federal funds', :code => 'OF', :description => 'OF-Other Federal funds.'},
      {:active => 1, :name => 'Non-Federal public funds', :code => 'NFPA', :description => 'NFPA-Non-Federal public funds.'},
      {:active => 1, :name => 'Non-Federal private funds', :code => 'NFPE', :description => 'NFPE-Non-Federal private funds.'},
      {:active => 1, :name => 'Rural Area Formula Program', :code => 'RAFP', :description => 'Rural Area Formula Program.'},
      {:active => 1, :name => 'Enhanced Mobility for Seniors and Individuals with Disabilities', :code => 'EMSID', :description => 'Enhanced Mobility for Seniors and Individuals with Disabilities.'},
      {:active => 0, :name => 'Unknown', :code => 'XX', :description => 'FTA funding type not specified.'}
    ]

    fta_funding_types.each do |funding_type|
      # Only create rows whose code is not already in the table.
      FtaFundingType.create!(funding_type) if FtaFundingType.find_by(code: funding_type[:code]).nil?
    end
  end
end
|
cnsuhao/DirectUI-6
|
DirectUI/Progress.h
|
<reponame>cnsuhao/DirectUI-6
#pragma once
namespace DirectUI
{
class UILIB_API Progress : public Element
{
public:
Progress(Progress const &);
Progress(void);
virtual ~Progress(void);
Progress & operator=(Progress const &);
static long __stdcall Create(Element *, unsigned long *, Element * *);
static IClassInfo * __stdcall GetClassInfoPtr(void);
static PropertyInfo const * __stdcall PositionProp(void);
static long __stdcall Register(void);
static void __stdcall SetClassInfoPtr(IClassInfo *);
static PropertyInfo const * __stdcall MaximumProp(void);
static PropertyInfo const * __stdcall MinimumProp(void);
int GetMaximum(void);
int GetMinimum(void);
int GetPosition(void);
long Initialize(Element *, unsigned long *);
long SetMaximum(int);
long SetMinimum(int);
long SetPosition(int);
virtual IClassInfo * GetClassInfoW(void);
virtual SIZE GetContentSize(int, int, Surface *);
virtual void Paint(HDC, RECT const *, RECT const *, RECT *, RECT *);
private:
static IClassInfo * s_pClassInfo;
};
// Proxy exposing the Progress range/value to the DirectUI property system.
// NOTE(review): declarations only — method semantics inferred from names;
// confirm against the implementation.
class UILIB_API ProgressRangeValueProxy : public IProxy
{
public:
    ProgressRangeValueProxy(ProgressRangeValueProxy const &);
    ProgressRangeValueProxy(void);
    ProgressRangeValueProxy & operator=(ProgressRangeValueProxy const &);

    // Dispatches a proxied method call by index with a packed argument blob.
    virtual long DoMethod(int, char *);

protected:
    virtual void Init(Element *);
};
}
|
bartveenstra/open-banking-gateway
|
opba-protocols/xs2a-protocol-tests/xs2a-bdd-sandbox/src/test/java/de/adorsys/opba/protocol/xs2a/tests/e2e/sandbox/SandboxE2EProtocolPisTest.java
|
package de.adorsys.opba.protocol.xs2a.tests.e2e.sandbox;
import de.adorsys.opba.protocol.api.common.Approach;
import de.adorsys.opba.protocol.xs2a.tests.e2e.JGivenConfig;
import de.adorsys.opba.protocol.xs2a.tests.e2e.sandbox.servers.SandboxServers;
import de.adorsys.opba.protocol.xs2a.tests.e2e.sandbox.servers.WebDriverBasedPaymentInitiation;
import de.adorsys.opba.protocol.xs2a.tests.e2e.sandbox.servers.config.RetryableConfig;
import de.adorsys.opba.protocol.xs2a.tests.e2e.stages.PaymentResult;
import de.adorsys.psd2.sandbox.cms.starter.Xs2aCmsAutoConfiguration;
import io.github.bonigarcia.seljup.SeleniumExtension;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.hateoas.HypermediaAutoConfiguration;
import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.UUID;
import static de.adorsys.opba.protocol.xs2a.tests.Const.ENABLE_HEAVY_TESTS;
import static de.adorsys.opba.protocol.xs2a.tests.Const.TRUE_BOOL;
import static de.adorsys.opba.protocol.xs2a.tests.TestProfiles.MOCKED_SANDBOX;
import static de.adorsys.opba.protocol.xs2a.tests.TestProfiles.ONE_TIME_POSTGRES_RAMFS;
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
/**
* Happy-path heavy test that uses Dynamic-Sandbox to drive banking-protocol.
*/
@EnabledIfEnvironmentVariable(named = ENABLE_HEAVY_TESTS, matches = TRUE_BOOL)
@EnableAutoConfiguration(exclude = {
        HypermediaAutoConfiguration.class,
        Xs2aCmsAutoConfiguration.class,
        ManagementWebSecurityAutoConfiguration.class,
        SecurityAutoConfiguration.class,
})
@ExtendWith(SeleniumExtension.class)
@Transactional(propagation = Propagation.NOT_SUPPORTED)
@SpringBootTest(classes = {RetryableConfig.class, Xs2aRealSandboxProtocolApplication.class, JGivenConfig.class}, webEnvironment = RANDOM_PORT)
@ActiveProfiles(profiles = {ONE_TIME_POSTGRES_RAMFS, MOCKED_SANDBOX})
public class SandboxE2EProtocolPisTest extends SandboxCommonTest<
        SandboxServers<? extends SandboxServers<?>>,
        WebDriverBasedPaymentInitiation<? extends WebDriverBasedPaymentInitiation<?>>,
        PaymentResult<? extends PaymentResult<?>>> {

    // Fresh random credentials per test instance so runs do not collide on
    // previously-registered OPBA users.
    private final String OPBA_LOGIN = UUID.randomUUID().toString();
    private final String OPBA_PASSWORD = UUID.randomUUID().toString();

    /**
     * Happy path for single-payment initiation using the EMBEDDED SCA flow:
     * the user authenticates and completes the SCA challenge (email2) via the
     * OPBA API itself, without being redirected to the ASPSP.
     */
    @ParameterizedTest
    @EnumSource(Approach.class)
    void testSinglePaymentUsingEmbedded(Approach expectedApproach) {
        given()
                .enabled_embedded_sandbox_mode()
                .preferred_sca_approach_selected_for_all_banks_in_opba(expectedApproach)
                .rest_assured_points_to_opba_server()
                .user_registered_in_opba_with_credentials(OPBA_LOGIN, OPBA_PASSWORD);
        when()
                .fintech_calls_initiate_payment_for_max_musterman()
                .and()
                .user_logged_in_into_opba_as_opba_user_with_credentials_using_fintech_supplied_url_pis(OPBA_LOGIN, OPBA_PASSWORD)
                .and()
                .user_max_musterman_provided_initial_parameters_to_make_payment()
                .and()
                .user_max_musterman_provided_password_to_embedded_authorization()
                .and()
                .user_max_musterman_selected_sca_challenge_type_email2_to_embedded_authorization()
                .and()
                .user_max_musterman_provided_sca_challenge_result_to_embedded_authorization_and_sees_redirect_to_fintech_ok_pis();
        then()
                .open_banking_has_consent_for_max_musterman_payment()
                .fintech_calls_consent_activation_for_current_authorization_id()
                .fintech_calls_payment_status()
                .fintech_calls_payment_information_iban_700();
    }

    /**
     * Happy path for single-payment initiation using the REDIRECT SCA flow:
     * the user is sent to the sandbox ASPSP's web UI, driven via the injected
     * Firefox WebDriver, and then redirected back to the TPP.
     */
    @ParameterizedTest
    @EnumSource(Approach.class)
    void testSinglePaymentUsingRedirect(Approach expectedApproach, FirefoxDriver firefoxDriver) {
        given()
                .enabled_redirect_sandbox_mode()
                .preferred_sca_approach_selected_for_all_banks_in_opba(expectedApproach)
                .rest_assured_points_to_opba_server()
                .user_registered_in_opba_with_credentials(OPBA_LOGIN, OPBA_PASSWORD);
        when()
                .fintech_calls_initiate_payment_for_anton_brueckner()
                .and()
                .user_logged_in_into_opba_as_opba_user_with_credentials_using_fintech_supplied_url_pis(OPBA_LOGIN, OPBA_PASSWORD)
                .and()
                .user_anton_brueckner_provided_initial_parameters_to_authorize_initiation_payment()
                .and()
                .user_anton_brueckner_sees_that_he_needs_to_be_redirected_to_aspsp_and_redirects_to_aspsp_pis()
                .and()
                .sandbox_anton_brueckner_navigates_to_bank_auth_page(firefoxDriver)
                .and()
                .sandbox_anton_brueckner_inputs_username_and_password(firefoxDriver)
                .and()
                .sandbox_anton_brueckner_confirms_consent_information(firefoxDriver)
                .and()
                .sandbox_anton_brueckner_selects_sca_method(firefoxDriver)
                .and()
                .sandbox_anton_brueckner_provides_sca_challenge_result(firefoxDriver)
                .and()
                .sandbox_anton_brueckner_clicks_redirect_back_to_tpp_button_api_localhost_cookie_only(firefoxDriver);
        then()
                .open_banking_has_consent_for_anton_brueckner_payment()
                .fintech_calls_consent_activation_for_current_authorization_id()
                .fintech_calls_payment_status()
                .fintech_calls_payment_information_iban_400();
    }
}
|
negz/xgql
|
internal/graph/resolvers/common_test.go
|
<filename>internal/graph/resolvers/common_test.go
// Copyright 2021 Upbound Inc
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resolvers
import (
"context"
"testing"
"github.com/99designs/gqlgen/graphql"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/pkg/errors"
"github.com/vektah/gqlparser/v2/gqlerror"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/utils/pointer"
"sigs.k8s.io/controller-runtime/pkg/client"
"github.com/crossplane/crossplane-runtime/pkg/test"
"github.com/upbound/xgql/internal/auth"
"github.com/upbound/xgql/internal/clients"
"github.com/upbound/xgql/internal/graph/generated"
"github.com/upbound/xgql/internal/graph/model"
)
// Compile-time assertions that our resolver types implement the generated
// resolver interfaces.
var (
	_ generated.GenericResourceResolver          = &genericResource{}
	_ generated.SecretResolver                   = &secret{}
	_ generated.ConfigMapResolver                = &configMap{}
	_ generated.CustomResourceDefinitionResolver = &crd{}
)
// TestCRDDefinedResources exercises crd.DefinedResources: client-acquisition
// and list failures must surface as GraphQL errors (not Go errors), and
// version selection must prefer an explicitly requested version while falling
// back to the first served version otherwise.
func TestCRDDefinedResources(t *testing.T) {
	errBoom := errors.New("boom")

	gr := unstructured.Unstructured{}
	ggr := model.GetGenericResource(&gr)

	group := "example.org"
	version := "v1"
	kind := "Example"

	// In almost all real cases this would be 'ExampleList', but we infer that
	// when ListKind is not set, and want to test that this will override it.
	listKind := "Examples"

	type args struct {
		ctx     context.Context
		obj     *model.CustomResourceDefinition
		version *string
	}
	type want struct {
		krc  *model.KubernetesResourceConnection
		err  error
		errs gqlerror.List
	}

	cases := map[string]struct {
		reason  string
		clients ClientCache
		args    args
		want    want
	}{
		"GetClientError": {
			reason: "If we can't get a client we should add the error to the GraphQL context and return early.",
			clients: ClientCacheFn(func(_ auth.Credentials, _ ...clients.GetOption) (client.Client, error) {
				return &test.MockClient{}, errBoom
			}),
			args: args{
				ctx: graphql.WithResponseContext(context.Background(), graphql.DefaultErrorPresenter, graphql.DefaultRecover),
			},
			want: want{
				errs: gqlerror.List{
					gqlerror.Errorf(errors.Wrap(errBoom, errGetClient).Error()),
				},
			},
		},
		"ListDefinedResourcesError": {
			reason: "If we can't list defined resources we should add the error to the GraphQL context and return early.",
			clients: ClientCacheFn(func(_ auth.Credentials, _ ...clients.GetOption) (client.Client, error) {
				return &test.MockClient{
					MockList: test.NewMockListFn(errBoom),
				}, nil
			}),
			args: args{
				ctx: graphql.WithResponseContext(context.Background(), graphql.DefaultErrorPresenter, graphql.DefaultRecover),
				obj: &model.CustomResourceDefinition{
					Spec: &model.CustomResourceDefinitionSpec{
						Group: group,
						Names: &model.CustomResourceDefinitionNames{Kind: kind},
					},
				},
			},
			want: want{
				errs: gqlerror.List{
					gqlerror.Errorf(errors.Wrap(errBoom, errListResources).Error()),
				},
			},
		},
		"InferServedVersion": {
			reason: "We should successfully infer the served version (if none is referenceable) and return any defined resources we can list and model.",
			clients: ClientCacheFn(func(_ auth.Credentials, _ ...clients.GetOption) (client.Client, error) {
				return &test.MockClient{
					MockList: test.NewMockListFn(nil, func(obj client.ObjectList) error {
						u := *obj.(*unstructured.UnstructuredList)

						// Ensure we're being asked to list the expected GVK.
						got := u.GetObjectKind().GroupVersionKind()
						want := schema.GroupVersionKind{Group: group, Version: version, Kind: listKind}
						if diff := cmp.Diff(want, got); diff != "" {
							t.Errorf("-want GVK, +got GVK:\n%s", diff)
						}

						*obj.(*unstructured.UnstructuredList) = unstructured.UnstructuredList{Items: []unstructured.Unstructured{gr}}
						return nil
					}),
				}, nil
			}),
			args: args{
				ctx: graphql.WithResponseContext(context.Background(), graphql.DefaultErrorPresenter, graphql.DefaultRecover),
				obj: &model.CustomResourceDefinition{
					Spec: &model.CustomResourceDefinitionSpec{
						Group: group,
						Names: &model.CustomResourceDefinitionNames{
							Kind:     kind,
							ListKind: pointer.StringPtr(listKind),
						},
						Versions: []model.CustomResourceDefinitionVersion{
							// This version should be ignored because it is
							// neither referenceable nor served.
							{
								Name: "v3",
							},
							{
								Name:   version,
								Served: true,
							},
						},
					},
				},
			},
			want: want{
				krc: &model.KubernetesResourceConnection{
					Nodes:      []model.KubernetesResource{ggr},
					TotalCount: 1,
				},
			},
		},
		"SpecificVersion": {
			reason: "We should successfully return any defined resources of the requested version that we can list and model.",
			clients: ClientCacheFn(func(_ auth.Credentials, _ ...clients.GetOption) (client.Client, error) {
				return &test.MockClient{
					MockList: test.NewMockListFn(nil, func(obj client.ObjectList) error {
						u := *obj.(*unstructured.UnstructuredList)

						// Ensure we're being asked to list the expected GVK.
						got := u.GetObjectKind().GroupVersionKind()
						want := schema.GroupVersionKind{Group: group, Version: version, Kind: listKind}
						if diff := cmp.Diff(want, got); diff != "" {
							t.Errorf("-want GVK, +got GVK:\n%s", diff)
						}

						*obj.(*unstructured.UnstructuredList) = unstructured.UnstructuredList{Items: []unstructured.Unstructured{gr}}
						return nil
					}),
				}, nil
			}),
			args: args{
				ctx: graphql.WithResponseContext(context.Background(), graphql.DefaultErrorPresenter, graphql.DefaultRecover),
				obj: &model.CustomResourceDefinition{
					Spec: &model.CustomResourceDefinitionSpec{
						Group: group,
						Names: &model.CustomResourceDefinitionNames{
							Kind:     kind,
							ListKind: pointer.StringPtr(listKind),
						},
						Versions: []model.CustomResourceDefinitionVersion{
							// Normally we'd pick this version first, but in
							// this case the caller asked us to list a specific
							// version.
							{
								Name:   "v2",
								Served: true,
							},
							{
								Name:   version,
								Served: true,
							},
						},
					},
				},
				version: pointer.StringPtr(version),
			},
			want: want{
				krc: &model.KubernetesResourceConnection{
					Nodes:      []model.KubernetesResource{ggr},
					TotalCount: 1,
				},
			},
		},
	}

	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			x := &crd{clients: tc.clients}

			// Our GraphQL resolvers never return errors. We instead add an
			// error to the GraphQL context and return early.
			got, err := x.DefinedResources(tc.args.ctx, tc.args.obj, tc.args.version)
			errs := graphql.GetErrors(tc.args.ctx)

			if diff := cmp.Diff(tc.want.err, err, test.EquateErrors()); diff != "" {
				t.Errorf("\n%s\nq.DefinedResources(...): -want error, +got error:\n%s\n", tc.reason, diff)
			}
			if diff := cmp.Diff(tc.want.errs, errs, test.EquateErrors()); diff != "" {
				t.Errorf("\n%s\nq.DefinedResources(...): -want GraphQL errors, +got GraphQL errors:\n%s\n", tc.reason, diff)
			}
			if diff := cmp.Diff(tc.want.krc, got, cmpopts.IgnoreUnexported(model.ObjectMeta{})); diff != "" {
				t.Errorf("\n%s\nq.DefinedResources(...): -want, +got:\n%s\n", tc.reason, diff)
			}
		})
	}
}
|
Jamesrkiv/jabref
|
src/main/java/org/jabref/gui/maintable/OpenExternalFileAction.java
|
package org.jabref.gui.maintable;
import java.util.List;
import org.jabref.gui.DialogService;
import org.jabref.gui.Globals;
import org.jabref.gui.StateManager;
import org.jabref.gui.actions.ActionHelper;
import org.jabref.gui.actions.SimpleCommand;
import org.jabref.gui.externalfiletype.ExternalFileTypes;
import org.jabref.gui.fieldeditors.LinkedFileViewModel;
import org.jabref.logic.l10n.Localization;
import org.jabref.model.entry.BibEntry;
import org.jabref.preferences.PreferencesService;
/**
 * Opens the first linked file of the single selected entry with its external
 * application. The command is only executable while exactly one entry with at
 * least one linked file is selected.
 */
public class OpenExternalFileAction extends SimpleCommand {

    private final DialogService dialogService;
    private final StateManager stateManager;
    private final PreferencesService preferencesService;

    public OpenExternalFileAction(DialogService dialogService, StateManager stateManager, PreferencesService preferencesService) {
        this.dialogService = dialogService;
        this.stateManager = stateManager;
        this.preferencesService = preferencesService;

        // Only enabled when one entry is selected and it carries a file link.
        this.executable.bind(ActionHelper.isFilePresentForSelectedEntry(stateManager, preferencesService)
                                         .and(ActionHelper.needsEntriesSelected(1, stateManager)));
    }

    @Override
    public void execute() {
        stateManager.getActiveDatabase().ifPresent(databaseContext -> {
            List<BibEntry> selection = stateManager.getSelectedEntries();

            // Defensive re-check: selection can change between binding updates
            // and command execution.
            if (selection.size() != 1) {
                dialogService.notify(Localization.lang("This operation requires exactly one item to be selected."));
                return;
            }

            BibEntry entry = selection.get(0);
            LinkedFileViewModel viewModel = new LinkedFileViewModel(
                    entry.getFiles().get(0),
                    entry,
                    databaseContext,
                    Globals.TASK_EXECUTOR,
                    dialogService,
                    preferencesService.getXmpPreferences(),
                    preferencesService.getFilePreferences(),
                    ExternalFileTypes.getInstance());
            viewModel.open();
        });
    }
}
|
ericyl/EricylUtils
|
example/src/main/java/com/ericyl/example/event/JumpUtilsTabEvent.java
|
<reponame>ericyl/EricylUtils
package com.ericyl.example.event;
import android.support.annotation.IdRes;
import com.ericyl.example.R;
/**
 * Event carrying a destination view id and the tab it lives under, used to
 * jump to a specific utils screen.
 */
public class JumpUtilsTabEvent {

    @IdRes
    private int idRes;
    @IdRes
    private int tabIdRes;

    public JumpUtilsTabEvent() {
    }

    /**
     * Creates an event targeting the UI-utils tab by default.
     *
     * Fix: delegates to the two-argument constructor instead of duplicating
     * the field assignments, so the initialization logic lives in one place.
     *
     * @param idRes destination view id
     */
    public JumpUtilsTabEvent(@IdRes int idRes) {
        this(idRes, R.id.tab_ui_utils);
    }

    /**
     * @param idRes    destination view id
     * @param tabIdRes id of the tab containing the destination
     */
    public JumpUtilsTabEvent(@IdRes int idRes, @IdRes int tabIdRes) {
        this.idRes = idRes;
        this.tabIdRes = tabIdRes;
    }

    public int getIdRes() {
        return idRes;
    }

    public void setIdRes(int idRes) {
        this.idRes = idRes;
    }

    public int getTabIdRes() {
        return tabIdRes;
    }

    public void setTabIdRes(int tabIdRes) {
        this.tabIdRes = tabIdRes;
    }
}
|
justFooln/proj
|
java/DesignPatterns/AbstractFactory/src/com/kineticsnw/EnemyShip.java
|
<reponame>justFooln/proj
package com.kineticsnw;
/**
 * Base class for enemy ships assembled by the abstract factories. The concrete
 * weapon and engine parts are assigned by the factory (e.g.
 * UFOEnemyShipFactory / UFOBossEnemyShipFactory), so behavior varies by which
 * parts are plugged in.
 */
public abstract class EnemyShip {

    private String name;

    // Newly defined objects that represent weapon & engine
    // These can be changed easily by assigning new parts
    // in UFOEnemyShipFactory or UFOBossEnemyShipFactory
    ESWeapon weapon;
    ESEngine engine;

    public String getName() { return name; }

    public void setName(String newName) { name = newName; }

    /** Assembles this ship's parts; implemented per concrete factory product. */
    abstract void makeShip();

    // Because I defined the toString method in engine
    // when it is printed the String defined in toString goes
    // on the screen
    public void followHeroShip() {
        System.out.println(getName() + " is following the hero at " + engine);
    }

    public void displayEnemyShip() {
        System.out.println(getName() + " is on the screen");
    }

    public void enemyShipShoots() {
        System.out.println(getName() + " attacks and does " + weapon);
    }

    // If any EnemyShip object is printed to screen this shows up.
    // Fix: annotated with @Override — toString overrides Object.toString and
    // the annotation lets the compiler catch signature typos.
    @Override
    public String toString() {
        String infoOnShip = "The " + name + " has a top speed of " + engine +
                " and an attack power of " + weapon;
        return infoOnShip;
    }
}
|
Ru-Xiang/x-deeplearning
|
xdl/test/data_io/packer/pack_cutoff_test.cc
|
<filename>xdl/test/data_io/packer/pack_cutoff_test.cc
/* Copyright (C) 2016-2018 Alibaba Group Holding Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "xdl/data_io/packer/pack_feature.h"
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <map>
#include <string>
#include <vector>
#include "gtest/gtest.h"
#include "xdl/data_io/pool.h"
#include "xdl/core/framework/cpu_device.h"
namespace xdl {
namespace io {
// Fixture for PackFeature's per-feature cutoff behaviour. All state is static
// because Stat/Setup/Run are driven step-by-step from a single TEST_F case.
class PackCutoffTest: public ::testing::Test {
  static const size_t kBatchSize;

 public:
  static void SetUpTestCase();
  static void TearDownTestCase();

  // Manual test phases, invoked in order by TEST_F(PackCutoffTest, Run).
  static void TestStat();
  static void TestSetup();
  static void TestRun();

  static PackFeature *pack_;
  static Batch *batch_;

 private:
  // Verifies the packed key/value/segment tensors against the fixture data.
  static void CheckFeature();

  static Device *dev_;
  static Schema schema_;
  static SampleGroup sg_;
};
// Definitions of the fixture's shared static state.
const size_t PackCutoffTest::kBatchSize = 4;
Device *PackCutoffTest::dev_ = nullptr;
Schema PackCutoffTest::schema_;
PackFeature *PackCutoffTest::pack_ = nullptr;
Batch *PackCutoffTest::batch_ = nullptr;
SampleGroup PackCutoffTest::sg_;
// Builds the shared fixture:
//  - a schema (batch size 4) with four sparse, serialized features whose
//    cutoff depends on the name: 's*' -> 3, 'l*' -> 5, and a trailing 'r'
//    negates the cutoff (cut from the rear);
//  - one feature table whose 4 lines each carry all four features with
//    4 kv pairs: keys 1..4 and values key*0.1.
void PackCutoffTest::SetUpTestCase() {
  dev_ = new CpuDevice();
  schema_.batch_size_ = kBatchSize;
  for (auto fn: {"s", "l", "sr", "lr"}) {
    FeatureOption *f = new FeatureOption();
    f->set_name(fn);
    f->set_type(kSparse);
    f->set_table(0);
    f->set_serialized(true);
    // Derive the cutoff from the feature name (mirrored in CheckFeature
    // and TestSetup below).
    int cutoff;
    if (fn[0] == 's') {
      cutoff = 3;
    } else {
      ASSERT_TRUE(fn[0] == 'l');
      cutoff = 5;
    }
    if (strlen(fn) == 2) {
      ASSERT_TRUE(fn[1] == 'r');
      cutoff = -cutoff;
    }
    f->set_cutoff(cutoff);
    schema_.Add(f);
  }
  auto ft = sg_.add_feature_tables();
  for (int i = 0; i < kBatchSize; ++i) {
    auto fl = ft->add_feature_lines();
    for (auto fn: {"s", "l", "sr", "lr"}) {
      auto f = fl->add_features();
      f->set_name(fn);
      f->set_type(kSparse);
      // 4 kv pairs per line: (1, 0.1), (2, 0.2), (3, 0.3), (4, 0.4).
      for (int m = 1; m < 5; ++m) {
        auto v = f->add_values();
        v->set_key(m);
        v->set_value(m*0.1);
      }
    }
  }
  pack_ = new PackFeature(dev_, &schema_);
}
void PackCutoffTest::TearDownTestCase() {
BatchPool::Get()->Release(batch_);
batch_ = nullptr;
delete pack_;
pack_ = nullptr;
}
// Validates the packed output of every feature:
//  - each line originally has 4 kv pairs (keys 1..4, values key*0.1);
//  - a positive cutoff keeps the first |cutoff| pairs, a negative cutoff
//    keeps the last |cutoff| pairs (order preserved);
//  - segment entries are cumulative per-line offsets.
void PackCutoffTest::CheckFeature() {
  for (auto fn: {"s", "l", "sr", "lr"}) {
    auto blk = batch_->Get(fn);
    ASSERT_NE(nullptr, blk);
    auto key = blk->ts_[Block::kKey];
    auto value = blk->ts_[Block::kValue];
    auto seg = blk->ts_[Block::kSegment];
    uint64_t *keys = key->Raw<uint64_t>();
    float *vals = value->Raw<float>();
    uint32_t *segs = seg->Raw<uint32_t>();
    // Recompute the expected cutoff from the feature name (same rule as
    // SetUpTestCase): 's' -> 3, 'l' -> 5, trailing 'r' -> negated.
    int cutoff;
    if (fn[0] == 's') {
      cutoff = 3;
    } else {
      ASSERT_TRUE(fn[0] == 'l');
      cutoff = 5;
    }
    if (strlen(fn) == 2) {
      ASSERT_TRUE(fn[1] == 'r');
      cutoff = -cutoff;
    }
    //std::cout << fn << " cutoff " << cutoff << std::endl;
    // Walk the flattened kv arrays; n is the current line index, c counts
    // down the remaining pairs of that line.
    // NOTE(review): while n == 0, the expressions below read segs[n-1],
    // i.e. one element before the tensor; this relies on that memory
    // reading as 0 — verify against the tensor allocator.
    for (int i = 0, n = 0, c = segs[n]; i < key->Shape()[0]; ++i, --c) {
      if (c == 0) {
        //std::cout << std::endl;
        if (++ n == kBatchSize) { break; }
        c = segs[n] - segs[n-1];
        // Every line keeps min(|cutoff|, 4) pairs.
        ASSERT_EQ(std::min(abs(cutoff), 4), c);
        //std::cout << std::endl << "[" << n << "]";
      }
      //std::cout<< keys[i] << ":" << vals[i] << " ";
      if (cutoff > 0) {
        // Head kept: keys restart at 1 on every line.
        EXPECT_EQ(i - segs[n-1] + 1, keys[i]);
        EXPECT_FLOAT_EQ((i - segs[n-1] + 1)*0.1, vals[i]);
      } else {
        //EXPECT_EQ(4 - (i - segs[n-1]), keys[i]);
        //EXPECT_FLOAT_EQ((4 - (i - segs[n-1]))*0.1, vals[i]);
        // Tail kept: first surviving key is 4+cutoff+1.
        EXPECT_EQ((i - segs[n-1]) + std::max(0, 4+cutoff) + 1, keys[i]);
        EXPECT_FLOAT_EQ(((i - segs[n-1]) + std::max(0, 4+cutoff) + 1)*0.1, vals[i]);
      }
    }
    //std::cout << std::endl;
  }
}
// Feeds the whole feature table (all kBatchSize lines) to PackFeature::Stat.
// NOTE(review): the returned range is stored back into pparam but pparam is
// local and unused afterwards — presumably Stat's side effect on pack_ is
// what Setup() depends on; confirm against PackFeature.
void PackCutoffTest::TestStat() {
  PParam pparam;
  pparam.begin_ = 0;
  pparam.end_ = sg_.feature_tables(0).feature_lines_size();
  pparam.ftable_ = &sg_.feature_tables(0);
  pparam.ktable_ = 0;
  pparam.isgroup_ = 0;
  EXPECT_GE(pparam.begin_, 0);
  EXPECT_GE(pparam.end_, kBatchSize);
  //std::cout << "stat[" << pparam.isgroup_ << ", " << ktable << "] (0)" << pparam.begin_
  //    << " -> " << pparam.end_ << "(" << pparam.ftable_->feature_lines_size() << ")" << std::endl;
  auto range = pack_->Stat(pparam);
  pparam.begin_ = range.first;
  pparam.end_ = range.second;
}
// After pack_->Setup(), every schema feature must expose key/value/segment
// tensors with key shape kBatchSize * min(4, |cutoff|) (4 raw pairs per line,
// then cut) and one segment entry per line; 4 features x 3 tensors = 12.
void PackCutoffTest::TestSetup() {
  ASSERT_TRUE(pack_->Setup());
  for (auto &it: schema_.feature_opts()) {
    auto opt = it.second;
    auto ktable = opt->table();
    auto blk = batch_->GetMutable(opt->name());
    ASSERT_NE(nullptr, blk);
    ASSERT_NE(nullptr, blk->ts_[Block::kValue]);
    ASSERT_NE(nullptr, blk->ts_[Block::kKey]);
    ASSERT_NE(nullptr, blk->ts_[Block::kSegment]);
    auto kdims = blk->ts_[Block::kKey]->Shape().Dims();
    auto sdims = blk->ts_[Block::kSegment]->Shape().Dims();
    auto fn = opt->name().c_str();
    // Recompute the expected cutoff from the feature name (same rule as
    // SetUpTestCase) and cross-check it against the stored option.
    int cutoff;
    if (fn[0] == 's') {
      cutoff = 3;
    } else {
      ASSERT_TRUE(fn[0] == 'l');
      cutoff = 5;
    }
    if (strlen(fn) == 2) {
      ASSERT_TRUE(fn[1] == 'r');
      cutoff = -cutoff;
    }
    ASSERT_EQ(cutoff, opt->cutoff());
    ASSERT_EQ(kSparse, opt->type());
    ASSERT_EQ(std::vector<size_t>({kBatchSize*std::min(4, abs(cutoff))}), kdims);
    ASSERT_EQ(std::vector<size_t>({kBatchSize}), sdims);
    ASSERT_EQ(3, blk->ts_count_);
  }
  ASSERT_EQ(12, batch_->ts_count_);
}
// Runs PackFeature::Run over the whole feature table, then validates the
// packed tensors via CheckFeature; the batch's tensor count must stay at 12.
void PackCutoffTest::TestRun() {
  PParam pparam;
  pparam.begin_ = 0;
  pparam.end_ = sg_.feature_tables(0).feature_lines_size();
  pparam.ftable_ = &sg_.feature_tables(0);
  pparam.ktable_ = 0;
  pparam.isgroup_ = 0;
  //std::cout << "run[" << pparam.isgroup_ << ", " << ktable << "] (0)" << pparam.begin_
  //    << " -> " << pparam.end_ << "(" << pparam.ftable_->feature_lines_size() << ")" << std::endl;
  auto range = pack_->Run(pparam);
  pparam.begin_ = range.first;
  pparam.end_ = range.second;
  CheckFeature();
  EXPECT_EQ(12, batch_->ts_count_);
}
// End-to-end pass: acquire a batch, bind the packer to it, then drive the
// Stat -> Setup -> Run phases in order and recycle the batch.
TEST_F(PackCutoffTest, Run) {
  batch_ = BatchPool::Get()->Acquire();
  EXPECT_NE(nullptr, batch_);
  EXPECT_TRUE(pack_->Init(batch_));
  TestStat();
  TestSetup();
  TestRun();
  batch_->Reuse();
  batch_ = nullptr;
  //std::cout << "cycles: " << pack_->cycles_ << std::endl;
}
} // io
} // xdl
// Standard gtest entry point.
int main(int argc, char **argv)
{
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
|
thinline72/cayenne
|
cayenne-server/src/test/java/org/apache/cayenne/testdo/inheritance_vertical/auto/_IvSub3.java
|
<filename>cayenne-server/src/test/java/org/apache/cayenne/testdo/inheritance_vertical/auto/_IvSub3.java<gh_stars>0
package org.apache.cayenne.testdo.inheritance_vertical.auto;
import org.apache.cayenne.exp.Property;
import org.apache.cayenne.testdo.inheritance_vertical.IvRoot;
/**
 * Class _IvSub3 was generated by Cayenne.
 * It is probably a good idea to avoid changing this class manually,
 * since it may be overwritten next time code is regenerated.
 * If you need to make any customizations, please use subclass.
 */
public abstract class _IvSub3 extends IvRoot {

    private static final long serialVersionUID = 1L;

    // Primary-key column of the underlying table.
    public static final String ID_PK_COLUMN = "ID";

    // Typed property descriptor for the "ivRoot" relationship.
    public static final Property<IvRoot> IV_ROOT = new Property<>("ivRoot");

    public IvRoot getIvRoot() {
        return (IvRoot)readProperty("ivRoot");
    }
}
|
ScalablyTyped/SlinkyTyped
|
w/watson-developer-cloud/src/main/scala/typingsSlinky/watsonDeveloperCloud/visualRecognitionV3GeneratedMod/UpdateClassifierParams.scala
|
package typingsSlinky.watsonDeveloperCloud.visualRecognitionV3GeneratedMod
import typingsSlinky.ibmCloudSdkCore.helperMod.FileObject
import typingsSlinky.node.Buffer
import typingsSlinky.node.NodeJS.ReadableStream
import typingsSlinky.std.Map
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Parameters for the `updateClassifier` operation.
  *
  * ScalablyTyped facade over the Watson Visual Recognition v3 typings;
  * all members are `js.native` stubs backed by the underlying JS object.
  */
@js.native
trait UpdateClassifierParams extends StObject {

  /** The ID of the classifier. */
  var classifier_id: String = js.native

  /** Optional custom request headers. */
  var headers: js.UndefOr[js.Object] = js.native

  /** A .zip file of images that do not depict the visual subject of any of the classes of the new classifier. Must contain a minimum of 10 images. Encode special characters in the file name in UTF-8. */
  var negative_examples: js.UndefOr[ReadableStream | FileObject | Buffer] = js.native

  /** The filename for negative_examples. */
  var negative_examples_filename: js.UndefOr[String] = js.native

  /** A dictionary that contains the value for each classname. The value is a .zip file of images that depict the visual subject of a class in the classifier. The positive examples create or update classes in the classifier. You can include more than one positive example file in a call. Specify the parameter name by appending `_positive_examples` to the class name. For example, `goldenretriever_positive_examples` creates the class `goldenretriever`. Include at least 10 images in .jpg or .png format. The minimum recommended image resolution is 32X32 pixels. The maximum number of images is 10,000 images or 100 MB per .zip file. Encode special characters in the file name in UTF-8. */
  var positive_examples: js.UndefOr[Map[String, ReadableStream | FileObject | Buffer]] = js.native

  /** NOTE(review): presumably requests the full HTTP response instead of just the body when true — confirm against ibm-cloud-sdk-core docs. */
  var return_response: js.UndefOr[Boolean] = js.native
}
/** Companion with a factory for the required field and builder-style setters
  * for the optional ones (generated ScalablyTyped pattern).
  */
object UpdateClassifierParams {

  /** Creates an instance carrying only the mandatory `classifier_id`. */
  @scala.inline
  def apply(classifier_id: String): UpdateClassifierParams = {
    val __obj = js.Dynamic.literal(classifier_id = classifier_id.asInstanceOf[js.Any])
    __obj.asInstanceOf[UpdateClassifierParams]
  }

  /** Mutating builder-style setters; each `set*Undefined` clears the field. */
  @scala.inline
  implicit class UpdateClassifierParamsMutableBuilder[Self <: UpdateClassifierParams] (val x: Self) extends AnyVal {

    @scala.inline
    def setClassifier_id(value: String): Self = StObject.set(x, "classifier_id", value.asInstanceOf[js.Any])

    @scala.inline
    def setHeaders(value: js.Object): Self = StObject.set(x, "headers", value.asInstanceOf[js.Any])

    @scala.inline
    def setHeadersUndefined: Self = StObject.set(x, "headers", js.undefined)

    @scala.inline
    def setNegative_examples(value: ReadableStream | FileObject | Buffer): Self = StObject.set(x, "negative_examples", value.asInstanceOf[js.Any])

    @scala.inline
    def setNegative_examplesUndefined: Self = StObject.set(x, "negative_examples", js.undefined)

    @scala.inline
    def setNegative_examples_filename(value: String): Self = StObject.set(x, "negative_examples_filename", value.asInstanceOf[js.Any])

    @scala.inline
    def setNegative_examples_filenameUndefined: Self = StObject.set(x, "negative_examples_filename", js.undefined)

    @scala.inline
    def setPositive_examples(value: Map[String, ReadableStream | FileObject | Buffer]): Self = StObject.set(x, "positive_examples", value.asInstanceOf[js.Any])

    @scala.inline
    def setPositive_examplesUndefined: Self = StObject.set(x, "positive_examples", js.undefined)

    @scala.inline
    def setReturn_response(value: Boolean): Self = StObject.set(x, "return_response", value.asInstanceOf[js.Any])

    @scala.inline
    def setReturn_responseUndefined: Self = StObject.set(x, "return_response", js.undefined)
  }
}
|
Ahdhn/lar-cc
|
test/py/largrid/test02.py
|
<reponame>Ahdhn/lar-cc
""" Test file """
from larlib import *
mod_1 = larSplit(1)(4), larGrid(4)(1)
squares = larModelProduct([mod_1,mod_1])
VIEW(EXPLODE(1.2,1.2,1.2)(MKPOLS(squares)))
cubes = larModelProduct([squares,mod_1])
VIEW(EXPLODE(1.2,1.2,1.2)(MKPOLS(cubes)))
|
HPCToolkit/hpctest
|
internal/notes/builtin-SAVE/packages/id3lib/package.py
|
<filename>internal/notes/builtin-SAVE/packages/id3lib/package.py
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Id3lib(AutotoolsPackage):
    """Library for manipulating ID3v1 and ID3v2 tags"""

    homepage = "http://id3lib.sourceforge.net/"
    url = "https://downloads.sourceforge.net/project/id3lib/id3lib/3.8.3/id3lib-3.8.3.tar.gz"

    # Second argument is the md5 checksum of the release tarball.
    version('3.8.3', '19f27ddd2dda4b2d26a559a4f0f402a7')

    depends_on('zlib')

    # http://connie.slackware.com/~alien/slackbuilds/id3lib/build/id3lib-3.8.3_gcc4.diff
    # this is due to some changes in the c++ standard library headers
    patch("id3lib-3.8.3_gcc4.diff")
|
YiouZhu1010/container-service-extension
|
container_service_extension/minor_error_codes.py
|
<reponame>YiouZhu1010/container-service-extension
# container-service-extension
# Copyright (c) 2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
from enum import Enum
from enum import unique
@unique
class MinorErrorCode(int, Enum):
    """Collection of error code and related messages."""

    # Fallback when no more specific code applies.
    DEFAULT_ERROR_CODE = -1

    # 4000-range: request-payload validation failures. Codes come in
    # MISSING/INVALID pairs per request key.
    REQUEST_KEY_CLUSTER_NAME_MISSING = 4000
    REQUEST_KEY_CLUSTER_NAME_INVALID = 4001
    REQUEST_KEY_COMPUTE_POLICY_ACTION_MISSING = 4002
    REQUEST_KEY_COMPUTE_POLICY_ACTION_INVALID = 4003
    REQUEST_KEY_COMPUTE_POLICY_NAME_MISSING = 4004
    REQUEST_KEY_COMPUTE_POLICY_NAME_INVALID = 4005
    REQUEST_KEY_K8S_PROVIDER_MISSING = 4006
    REQUEST_KEY_K8S_PROVIDER_INVALID = 4007
    REQUEST_KEY_NETWORK_NAME_MISSING = 4008
    REQUEST_KEY_NETWORK_NAME_INVALID = 4009
    REQUEST_KEY_NODE_NAME_MISSING = 4010
    REQUEST_KEY_NODE_NAME_INVALID = 4011
    REQUEST_KEY_NODE_NAMES_LIST_MISSING = 4012
    REQUEST_KEY_NODE_NAMES_LIST_INVALID = 4013
    REQUEST_KEY_NUM_WORKERS_MISSING = 4014
    REQUEST_KEY_NUM_WORKERS_INVALID = 4015
    REQUEST_KEY_ORG_NAME_MISSING = 4016
    REQUEST_KEY_ORG_NAME_INVALID = 4017
    REQUEST_KEY_OVDC_ID_MISSING = 4018
    REQUEST_KEY_OVDC_ID_INVALID = 4019
    REQUEST_KEY_OVDC_NAME_MISSING = 4020
    REQUEST_KEY_OVDC_NAME_INVALID = 4021
    REQUEST_KEY_PKS_CLUSTER_DOMAIN_MISSING = 4022
    REQUEST_KEY_PKS_CLUSTER_DOMAIN_INVALID = 4023
    REQUEST_KEY_PKS_EXT_HOST_MISSING = 4024
    REQUEST_KEY_PKS_EXT_HOST_INVALID = 4025
    REQUEST_KEY_PKS_PLAN_NAME_MISSING = 4026
    REQUEST_KEY_PKS_PLAN_NAME_INVALID = 4027
    REQUEST_KEY_SERVER_ACTION_MISSING = 4028
    REQUEST_KEY_SERVER_ACTION_INVALID = 4029
|
dirkgermany/cloudstepper
|
userService/src/main/java/com/dam/user/rest/message/UserRequest.java
|
<gh_stars>0
package com.dam.user.rest.message;
import com.dam.user.model.entity.User;
/**
 * REST request carrying user credentials plus the id of the requesting user.
 */
public class UserRequest extends RestRequest {

    private final User user;

    /**
     * Builds a request for the given credentials on behalf of the requesting user.
     */
    public UserRequest(String userName, String password, String requestorUserId) {
        super("CS 0.0.1");
        User credentials = new User();
        credentials.setUserName(userName);
        credentials.setPassword(password);
        this.user = credentials;
        setRequestorUserId(requestorUserId);
    }

    public User getUser() {
        return user;
    }
}
|
harbby/ashtarte
|
astarte-core/src/main/java/com/github/harbby/astarte/core/coders/SqlDateEncoder.java
|
/*
* Copyright (C) 2018 The Astarte Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.harbby.astarte.core.coders;
import com.github.harbby.astarte.core.api.function.Comparator;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.Date;
/**
 * Serializer for {@link java.sql.Date}: the date is written as its epoch-millis
 * long, with -1L used as the null marker.
 *
 * NOTE(review): a genuine Date(-1) (1 ms before the epoch) is indistinguishable
 * from null on the wire and decodes to null — confirm callers never encode
 * such a value.
 *
 * @author ivan
 * @date 2021.02.09 10:01:00
 * sql date Serialize
 */
public class SqlDateEncoder
        implements Encoder<Date>
{
    // Package-private on purpose; instances are not constructed by user code.
    protected SqlDateEncoder() {}

    @Override
    public void encoder(Date value, DataOutput output)
            throws IOException
    {
        if (value == null) {
            // Null marker (see class note about the -1 collision).
            output.writeLong(-1);
        }
        else {
            output.writeLong(value.getTime());
        }
    }

    @Override
    public Date decoder(DataInput input)
            throws IOException
    {
        final long l = input.readLong();
        if (l == -1) {
            return null;
        }
        else {
            return new Date(l);
        }
    }

    @Override
    public Comparator<Date> comparator()
    {
        // Natural chronological ordering.
        return Date::compareTo;
    }
}
|
Celebrate-future/deeplearning4j
|
deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/DummyBlockDataSetIteratorTests.java
|
<reponame>Celebrate-future/deeplearning4j
/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package org.deeplearning4j.datasets.iterator;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import lombok.var;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.datasets.iterator.tools.SimpleVariableGenerator;
import org.junit.Test;
import org.nd4j.linalg.dataset.api.DataSet;
import java.util.ArrayList;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@Slf4j
public class DummyBlockDataSetIteratorTests extends BaseDL4JTest {

    // The generator yields 8 datasets; pulling blocks of 3 must return
    // 3 + 3 + 2 datasets in order, with feature value e and label e + 0.5
    // for the e-th dataset.
    @Test
    public void testBlock_1() throws Exception {
        val simpleIterator = new SimpleVariableGenerator(123, 8, 3, 3, 3);
        val iterator = new DummyBlockDataSetIterator(simpleIterator);
        assertTrue(iterator.hasAnything());

        val list = new ArrayList<DataSet>(8);

        // First two blocks are full (3 each)...
        var datasets = iterator.next(3);
        assertNotNull(datasets);
        assertEquals(3, datasets.length);
        list.addAll(Arrays.asList(datasets));

        datasets = iterator.next(3);
        assertNotNull(datasets);
        assertEquals(3, datasets.length);
        list.addAll(Arrays.asList(datasets));

        // ...the last block only has the 2 remaining datasets.
        datasets = iterator.next(3);
        assertNotNull(datasets);
        assertEquals(2, datasets.length);
        list.addAll(Arrays.asList(datasets));

        for (int e = 0; e < list.size(); e++) {
            val dataset = list.get(e);
            assertEquals(e, (int) dataset.getFeatures().getDouble(0));
            assertEquals(e + 0.5, dataset.getLabels().getDouble(0), 1e-3);
        }
    }
}
|
chenqixu/jstorm
|
jstorm-core/src/main/java/com/alibaba/jstorm/client/WorkerAssignment.java
|
<reponame>chenqixu/jstorm<gh_stars>1000+
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.jstorm.client;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import org.json.simple.JSONAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.scheduler.WorkerSlot;
import backtype.storm.utils.Utils;
import com.alibaba.jstorm.utils.JStormUtils;
/**
 * Assignment of a worker to a supervisor slot, together with its resources
 * (memory, cpu), JVM options, host name, and the component -&gt; task-count
 * layout the worker should run. Round-trips through JSON via
 * {@link #toJSONString()} and {@link #parseFromObj(Object)}.
 */
public class WorkerAssignment extends WorkerSlot implements Serializable, JSONAware {
    private static final Logger LOG = LoggerFactory.getLogger(WorkerAssignment.class);

    private static final long serialVersionUID = -3483047434535537861L;

    // component name -> number of tasks of that component in this worker
    private Map<String, Integer> componentToNum = new HashMap<>();
    private long mem;
    private int cpu;
    private String hostName;
    private String jvm;

    // JSON field names shared by toJSONString() and parseFromObj().
    private static final String COMPONENTTONUM_TAG = "componentToNum";
    private static final String MEM_TAG = "mem";
    private static final String CPU_TAG = "cpu";
    private static final String HOSTNAME_TAG = "hostName";
    private static final String JVM_TAG = "jvm";
    private static final String NODEID_TAG = "nodeId";
    private static final String PORT_TAG = "port";

    public WorkerAssignment(String nodeId, Number port) {
        super(nodeId, port);
    }

    public WorkerAssignment() {
    }

    public void addComponent(String componentName, Integer num) {
        componentToNum.put(componentName, num);
    }

    public Map<String, Integer> getComponentToNum() {
        return componentToNum;
    }

    public String getHostName() {
        return hostName;
    }

    public void setHostName(String hostName) {
        this.hostName = hostName;
    }

    public void setJvm(String jvm) {
        this.jvm = jvm;
    }

    public String getJvm() {
        return jvm;
    }

    public long getMem() {
        return mem;
    }

    public void setMem(long mem) {
        this.mem = mem;
    }

    public int getCpu() {
        return cpu;
    }

    public void setCpu(int cpu) {
        this.cpu = cpu;
    }

    @Override
    public String toJSONString() {
        Map<String, String> map = new HashMap<>();
        map.put(COMPONENTTONUM_TAG, Utils.to_json(componentToNum));
        map.put(MEM_TAG, String.valueOf(mem));
        map.put(CPU_TAG, String.valueOf(cpu));
        map.put(HOSTNAME_TAG, hostName);
        map.put(JVM_TAG, jvm);
        map.put(NODEID_TAG, getNodeId());
        map.put(PORT_TAG, String.valueOf(getPort()));
        return Utils.to_json(map);
    }

    /**
     * Reconstructs a WorkerAssignment from the Map produced by parsing the
     * JSON emitted by {@link #toJSONString()}.
     *
     * @return the assignment, or null when obj is null, not a Map, or
     *         malformed (the failure is logged).
     */
    @SuppressWarnings("unchecked")
    public static WorkerAssignment parseFromObj(Object obj) {
        if (obj == null) {
            return null;
        }
        if (!(obj instanceof Map)) {
            return null;
        }

        try {
            Map<String, String> map = (Map<String, String>) obj;
            String supervisorId = map.get(NODEID_TAG);
            String hostname = map.get(HOSTNAME_TAG);
            Integer port = JStormUtils.parseInt(map.get(PORT_TAG));
            String jvm = map.get(JVM_TAG);
            Long mem = JStormUtils.parseLong(map.get(MEM_TAG));
            Integer cpu = JStormUtils.parseInt(map.get(CPU_TAG));
            Map<String, Object> componentToNum = (Map<String, Object>) Utils.from_json(map.get(COMPONENTTONUM_TAG));

            WorkerAssignment ret = new WorkerAssignment(supervisorId, port);

            ret.hostName = hostname;
            ret.setNodeId(supervisorId);
            ret.setJvm(jvm);
            if (port != null) {
                ret.setPort(port);
            }
            if (mem != null) {
                ret.setMem(mem);
            }
            if (cpu != null) {
                ret.setCpu(cpu);
            }
            // Fix: guard against a missing or unparsable componentToNum entry;
            // previously this NPE'd inside the loop, was caught below, and the
            // whole otherwise-valid assignment was dropped.
            if (componentToNum != null) {
                for (Entry<String, Object> entry : componentToNum.entrySet()) {
                    ret.addComponent(entry.getKey(), JStormUtils.parseInt(entry.getValue()));
                }
            }
            return ret;
        } catch (Exception e) {
            LOG.error("Failed to convert to WorkerAssignment, raw:" + obj, e);
            return null;
        }
    }

    /**
     * Maps the literal string "null" to a real null; any other text is
     * returned unchanged.
     * Fix: the original called text.equals(...) and threw NPE when text was
     * null; "null".equals(text) is null-safe (null input yields null output).
     */
    public static String getStringFromJson(String text) {
        return "null".equals(text) ? null : text;
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((componentToNum == null) ? 0 : componentToNum.hashCode());
        result = prime * result + cpu;
        result = prime * result + ((hostName == null) ? 0 : hostName.hashCode());
        result = prime * result + ((jvm == null) ? 0 : jvm.hashCode());
        result = prime * result + (int) (mem ^ (mem >>> 32));
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        WorkerAssignment other = (WorkerAssignment) obj;
        if (componentToNum == null) {
            if (other.componentToNum != null)
                return false;
        } else if (!componentToNum.equals(other.componentToNum))
            return false;
        if (cpu != other.cpu)
            return false;
        if (hostName == null) {
            if (other.hostName != null)
                return false;
        } else if (!hostName.equals(other.hostName))
            return false;
        if (jvm == null) {
            if (other.jvm != null)
                return false;
        } else if (!jvm.equals(other.jvm))
            return false;
        if (mem != other.mem)
            return false;
        return true;
    }

    // Ad-hoc manual round-trip check (kept from the original).
    public static void main(String[] args) {
        WorkerAssignment input = new WorkerAssignment();
        input.setJvm("sb");
        input.setCpu(1);
        input.setMem(2);
        input.addComponent("2b", 2);

        String outString = Utils.to_json(input);
        System.out.println(input);
        // String outString =
        // "[componentToNum={},mem=1610612736,cpu=1,hostName=mobilejstorm-60-1,jvm=<null>,nodeId=<null>,port=0]";
        Object object = Utils.from_json(outString);
        System.out.println(object);
        System.out.println(parseFromObj(object));
        System.out.print(input.equals(parseFromObj(object)));
    }
}
|
develar/chromedevtools
|
wip/protocol-model/generated/org/jetbrains/wip/protocol/dom/SetAttributeValue.java
|
// Generated source
package org.jetbrains.wip.protocol.dom;
/**
 * Sets attribute for an element with given id.
 * Generated WIP (WebKit Inspector Protocol) request; parameters are
 * serialized into the message payload immediately in the constructor.
 */
public final class SetAttributeValue extends org.jetbrains.wip.protocol.WipRequest {
    /**
     * @param nodeId Id of the element to set attribute for.
     * @param name Attribute name.
     * @param value Attribute value.
     */
    public SetAttributeValue(int nodeId, String name, String value) {
        writeInt("nodeId", nodeId);
        writeString("name", name);
        writeString("value", value);
    }

    @Override
    public String getMethodName() {
        return "DOM.setAttributeValue";
    }
}
|
drtrigon/sketchbook
|
hardware/AlhambraII/picorv32/cores/picorv32/Arduino.h
|
<reponame>drtrigon/sketchbook<filename>hardware/AlhambraII/picorv32/cores/picorv32/Arduino.h
#ifndef Arduino_h
#define Arduino_h
#include <stdint.h>
#include <stdbool.h>
// picorv32: work-a-round; for size_t only - original Arduino.h does NOT have this!
#include <cstddef>
/*#ifdef __cplusplus
extern "C"{
#endif
void yield(void);*/
#define reg_spictrl (*(volatile uint32_t*)0x02000000)
#define reg_uart_clkdiv (*(volatile uint32_t*)0x02000004)
#define reg_uart_data (*(volatile uint32_t*)0x02000008)
#define reg_outp (*(volatile uint32_t*)0x03000000) // contains reg_leds also
//#define reg_inp_zero (*(volatile uint32_t*)0x04000000)
#define reg_inp (*(volatile uint32_t*)0x05000000)
//#define reg_outp2 (*(volatile uint32_t*)0x06000000) // more output...
#define F_CPU 12000000 // 12MHz
#define clk_div_s (F_CPU) // 1s
#define clk_div_ms (F_CPU/1000) // 1ms
#define clk_div_us (F_CPU/1000000) // 1us
#define HIGH 0x1
#define LOW 0x0
#define INPUT 0x0
#define OUTPUT 0x1
#define INPUT_PULLUP 0x2
#define PI 3.1415926535897932384626433832795
#define HALF_PI 1.5707963267948966192313216916398
#define TWO_PI 6.283185307179586476925286766559
#define DEG_TO_RAD 0.017453292519943295769236907684886
#define RAD_TO_DEG 57.295779513082320876798154814105
#define EULER 2.718281828459045235360287471352
/*#define SERIAL 0x0
#define DISPLAY 0x1
#define LSBFIRST 0
#define MSBFIRST 1
#define CHANGE 1
#define FALLING 2
#define RISING 3
#if defined(__AVR_ATtiny24__) || defined(__AVR_ATtiny44__) || defined(__AVR_ATtiny84__)
#define DEFAULT 0
#define EXTERNAL 1
#define INTERNAL1V1 2
#define INTERNAL INTERNAL1V1
#elif defined(__AVR_ATtiny25__) || defined(__AVR_ATtiny45__) || defined(__AVR_ATtiny85__)
#define DEFAULT 0
#define EXTERNAL 4
#define INTERNAL1V1 8
#define INTERNAL INTERNAL1V1
#define INTERNAL2V56 9
#define INTERNAL2V56_EXTCAP 13
#else
#if defined(__AVR_ATmega1280__) || defined(__AVR_ATmega2560__) || defined(__AVR_ATmega1284__) || defined(__AVR_ATmega1284P__) || defined(__AVR_ATmega644__) || defined(__AVR_ATmega644A__) || defined(__AVR_ATmega644P__) || defined(__AVR_ATmega644PA__)
#define INTERNAL1V1 2
#define INTERNAL2V56 3
#else
#define INTERNAL 3
#endif
#define DEFAULT 1
#define EXTERNAL 0
#endif
// undefine stdlib's abs if encountered
#ifdef abs
#undef abs
#endif*/
// Classic Arduino arithmetic helpers. All are plain macros: each argument may
// be evaluated more than once, so avoid side effects (e.g. min(i++, j)).
#define min(a,b) ((a)<(b)?(a):(b))
#define max(a,b) ((a)>(b)?(a):(b))
#define abs(x) ((x)>0?(x):-(x))
#define constrain(amt,low,high) ((amt)<(low)?(low):((amt)>(high)?(high):(amt)))
#define round(x) ((x)>=0?(long)((x)+0.5):(long)((x)-0.5))
#define radians(deg) ((deg)*DEG_TO_RAD)
#define degrees(rad) ((rad)*RAD_TO_DEG)
#define sq(x) ((x)*(x))
/*#define interrupts() sei()
#define noInterrupts() cli()*/
#define clockCyclesPerMicrosecond() ( F_CPU / 1000000L )
/*#define clockCyclesToMicroseconds(a) ( (a) / clockCyclesPerMicrosecond() )
#define microsecondsToClockCycles(a) ( (a) * clockCyclesPerMicrosecond() )
#define lowByte(w) ((uint8_t) ((w) & 0xff))
#define highByte(w) ((uint8_t) ((w) >> 8))
#define bitRead(value, bit) (((value) >> (bit)) & 0x01)
#define bitSet(value, bit) ((value) |= (1UL << (bit)))
#define bitClear(value, bit) ((value) &= ~(1UL << (bit)))
#define bitWrite(value, bit, bitvalue) (bitvalue ? bitSet(value, bit) : bitClear(value, bit))
// avr-libc defines _NOP() since 1.6.2
#ifndef _NOP
#define _NOP() do { __asm__ volatile ("nop"); } while (0)
#endif
typedef unsigned int word;
#define bit(b) (1UL << (b))
typedef bool boolean;
typedef uint8_t byte;
void init(void);
void initVariant(void);
int atexit(void (*func)()) __attribute__((weak));*/
typedef uint8_t byte;
void pinMode(uint8_t, uint8_t);
void digitalWrite(uint8_t, uint8_t);
int digitalRead(uint8_t);
int analogRead(uint8_t);
//void analogReference(uint8_t mode);
void analogWrite(uint8_t, int);
unsigned long millis(void);
unsigned long micros(void);
void delay(unsigned long);
/*inline void delayMicroseconds(unsigned int us) // picorv32 work-a-round; inline for speedup, still slow
{
// delays below 66 and 113 microseconds are not possible and the resolution
// is about 16-21 microseconds. the type and value of us passed may add cycles too.
#if F_CPU >= 12000000L
// numbers here are empiric estimates
if (us<=92) return;
us /= 16;
us -= 4; // important to have us >= 1 for next step (asm loop)
// busy wait (1x is 256 cycles, every further iteration is +197 cycles -> steps of ~16-21 us)
__asm__ __volatile__ (
"1: addi %0,%0,-1" "\n\t" // ? cycles
"bnez %0,1b" : "=r" (us) : "r" (us) // ? cycles
);
#else
#warning "delayMicroseconds supports 12MHz clock only"
#endif
// busy wait
// ...
return;
}*/
/*unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout);
void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val);
uint8_t shiftIn(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder);
void attachInterrupt(uint8_t, void (*)(void), int mode);
void detachInterrupt(uint8_t);*/
//#define ASSERT_NOT_IMPLEMENTED // picorv32 work-a-round
//#define HALT_ON_ASSERT // picorv32 work-a-round - strict; not compatible with some examples
#if defined(ASSERT_NOT_IMPLEMENTED)
#warning ASSERT_NOT_IMPLEMENTED enabled. Check serial output for assertions. Baudrate is the one choosen or 115200 per default.
/* Reports a failed assertion over serial (picorv32 port work-around). */
void assert(const bool, const char *); // picorv32 work-a-round
#endif
/* User sketch entry points: setup() runs once, loop() runs forever. */
void setup(void);
void loop(void);
/*// Get the bit location within the hardware port of the given virtual pin.
// This comes from the pins_*.c file for the active board configuration.
#define analogInPinToBit(P) (P)
// On the ATmega1280, the addresses of some of the port registers are
// greater than 255, so we can't store them in uint8_t's.
extern const uint16_t PROGMEM port_to_mode_PGM[];
extern const uint16_t PROGMEM port_to_input_PGM[];
extern const uint16_t PROGMEM port_to_output_PGM[];
extern const uint8_t PROGMEM digital_pin_to_port_PGM[];
// extern const uint8_t PROGMEM digital_pin_to_bit_PGM[];
extern const uint8_t PROGMEM digital_pin_to_bit_mask_PGM[];
extern const uint8_t PROGMEM digital_pin_to_timer_PGM[];
// Get the bit location within the hardware port of the given virtual pin.
// This comes from the pins_*.c file for the active board configuration.
//
// These perform slightly better as macros compared to inline functions
//
#define digitalPinToPort(P) ( pgm_read_byte( digital_pin_to_port_PGM + (P) ) )
#define digitalPinToBitMask(P) ( pgm_read_byte( digital_pin_to_bit_mask_PGM + (P) ) )
#define digitalPinToTimer(P) ( pgm_read_byte( digital_pin_to_timer_PGM + (P) ) )
#define analogInPinToBit(P) (P)
#define portOutputRegister(P) ( (volatile uint8_t *)( pgm_read_word( port_to_output_PGM + (P))) )
#define portInputRegister(P) ( (volatile uint8_t *)( pgm_read_word( port_to_input_PGM + (P))) )
#define portModeRegister(P) ( (volatile uint8_t *)( pgm_read_word( port_to_mode_PGM + (P))) )
#define NOT_A_PIN 0
#define NOT_A_PORT 0
#define NOT_AN_INTERRUPT -1
#ifdef ARDUINO_MAIN
#define PA 1
#define PB 2
#define PC 3
#define PD 4
#define PE 5
#define PF 6
#define PG 7
#define PH 8
#define PJ 10
#define PK 11
#define PL 12
#endif
#define NOT_ON_TIMER 0
#define TIMER0A 1
#define TIMER0B 2
#define TIMER1A 3
#define TIMER1B 4
#define TIMER1C 5
#define TIMER2 6
#define TIMER2A 7
#define TIMER2B 8
#define TIMER3A 9
#define TIMER3B 10
#define TIMER3C 11
#define TIMER4A 12
#define TIMER4B 13
#define TIMER4C 14
#define TIMER4D 15
#define TIMER5A 16
#define TIMER5B 17
#define TIMER5C 18
#ifdef __cplusplus
} // extern "C"
#endif*/
#ifdef __cplusplus
//#include "WCharacter.h"
//#include "WString.h"
#include "HardwareSerial.h"
//#include "USBAPI.h"
#if defined(HAVE_HWSERIAL0) && defined(HAVE_CDCSERIAL)
#error "Targets with both UART0 and CDC serial not supported"
#endif
/*uint16_t makeWord(uint16_t w);
uint16_t makeWord(byte h, byte l);
#define word(...) makeWord(__VA_ARGS__)
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
void tone(uint8_t _pin, unsigned int frequency, unsigned long duration = 0);
void noTone(uint8_t _pin);
// WMath prototypes
long random(long);
long random(long, long);
void randomSeed(unsigned long);*/
long map(long, long, long, long, long);
#endif
#include "pins_arduino.h"
#endif
|
react-vancouver/website
|
src/components/constructs/EventCard/EventCard.styles.js
|
<filename>src/components/constructs/EventCard/EventCard.styles.js
// Style definitions for the EventCard construct.
//
// Fix: all imports are grouped at the top of the module. The original
// destructured `calcSpace` from `S` between two import statements, which
// only worked because ES module imports are hoisted — evaluation order and
// exported values are unchanged by this reordering.
import S from '@symbols';
import { area, card } from '@blocks/interactive';

const { calcSpace } = S;

// Positioning context for the absolutely-placed speakers row below.
const base = {
  position: 'relative',
};

// Root composes the shared interactive "area" and "card" style blocks.
export const rootStyles = () => [base, area, card];

export const headerStyles = {
  height: calcSpace(21),
  backgroundColor: S.COLOR_THEME.SECONDARY_D,
  borderRadius: `${S.LINE_BORDER_RADIUS_4} ${S.LINE_BORDER_RADIUS_4} 0 0 `,
};

// Single-line text with ellipsis truncation.
export const textStyles = {
  width: '34rem',
  whiteSpace: 'nowrap',
  overflow: 'hidden',
  textOverflow: 'ellipsis',
};

export const footerStyles = {
  height: calcSpace(15),
  overflow: 'hidden',
};

export const speakersStyles = {
  position: 'absolute',
  top: calcSpace(21 - 2.5), // Height of the header + 1/2 of the avatar height
  width: '100%',
  display: 'flex',
  justifyContent: 'flex-end',
};
|
sguazt/olio
|
webapp/rails/trunk/vendor/plugins/rspec/spec/spec/story/runner/plain_text_story_runner_spec.rb
|
<gh_stars>1-10
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.dirname(__FILE__) + '/../story_helper'
module Spec
module Story
module Runner
# Specs for PlainTextStoryRunner: a runner that reads a plain-text story
# file (or a block-configured path), parses it into steps, and delegates
# execution to a StoryMediator. Uses the legacy rspec stub!/mock API.
describe PlainTextStoryRunner do
# Common fixture: stub the parser constructor and File.read so no real
# file I/O or parsing happens in these examples.
before(:each) do
StoryParser.stub!(:new).and_return(@parser = mock("parser"))
@parser.stub!(:parse).and_return([])
File.stub!(:read).with("path").and_return("this\nand that")
end
it "should provide access to steps" do
runner = PlainTextStoryRunner.new("path")
runner.steps do |add|
add.given("baz") {}
end
runner.steps.find(:given, "baz").should_not be_nil
end
# The file content "this\nand that" must be split into lines before parsing.
it "should parse a story file" do
runner = PlainTextStoryRunner.new("path")
@parser.should_receive(:parse).with(["this", "and that"])
runner.run(mock('runner'))
end
it "should build up a mediator with its own steps and the singleton story_runner" do
@story_runner = mock('story runner', :null_object => true)
runner = PlainTextStoryRunner.new("path")
Spec::Story::Runner::StoryMediator.should_receive(:new).with(
runner.steps, @story_runner, {}
).and_return(mediator = stub("mediator", :run_stories => nil))
runner.run(@story_runner)
end
it "should build up a parser with the mediator" do
runner = PlainTextStoryRunner.new("path")
Spec::Story::Runner::StoryMediator.should_receive(:new).and_return(mediator = stub("mediator", :run_stories => nil))
Spec::Story::Runner::StoryParser.should_receive(:new).with(mediator).and_return(@parser)
runner.run(stub("story_runner"))
end
it "should tell the mediator to run the stories" do
runner = PlainTextStoryRunner.new("path")
mediator = mock("mediator")
Spec::Story::Runner::StoryMediator.should_receive(:new).and_return(mediator)
mediator.should_receive(:run_stories)
runner.run(mock('runner'))
end
# The constructor block is an alternative to passing a path argument.
it "should accept a block instead of a path" do
runner = PlainTextStoryRunner.new do |runner|
runner.load("path/to/story")
end
File.should_receive(:read).with("path/to/story").and_return("this\nand that")
runner.run(mock('runner'))
end
it "should tell you if you try to run with no path set" do
runner = PlainTextStoryRunner.new
lambda {
runner.run(mock('runner'))
}.should raise_error(RuntimeError, "You must set a path to the file with the story. See the RDoc.")
end
# Options may be given at construction time...
it "should pass options to the mediator" do
runner = PlainTextStoryRunner.new("path", :foo => :bar)
Spec::Story::Runner::StoryMediator.should_receive(:new).
with(anything, anything, :foo => :bar).
and_return(mediator = stub("mediator", :run_stories => nil))
runner.run(mock('runner'))
end
# ...or set afterwards via the []= accessor.
it "should provide access to its options" do
runner = PlainTextStoryRunner.new("path")
runner[:foo] = :bar
Spec::Story::Runner::StoryMediator.should_receive(:new).
with(anything, anything, :foo => :bar).
and_return(mediator = stub("mediator", :run_stories => nil))
runner.run mock('runner')
end
end
end
end
end
|
didim99/TSTU-new
|
app/src/main/java/ru/didim99/tstu/core/itheory/compression/HuffmanCompressor.java
|
<gh_stars>0
package ru.didim99.tstu.core.itheory.compression;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import ru.didim99.tstu.core.itheory.compression.utils.BitStream;
import ru.didim99.tstu.core.itheory.compression.utils.HuffmanCharTable;
import ru.didim99.tstu.core.itheory.compression.utils.HuffmanTreeEntry;
/**
* Created by didim99 on 28.02.20.
*/
/**
 * Huffman-coding implementation of {@code Compressor}.
 *
 * <p>Compressed layout: 3-byte magic "HFM", the serialized
 * {@link HuffmanCharTable}, the message length as a 4-byte int, then the
 * bit-packed Huffman code of each character (last byte padded by
 * {@code BitStream.flush()}).
 *
 * <p>Created by didim99 on 28.02.20.
 */
public class HuffmanCompressor extends Compressor {
  /** File magic: ASCII "HFM". */
  private static final byte[] HEADER = {0x48, 0x46, 0x4D};

  @Override
  public byte[] compress(String data) throws IOException {
    // Build the code table from the character frequencies of the input.
    HuffmanCharTable table = new HuffmanCharTable(getFrequency(data));
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    BitStream bitStream = new BitStream(buffer, null);
    DataOutputStream stream = new DataOutputStream(buffer);
    StringBuilder infoBuilder = new StringBuilder();
    StringBuilder msgBuilder = new StringBuilder();

    buffer.write(HEADER);
    table.serializeTo(stream);
    stream.writeInt(data.length());

    // Emit each character's variable-length code bit by bit, collecting a
    // human-readable trace of the codes in msgBuilder as we go.
    for (char c : data.toCharArray()) {
      msgBuilder.append(table.getCodeStr(c)).append(' ');
      for (Boolean b : table.getCode(c))
        bitStream.pushBit(b ? 1 : 0);
    }

    bitStream.flush(); // pad the final partial byte
    int compSize = bitStream.size();   // bit-packed payload only
    int compSizeTree = buffer.size();  // payload + magic + table + length
    describe(infoBuilder, data, compSize, compSizeTree, table);
    compressed = msgBuilder.toString().trim();
    info = infoBuilder.toString().trim();
    return buffer.toByteArray();
  }

  @Override
  public String decompress(byte[] data) throws IOException {
    ByteArrayInputStream buffer = new ByteArrayInputStream(data);
    BitStream bitStream = new BitStream(buffer, null);
    DataInputStream stream = new DataInputStream(buffer);
    StringBuilder infoBuilder = new StringBuilder();
    StringBuilder msgBuilder = new StringBuilder();
    StringBuilder outBuilder = new StringBuilder();
    int compSizeTree = data.length;

    // Validate the 3-byte magic before trusting the rest of the stream.
    byte[] header = new byte[HEADER.length];
    if (buffer.read(header) < HEADER.length)
      throw new IOException("Unexpected end of file");
    if (!Arrays.equals(header, HEADER))
      throw new IOException("Invalid file header");

    HuffmanCharTable table = new HuffmanCharTable(stream);
    int totalLength = stream.readInt();
    int compSize = buffer.available();

    // Walk the Huffman tree from the root, one bit per edge; each leaf
    // yields one decoded character.
    HuffmanTreeEntry entry = table.getRoot();
    while (totalLength > 0) {
      if (entry.isLeaf()) {
        char c = entry.getCharacter();
        msgBuilder.append(table.getCodeStr(c)).append(' ');
        outBuilder.append(c);
        entry = table.getRoot();
        totalLength--;
        // Fix: re-test the loop condition instead of falling through.
        // The previous version pulled another bit unconditionally here,
        // which read one bit past the end of the payload after the final
        // character and called getChild() on a leaf root for a
        // single-symbol alphabet.
        continue;
      }
      entry = entry.getChild(bitStream.pullBit());
    }

    String message = outBuilder.toString();
    describe(infoBuilder, message, compSize, compSizeTree, table);
    compressed = msgBuilder.toString().trim();
    info = infoBuilder.toString().trim();
    return message;
  }
}
|
muhdharun/Main
|
test/java/seedu/addressbook/logic/LogicTest.java
|
<gh_stars>0
package seedu.addressbook.logic;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import seedu.addressbook.commands.*;
import seedu.addressbook.common.Messages;
import seedu.addressbook.data.AddressBook;
import seedu.addressbook.data.person.*;
import seedu.addressbook.password.Password;
import seedu.addressbook.storage.StorageFile;
import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertFalse;
import static seedu.addressbook.common.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.addressbook.common.Messages.MESSAGE_TIMESTAMPS_LISTED_OVERVIEW;
import static seedu.addressbook.password.Password.MESSAGE_ENTER_PASSWORD;
import static seedu.addressbook.password.Password.unlockHQP;
import static seedu.addressbook.password.Password.unlockPO;
public class LogicTest {
/**
* See https://github.com/junit-team/junit4/wiki/rules#temporaryfolder-rule
*/
@Rule
public TemporaryFolder saveFolder = new TemporaryFolder();
private StorageFile saveFile;
private AddressBook addressBook;
private Logic logic;
@Before
// Fresh fixture per test: a temporary save file, an empty address book
// persisted to it, and a Logic instance wired to both. TemporaryFolder
// (the @Rule above) deletes the file after each test.
public void setup() throws Exception {
saveFile = new StorageFile(saveFolder.newFile("testSaveFile.txt").getPath());
addressBook = new AddressBook();
saveFile.save(addressBook);
logic = new Logic(saveFile, addressBook);
}
@Test
public void constructor() {
//Constructor is called in the setup() method which executes before every test, no need to call it here again.
//Confirm the last shown list is empty
assertEquals(Collections.emptyList(), logic.getLastShownList());
}
@Test
public void execute_invalid() throws Exception {
String invalidCommand = " ";
assertCommandBehavior(invalidCommand,
String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
/**
* Executes the command and confirms that the result message is correct.
* Both the 'address book' and the 'last shown list' are expected to be empty.
* @see #assertCommandBehavior(String, String, AddressBook, boolean, List)
*/
private void assertCommandBehavior(String inputCommand, String expectedMessage) throws Exception {
assertCommandBehavior(inputCommand, expectedMessage, AddressBook.empty(),false, Collections.emptyList());
}
/**
 * Executes the command and confirms that the result message is correct and
 * also confirms that the following three parts of the Logic object's state are as expected:<br>
 * - the internal address book data are same as those in the {@code expectedAddressBook} <br>
 * - the internal 'last shown list' matches the {@code expectedLastList} <br>
 * - the storage file content matches data in {@code expectedAddressBook} <br>
 *
 * @param inputCommand full command string fed to {@code logic.execute}
 * @param expectedMessage exact feedback expected in the CommandResult
 * @param expectedAddressBook expected in-memory and on-disk book state
 * @param isRelevantPersonsExpected whether the result should carry a person list
 * @param lastShownList expected 'last shown list' (and relevant persons, if any)
 */
private void assertCommandBehavior(String inputCommand,
String expectedMessage,
AddressBook expectedAddressBook,
boolean isRelevantPersonsExpected,
List<? extends ReadOnlyPerson> lastShownList) throws Exception {
//Execute the command
CommandResult r = logic.execute(inputCommand);
//Confirm the result contains the right data
assertEquals(expectedMessage, r.feedbackToUser);
assertEquals(r.getRelevantPersons().isPresent(), isRelevantPersonsExpected);
if(isRelevantPersonsExpected){
assertEquals(lastShownList, r.getRelevantPersons().get());
}
//Confirm the state of data is as expected
assertEquals(expectedAddressBook, addressBook);
assertEquals(lastShownList, logic.getLastShownList());
// Round-trip through storage to verify the file was actually updated.
assertEquals(addressBook, saveFile.load());
}
//@@author iamputradanish
@Test
public void execute_unknownCommandWord_forHQP() throws Exception {
String unknownCommand = "uicfhmowqewca";
Password.unlockHQP();
assertCommandBehavior(unknownCommand, HelpCommand.MESSAGE_ALL_USAGES);
Password.lockIsHQP();
}
@Test
public void execute_help_forHQP() throws Exception {
Password.unlockHQP();
assertCommandBehavior("help", HelpCommand.MESSAGE_ALL_USAGES);
Password.lockIsHQP();
}
@Test
public void execute_unknownCommandWord_forPO() throws Exception {
String unknownCommand = "uicfhmowqewca";
unlockPO();
assertCommandBehavior(unknownCommand, HelpCommand.MESSAGE_PO_USAGES);
Password.lockIsPO();
}
@Test
public void execute_help_forPO() throws Exception {
unlockPO();
assertCommandBehavior("help", HelpCommand.MESSAGE_PO_USAGES);
Password.lockIsPO();
}
//@@author
@Test
public void execute_exit() throws Exception {
assertCommandBehavior("shutdown", ExitCommand.MESSAGE_EXIT_ACKNOWEDGEMENT);
}
@Test
// "clear" on a populated book must empty it and report success.
public void execute_clear() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    // Populate with three distinct generated persons.
    for (int seed = 1; seed <= 3; seed++) {
        addressBook.addPerson(helper.generatePerson(seed));
    }
    assertCommandBehavior("clear", ClearCommand.MESSAGE_SUCCESS, AddressBook.empty(), false, Collections.emptyList());
}
//@@author muhdharun -reused
@Test
public void execute_add_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE);
assertCommandBehavior(
"add wrong args wrong args wrong args", expectedMessage);
assertCommandBehavior(
"add Valid Name s1234567a d/1980 p/123456 s/clear w/none", expectedMessage);
assertCommandBehavior(
"add Valid Name n/s1234567a 1980 p/123456 s/clear w/none", expectedMessage);
assertCommandBehavior(
"add Valid Name n/s1234567a d/1980 123456 s/clear w/none", expectedMessage);
assertCommandBehavior(
"add Valid Name n/s1234567a d/1980 p/123456 clear w/none", expectedMessage);
assertCommandBehavior(
"add Valid Name n/s1234567a d/1980 123456 s/clear none", expectedMessage);
}
@Test
public void execute_add_invalidPersonData() throws Exception {
assertCommandBehavior(
"add []\\[;] n/s1234567a d/1980 p/123456 s/clear w/none", Name.MESSAGE_NAME_CONSTRAINTS);
assertCommandBehavior(
"add Valid Name n/s123457a d/1980 p/123456 s/clear w/none", NRIC.MESSAGE_NAME_CONSTRAINTS);
assertCommandBehavior(
"add Valid Name n/s1234567a d/188 p/123456 s/clear w/none", DateOfBirth.MESSAGE_DATE_OF_BIRTH_CONSTRAINTS);
assertCommandBehavior(
"add Valid Name n/s1234567a d/1980 p/13456 s/clear w/none", PostalCode.MESSAGE_NAME_CONSTRAINTS);
assertCommandBehavior(
"add Valid Name n/s1234567a d/1980 p/123456 s/xc w/none o/rob", Offense.MESSAGE_OFFENSE_INVALID);
assertCommandBehavior(
"add Valid Name n/s1234567a d/1980 p/123456 s/wanted w/none o/none", Person.WANTED_FOR_WARNING);
}
//@@author
@Test
public void execute_add_successful() throws Exception {
// setup expectations
TestDataHelper helper = new TestDataHelper();
Person toBeAdded = helper.adam();
AddressBook expectedAB = new AddressBook();
expectedAB.addPerson(toBeAdded);
// execute command and verify result
assertCommandBehavior(helper.generateAddCommand(toBeAdded),
String.format(AddCommand.MESSAGE_SUCCESS, toBeAdded),
expectedAB,
false,
Collections.emptyList());
}
@Test
public void execute_addDuplicate_notAllowed() throws Exception {
// setup expectations
TestDataHelper helper = new TestDataHelper();
Person toBeAdded = helper.adam();
AddressBook expectedAB = new AddressBook();
expectedAB.addPerson(toBeAdded);
// setup starting state
addressBook.addPerson(toBeAdded); // person already in internal address book
// execute command and verify result
assertCommandBehavior(
helper.generateAddCommand(toBeAdded),
AddCommand.MESSAGE_DUPLICATE_PERSON,
expectedAB,
false,
Collections.emptyList());
}
@Test
public void execute_list_showsAllPersons() throws Exception {
// prepare expectations
TestDataHelper helper = new TestDataHelper();
AddressBook expectedAB = helper.generateAddressBook(false, false);
List<? extends ReadOnlyPerson> expectedList = expectedAB.getAllPersons().immutableListView();
// prepare address book state
helper.addToAddressBook(addressBook, false, false);
assertCommandBehavior("list",
Command.getMessageForPersonListShownSummary(expectedList),
expectedAB,
true,
expectedList);
}
/*
@Test
public void execute_view_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, ViewAllCommand.MESSAGE_USAGE);
assertCommandBehavior("view ", expectedMessage);
assertCommandBehavior("view arg not number", expectedMessage);
}
*/
/*@Test
public void execute_view_invalidIndex() throws Exception {
assertInvalidIndexBehaviorForCommand("view");
}*/
/**
* Confirms the 'invalid argument index number behaviour' for the given command
* targeting a single person in the last shown list, using visible index.
* @param commandWord to test assuming it targets a single person in the last shown list based on visible index.
*/
private void assertInvalidIndexBehaviorForCommand(String commandWord) throws Exception {
String expectedMessage = Messages.MESSAGE_INVALID_PERSON_DISPLAYED_INDEX;
TestDataHelper helper = new TestDataHelper();
List<Person> lastShownList = helper.generatePersonList(false, true);
logic.setLastShownList(lastShownList);
assertCommandBehavior(commandWord + " -1", expectedMessage, AddressBook.empty(), false, lastShownList);
assertCommandBehavior(commandWord + " 0", expectedMessage, AddressBook.empty(), false, lastShownList);
assertCommandBehavior(commandWord + " 3", expectedMessage, AddressBook.empty(), false, lastShownList);
}
private void assertInvalidCommandFormatBehaviorForCommand(String commandWord) throws Exception {
String expectedMessage = NRIC.MESSAGE_NAME_CONSTRAINTS;
TestDataHelper helper = new TestDataHelper();
List<Person> lastShownList = helper.generatePersonList(false, true);
logic.setLastShownList(lastShownList);
assertCommandBehavior(commandWord + " -1", expectedMessage, AddressBook.empty(), false, lastShownList);
assertCommandBehavior(commandWord + " 0", expectedMessage, AddressBook.empty(), false, lastShownList);
assertCommandBehavior(commandWord + " 3", expectedMessage, AddressBook.empty(), false, lastShownList);
}
//@Test
/*
public void execute_view_onlyShowsNonPrivate() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person p1 = helper.generatePerson(1);
Person p2 = helper.generatePerson(2);
List<Person> lastShownList = helper.generatePersonList(p1, p2);
AddressBook expectedAB = helper.generateAddressBook(lastShownList);
helper.addToAddressBook(addressBook, lastShownList);
logic.setLastShownList(lastShownList);
assertCommandBehavior("view 1",
String.format(ViewCommand.MESSAGE_VIEW_PERSON_DETAILS, p1.getAsTextHidePrivate()),
expectedAB,
false,
lastShownList);
assertCommandBehavior("view 2",
String.format(ViewCommand.MESSAGE_VIEW_PERSON_DETAILS, p2.getAsTextHidePrivate()),
expectedAB,
false,
lastShownList);
}
*/
/*
@Test
public void execute_tryToViewMissingPerson_errorMessage() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person p1 = helper.generatePerson(1);
Person p2 = helper.generatePerson(2);
List<Person> lastShownList = helper.generatePersonList(p1, p2);
AddressBook expectedAB = new AddressBook();
expectedAB.addPerson(p2);
addressBook.addPerson(p2);
logic.setLastShownList(lastShownList);
assertCommandBehavior("view 1",
Messages.MESSAGE_PERSON_NOT_IN_ADDRESSBOOK,
expectedAB,
false,
lastShownList);
}
*/
@Test
public void execute_request_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, RequestHelp.MESSAGE_USAGE);
assertCommandBehavior("request ", expectedMessage);
assertCommandBehavior("request gun", expectedMessage);
}
@Test
public void execute_request_invalidOffense() throws Exception {
String expectedMessage = String.format(Offense.MESSAGE_OFFENSE_INVALID);
assertCommandBehavior("request bobo help", expectedMessage);
assertCommandBehavior("request lala help", expectedMessage);
}
@Test
public void execute_viewAll_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, ViewAllCommand.MESSAGE_USAGE);
assertCommandBehavior("viewall ", expectedMessage);
assertCommandBehavior("viewall arg not number", expectedMessage);
}
@Test
public void execute_viewAll_invalidIndex() throws Exception {
assertInvalidIndexBehaviorForCommand("viewall");
}
@Test
public void execute_viewAll_alsoShowsPrivate() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person p1 = helper.generatePerson(1);
Person p2 = helper.generatePerson(2);
List<Person> lastShownList = helper.generatePersonList(p1, p2);
AddressBook expectedAB = helper.generateAddressBook(lastShownList);
helper.addToAddressBook(addressBook, lastShownList);
logic.setLastShownList(lastShownList);
assertCommandBehavior("viewall 1",
String.format(ViewAllCommand.MESSAGE_VIEW_PERSON_DETAILS, p1.getAsTextShowAll()),
expectedAB,
false,
lastShownList);
assertCommandBehavior("viewall 2",
String.format(ViewAllCommand.MESSAGE_VIEW_PERSON_DETAILS, p2.getAsTextShowAll()),
expectedAB,
false,
lastShownList);
}
@Test
public void execute_tryToViewAllPersonMissingInAddressBook_errorMessage() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person p1 = helper.generatePerson(1);
Person p2 = helper.generatePerson(2);
List<Person> lastShownList = helper.generatePersonList(p1, p2);
AddressBook expectedAB = new AddressBook();
expectedAB.addPerson(p1);
addressBook.addPerson(p1);
logic.setLastShownList(lastShownList);
assertCommandBehavior("viewall 2",
Messages.MESSAGE_PERSON_NOT_IN_ADDRESSBOOK,
expectedAB,
false,
lastShownList);
}
/*@Test
public void execute_delete_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE);
assertCommandBehavior("delete ", expectedMessage);
assertCommandBehavior("delete arg not number", expectedMessage);
}*/
@Test
public void execute_delete_invalidCommandFormat() throws Exception {
assertInvalidCommandFormatBehaviorForCommand("delete");
}
//@@author muhdharun -reused
@Test
public void execute_delete_removesCorrectPerson() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person p1 = helper.generatePerson(1);
Person p2 = helper.generatePerson(2);
Person p3 = helper.generatePerson(3);
List<Person> threePersons = helper.generatePersonList(p1, p2, p3);
AddressBook expectedAB = helper.generateAddressBook(threePersons);
expectedAB.removePerson(p2);
helper.addToAddressBook(addressBook, threePersons);
logic.setLastShownList(threePersons);
assertCommandBehavior("delete g9999992t",
String.format(DeleteCommand.MESSAGE_DELETE_PERSON_SUCCESS, p2),
expectedAB,
false,
threePersons);
}
@Test
public void execute_delete_missingInAddressBook() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person p1 = helper.generatePerson(1);
Person p2 = helper.generatePerson(2);
Person p3 = helper.generatePerson(3);
List<Person> threePersons = helper.generatePersonList(p1, p2, p3);
AddressBook expectedAB = helper.generateAddressBook(threePersons);
expectedAB.removePerson(p2);
helper.addToAddressBook(addressBook, threePersons);
addressBook.removePerson(p2);
logic.setLastShownList(threePersons);
assertCommandBehavior("delete g9999992t",
Messages.MESSAGE_PERSON_NOT_IN_ADDRESSBOOK,
expectedAB,
false,
threePersons);
}
//@@author
@Test
public void execute_edit_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE);
assertCommandBehavior("edit ", expectedMessage);
}
//@@author muhdharun -reused
@Test
public void execute_find_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE);
assertCommandBehavior("find S1234567A", expectedMessage);
}
@Test
public void execute_find_onlyMatchesFullNric() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person pTarget1 = helper.generatePersonWithNric("s1234567a");
Person pTarget2 = helper.generatePersonWithNric("s1234567b");
Person p1 = helper.generatePersonWithNric("s1234567c");
Person p2 = helper.generatePersonWithNric("s1234567d");
List<Person> fourPersons = helper.generatePersonList(p1, pTarget1, p2, pTarget2);
Person expectedPerson = pTarget2;
helper.addToAddressBook(addressBook, fourPersons);
String inputCommand = "find " + pTarget2.getNric().getIdentificationNumber();
CommandResult r = logic.execute(inputCommand);
assertEquals(Command.getMessageForPersonShownSummary(expectedPerson), r.feedbackToUser);
}
@Test
// An upper-case NRIC in "find" must be rejected with the invalid-format
// message, demonstrating that the search is case sensitive.
public void execute_find_isCaseSensitive() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    Person pTarget1 = helper.generatePersonWithNric("s1234567b");
    Person pTarget2 = helper.generatePersonWithNric("s1234567c");
    Person p1 = helper.generatePersonWithNric("s1234567d");
    Person p2 = helper.generatePersonWithNric("s1234567e");
    List<Person> fourPersons = helper.generatePersonList(p1, pTarget1, p2, pTarget2);
    helper.addToAddressBook(addressBook, fourPersons);
    // Fix: removed unused locals (expectedAB, expectedList) that were
    // computed but never asserted against.
    String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE);
    CommandResult r = logic.execute("find " + "S1234567B");
    assertEquals(expectedMessage, r.feedbackToUser);
}
//@@author muhdharun
@Test
public void execute_check_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, CheckCommand.MESSAGE_USAGE);
assertCommandBehavior("check S1234567A", expectedMessage);
assertCommandBehavior("check s12345a", expectedMessage);
assertCommandBehavior("check ", expectedMessage);
}
@Test
public void execute_check_validNric() throws Exception {
TestDataHelper helper = new TestDataHelper();
Person toBeAdded = helper.generateDummyPerson();
String nric = toBeAdded.getNric().getIdentificationNumber();
logic.execute(helper.generateAddCommand(toBeAdded));
CommandResult r = logic.execute("check " + nric);
String message = r.feedbackToUser.trim();
String expectedMessage = String.format(MESSAGE_TIMESTAMPS_LISTED_OVERVIEW,nric,0);
assertEquals(expectedMessage,message);
logic.execute("delete " + nric);
}
//@@author
// @Test
// public void execute_autocorrect_command() throws Exception {
// CommandResult r =
//
// }
//@@author iamputradanish
@Test
public void execute_unlockHQP() throws Exception {
String result = Password.unlockDevice("papa123",5);
assertEquals(String.format(Password.MESSAGE_WELCOME , Password.MESSAGE_HQP)
+ "\n" + Password.MESSAGE_ENTER_COMMAND , result);
Password.lockIsHQP();
}
@Test
public void execute_unlockPO() throws Exception {
String result = Password.unlockDevice("popo1",5);
assertEquals(String.format(Password.MESSAGE_WELCOME , Password.MESSAGE_PO + Password.MESSAGE_ONE)
+ "\n" + Password.MESSAGE_UNAUTHORIZED
+ "\n" + Password.MESSAGE_ENTER_COMMAND , result);
Password.lockIsPO();
}
@Test
public void execute_wrongPassword_firstTime() throws Exception{
Password.lockIsPO();
Password.lockIsHQP();
String wrongPassword = "<PASSWORD>";
int numberOfAttemptsLeft = 5;
Password.setWrongPasswordCounter(numberOfAttemptsLeft);
String result = Password.unlockDevice(wrongPassword, numberOfAttemptsLeft);
assertEquals(Password.MESSAGE_INCORRECT_PASSWORD
+ "\n" + String.format(Password.MESSAGE_ATTEMPTS_LEFT, numberOfAttemptsLeft)
+ "\n" + MESSAGE_ENTER_PASSWORD,result);
Password.setWrongPasswordCounter(5);
}
@Test
public void execute_wrongPassword_fourthTime() throws Exception{
Password.lockIsPO();
Password.lockIsHQP();
String wrongPassword = "<PASSWORD>";
int numberOfAttemptsLeft = 1;
Password.setWrongPasswordCounter(numberOfAttemptsLeft);
String result = Password.unlockDevice(wrongPassword, numberOfAttemptsLeft);
assertEquals(Password.MESSAGE_INCORRECT_PASSWORD
+ "\n" + String.format(Password.MESSAGE_ATTEMPT_LEFT, numberOfAttemptsLeft)
+ "\n" + Password.MESSAGE_SHUTDOWN_WARNING,result);
}
@Test
public void execute_wrongPassword_lastTime() throws Exception{
Password.lockIsPO();
Password.lockIsHQP();
String wrongPassword = "<PASSWORD>";
int numberOfAttemptsLeft = 0;
Password.setWrongPasswordCounter(numberOfAttemptsLeft);
String result = Password.unlockDevice(wrongPassword, numberOfAttemptsLeft);
assertEquals(Password.MESSAGE_SHUTDOWN, result);
}
@Test
public void execute_setWrongPasswordCounter_toPositiveNumber() {
int randomNumber = ThreadLocalRandom.current().nextInt(0, 6);
Password.setWrongPasswordCounter(randomNumber);
assertEquals(randomNumber, Password.getWrongPasswordCounter());
}
@Test
public void execute_setWrongPasswordCounter_toNegativeNumber() {
int randomNumber = ThreadLocalRandom.current().nextInt(-6, 0);
Password.setWrongPasswordCounter(randomNumber);
int result = Password.getWrongPasswordCounter();
assertEquals(0, result);
}
@Test
public void execute_unlockHQPUser(){
unlockHQP();
boolean result = Password.isHQPUser();
assertTrue(result);
}
@Test
public void execute_unlockPOUser(){
unlockPO();
boolean result = Password.isPO();
assertTrue(result);
}
@Test
public void execute_lockHQPUser(){
Password.lockIsHQP();
boolean result = Password.isHQPUser();
assertFalse(result);
}
@Test
public void execute_lockPOUser(){
Password.lockIsPO();
boolean result = Password.isPO();
assertFalse(result);
}
//@@author
    /**
     * A utility class to generate test data.
     * Produces {@code Person} fixtures, matching {@code add} command strings,
     * and pre-populated {@code AddressBook} instances for logic tests.
     */
    class TestDataHelper{
        //@@author muhdharun -reused
        /** Returns a fixed, fully-populated sample person ("adam"). */
        Person adam() throws Exception {
            Name name = new Name("<NAME>");
            NRIC nric = new NRIC("f1234567j");
            DateOfBirth dateOfBirth = new DateOfBirth("1900");
            PostalCode postalCode = new PostalCode("444444");
            Status status = new Status("xc");
            Offense wantedFor = new Offense();
            Offense tag1 = new Offense("drugs");
            Offense tag2 = new Offense("riot");
            Set<Offense> tags = new HashSet<>(Arrays.asList(tag1, tag2));
            return new Person(name, nric, dateOfBirth, postalCode, status, wantedFor, tags);
        }
        /**
         * Generates a valid person using the given seed.
         * Running this function with the same parameter values guarantees the returned person will have the same state.
         * Each unique seed will generate a unique Person object.
         *
         * @param seed used to generate the person data field values
         *
         */
        // NOTE(review): the NRIC is built as "g999999" + |seed| + "t"; for |seed| >= 10 the
        // digit count exceeds a 7-digit NRIC — presumably seeds stay single-digit. TODO confirm.
        Person generatePerson(int seed) throws Exception {
            return new Person(
                    new Name("Person " + seed),
                    new NRIC("g999999" + Math.abs(seed) + "t"),
                    new DateOfBirth(Integer.toString(seed + Integer.parseInt("1901"))),
                    new PostalCode("77777" + seed),
                    new Status("xc"),
                    new Offense(),
                    new HashSet<>(Arrays.asList(new Offense("theft" + Math.abs(seed)), new Offense("theft" + Math.abs(seed + 1))))
            );
        }
        //@@author muhdharun
        /** Returns a placeholder person with obviously fake field values. */
        Person generateDummyPerson() throws Exception {
            return new Person(
                    new Name("Not a human"),
                    new NRIC("f0000000z"),
                    new DateOfBirth("1900"),
                    new PostalCode("777777"),
                    new Status("xc"),
                    new Offense(),
                    new HashSet<>(Arrays.asList(new Offense("theft")))
            );
        }
        //@@author muhdharun -reused
        /** Generates the correct add command based on the person given */
        String generateAddCommand(Person p) {
            StringJoiner cmd = new StringJoiner(" ");
            cmd.add("add");
            cmd.add(p.getName().toString());
            cmd.add("n/" + p.getNric());
            cmd.add("d/" + p.getDateOfBirth().getDOB());
            cmd.add("p/" + p.getPostalCode());
            cmd.add("s/" + p.getStatus());
            cmd.add("w/" + p.getWantedFor().getOffense());
            Set<Offense> tags = p.getPastOffenses();
            // Each past offense becomes its own "o/" argument.
            for(Offense t: tags){
                cmd.add("o/" + t.getOffense());
            }
            return cmd.toString();
        }
        //@@author
        /**
         * Generates an AddressBook with auto-generated persons.
         * @param isPrivateStatuses flags to indicate if all contact details of respective persons should be set to
         *                          private. (One auto-generated person is created per flag.)
         */
        AddressBook generateAddressBook(Boolean... isPrivateStatuses) throws Exception{
            AddressBook addressBook = new AddressBook();
            addToAddressBook(addressBook, isPrivateStatuses);
            return addressBook;
        }
        /**
         * Generates an AddressBook based on the list of Persons given.
         */
        AddressBook generateAddressBook(List<Person> persons) throws Exception{
            AddressBook addressBook = new AddressBook();
            addToAddressBook(addressBook, persons);
            return addressBook;
        }
        /**
         * Adds auto-generated Person objects to the given AddressBook
         * @param addressBook The AddressBook to which the Persons will be added
         * @param isPrivateStatuses flags to indicate if all contact details of generated persons should be set to
         *                          private.
         */
        void addToAddressBook(AddressBook addressBook, Boolean... isPrivateStatuses) throws Exception{
            addToAddressBook(addressBook, generatePersonList(isPrivateStatuses));
        }
        /**
         * Adds the given list of Persons to the given AddressBook
         */
        void addToAddressBook(AddressBook addressBook, List<Person> personsToAdd) throws Exception{
            for(Person p: personsToAdd){
                addressBook.addPerson(p);
            }
        }
        /**
         * Creates a list of Persons based on the give Person objects.
         */
        List<Person> generatePersonList(Person... persons) throws Exception{
            List<Person> personList = new ArrayList<>();
            for(Person p: persons){
                personList.add(p);
            }
            return personList;
        }
        /**
         * Generates a list of Persons based on the flags.
         * @param isPrivateStatuses flags to indicate if all contact details of respective persons should be set to
         *                          private.
         */
        // NOTE(review): the flag values themselves are never read here — only the number of
        // flags matters (one seeded person per flag).
        List<Person> generatePersonList(Boolean... isPrivateStatuses) throws Exception{
            List<Person> persons = new ArrayList<>();
            int i = 1;
            for(Boolean p: isPrivateStatuses){
                persons.add(generatePerson(i++));
            }
            return persons;
        }
        /**
         * Generates a random NRIC of the form "s" + seven digits + "a".
         */
        //@@author muhdharun
        String generateRandomNric() {
            int min = 1111111;
            int max = 9999999;
            Random r = new Random();
            return "s"+Integer.toString(r.nextInt((max - min) + 1) + min)+"a";
        }
        /**
         * Generates a Person object with given nric. Other fields will have some dummy values.
         */
        Person generatePersonWithNric(String nric) throws Exception {
            return new Person(
                    new Name("Bob"),
                    new NRIC(nric),
                    new DateOfBirth("2005"),
                    new PostalCode("123456"),
                    new Status("xc"),
                    new Offense(),
                    Collections.singleton(new Offense("riot"))
            );
        }
        //@@author muhdharun -reused
        /**
         * Generates a Person object with given name. Other fields will have some dummy values.
         */
        Person generatePersonWithName(String name) throws Exception {
            String randomNric = generateRandomNric();
            return new Person(
                    new Name(name),
                    new NRIC(randomNric),
                    new DateOfBirth("2005"),
                    new PostalCode("123456"),
                    new Status("xc"),
                    new Offense(),
                    Collections.singleton(new Offense("riot"))
            );
        }
    }
}
|
zgmurder/system
|
src/lib/Client/models/statistics/trainer/TrainerStatistics.js
|
<gh_stars>0
import Parse from '../../../parse';
import parseUtils from '../../../../utils/parseUtils';
import { RoleName } from '../../../../Constants';
import Organization from '../../resource/Organization';
// 教练员数据统计
// const TrainerStatisticsSchema = new Schema({
// updateDate: { type: Date, required: true }, // date类型(每天0时)
// month: number, // 月
// year: number, // 年
// organization: { type: Schema.Types.ObjectId, ref: 'Organization' }, // 单位
// orgCode: String, // 单位编码,自动从单位继承
// parentOrg: { type: Schema.Types.ObjectId, ref: 'Organization' }, // 关联父组织单位
// standardCount: Number, // 四会数量
// excellentCount: Number, // 优秀数量
// pacesetterCount: Number, // 标兵数量
// officerCount: Number, // 干部数量
// trainerCourseCount: Number, // 已有教练员科目数
// requiredCourseCount: Number, // 应训科目数
// coverRate: Number, // 教员覆盖率
// standardRate: Number, // 四会率
// excellentRate: Number, // 优秀率
// pacesetterRate: Number, // 标兵率
// standardCourseCount: Number, // 四会教练员所教课目数
// standardCoursePassCount: Number, // 四会教练员所教课目达标数
// standardWorkRate: Number, // 四会责任落实率
// excellentCourseCount: Number, // 优秀教练员所教课目数
// excellentCourseGoodCount: Number, // 优秀教练员所教课目达标数
// excellentWorkRate: Number, // 优秀责任落实率
// pacesetterCourseCount: Number, // 标兵教练员所教课目数
// pacesetterCourseExcellentCount: Number, // 标兵教练员所教课目达标数
// pacesetterWorkRate: Number, // 标兵责任落实率
// workRate: Number // 责任落实率
// totalRate: Number // 总体落实率
// });
// Parse model for per-organization trainer statistics (see schema comment above).
const TrainerStatistics = Parse.Object.extend("TrainerStatistics", {
  // Instance properties go in an initialize method.
  initialize: function (attrs, options) {
  }
}, {
  // Class methods.

  /**
   * Builds a TrainerStatistics Parse object from a plain JS object,
   * resolving `organization` / `parentOrg` into Organization pointers
   * and attaching a public-read / admin-write ACL to new records.
   */
  fromObject: function (obj) {
    const item = new TrainerStatistics();
    parseUtils.object2ParseObject(obj, item);

    const org = obj.organization;
    if (org) {
      // orgCode is denormalized from the owning organization.
      item.set('orgCode', org.orgCode);
      item.set('organization', Organization.fromObject(parseUtils.fixObject(org)));
    }

    const parent = obj.parentOrg;
    if (parent) {
      item.set('parentOrg', Organization.fromObject(parseUtils.fixObject(parent)));
    }

    // Only brand-new objects (no objectId yet) get an ACL assigned here.
    if (!obj.objectId) {
      const acl = new Parse.ACL();
      acl.setPublicReadAccess(true);
      acl.setRoleWriteAccess(RoleName.Administrator, true);
      item.setACL(acl);
    }
    return item;
  },

  /** Pointer fields that queries should fetch alongside this object. */
  getIncludes: function () {
    return ['organization'];
  }
});
module.exports = TrainerStatistics;
|
palladius/gae-ferris-ricc
|
ferris/tests/test_controller.py
|
<gh_stars>1-10
from ferrisnose import AppEngineWebTest
import wtforms
import json
from ferris.core.controller import Controller, route, route_with
from ferris.core.json_util import DatastoreEncoder
from google.appengine.ext import ndb
# Decorators that make sure @route works correctly even for decorated functions
def std_decorator(f):
    """Plain pass-through decorator that deliberately does NOT copy f's
    metadata (no functools.wraps) — used to check routing still works."""
    def std_wrapper(*args, **kwargs):
        result = f(*args, **kwargs)
        return result
    return std_wrapper
def wraps_decorator(f):
    """Pass-through decorator that preserves f's metadata via functools.wraps,
    so @route sees the original function name."""
    from functools import wraps

    @wraps(f)
    def wraps_wrapper(*args, **kwargs):
        return f(*args, **kwargs)

    return wraps_wrapper
class TestModel(ndb.Model):
    # Minimal two-field datastore model used as a fixture by the controller tests.
    field1 = ndb.StringProperty()
    field2 = ndb.StringProperty()
class TestForm(wtforms.Form):
    # WTForms form mirroring TestModel; used to test request-data parsing
    # (unknown fields such as "field3" must be dropped by the form).
    field1 = wtforms.TextField()
    field2 = wtforms.TextField()
class TestComponent(object):
    """Minimal controller component used to verify component injection.

    The framework constructs it with the owning handler; ``present``
    returns a sentinel value the test asserts on.
    """

    def __init__(self, handler):
        self.handler = handler

    def present(self):
        """Return the fixed sentinel string 'si'."""
        return 'si'
class TestController(Controller):
    # Controller fixture exercising: prefixes, components, CRUD/REST routes,
    # the @route / @route_with decorators (including decorated methods),
    # URI generation, non-string return values, and form parsing.
    class Meta:
        prefixes = ('monster',)
        components = (TestComponent,)

    def list(self):
        return 'list'

    def view(self, key):
        return 'view'

    def add(self):
        return 'add'

    def edit(self, key):
        return 'edit'

    def delete(self, key):
        # Returning an int sets the HTTP status code (204 No Content).
        return 204

    def monster_list(self):
        # "monster_"-prefixed action, served under /monster/test_controller.
        return 'monster_list'

    # Decorated with a wraps-preserving decorator: route name stays "monkey".
    @route
    @wraps_decorator
    def monkey(self, key):
        return 'monkey-%s' % key

    # Decorated WITHOUT wraps: verifies routing still works for such methods.
    @route
    @std_decorator
    def monster_monkey(self, key):
        return 'monster_monkey-%s' % key

    @route_with('/test_controller/monet')
    def degas(self):
        return 'degas'

    @route
    def urls(self):
        # Asserts uri() generation for plain actions, prefixed actions,
        # keyed actions, and canonical "prefix:controller:action" names.
        assert self.uri(action='list') == '/test_controller'
        assert self.uri(prefix='monster', action='list') == '/monster/test_controller'
        assert self.uri(action='edit', key=12) == '/test_controller/%3A12/edit'
        assert self.uri('test_controller:list') == '/test_controller'
        assert self.uri('monster:test_controller:list') == '/monster/test_controller'
        assert self.uri('test_controller:monkey', key=13) == '/test_controller/monkey/13'
        return 'success'

    @route
    def component(self):
        # Injected components are reachable via self.components.<snake_name>.
        return self.components.test_component.present()

    @route
    def numeric(self):
        # Int return value -> HTTP 401 response.
        return 401

    @route
    def custom_content(self):
        # Explicitly set content type; the returned string becomes the body.
        self.response.content_type = 'application/json'
        return '[1, 2, 3]'

    @route
    def self_response(self):
        # Returning self.response uses it verbatim (status + body preserved).
        self.response.status_int = 401
        self.response.body = 'lolidk'
        return self.response

    @route
    def do_redirect(self):
        return self.redirect(self.uri('test_controller:list'))

    @route
    def monster_template_names(self):
        # Exposes the view's template search order for assertion.
        return str(self.meta.view.get_template_names())

    @route
    def form(self):
        # Parses the incoming request (form-encoded or JSON) into TestForm.
        form = TestForm()
        self.parse_request(container=form)
        return str(form.data)
class ControllerTest(AppEngineWebTest):
    # Integration tests for TestController, run against a webtest app.

    def setUp(self):
        super(ControllerTest, self).setUp()
        # Register TestController's routes on the test app's router.
        TestController._build_routes(self.testapp.app.router)

    def testCrudRoutes(self):
        # Canonical CRUD URLs: list/add plus keyed view/edit/delete.
        response = self.testapp.get('/test_controller')
        self.assertEqual(response.body, 'list')
        response = self.testapp.get('/test_controller/add')
        self.assertEqual(response.body, 'add')
        response = self.testapp.get('/test_controller/:abcd')
        self.assertEqual(response.body, 'view')
        response = self.testapp.get('/test_controller/:abcd/edit')
        self.assertEqual(response.body, 'edit')
        response = self.testapp.get('/test_controller/:abcd/delete', status=204)

    def testRestRoutes(self):
        # Same actions addressed through REST verbs (POST/PUT/DELETE).
        response = self.testapp.get('/test_controller')
        self.assertEqual(response.body, 'list')
        response = self.testapp.post('/test_controller')
        self.assertEqual(response.body, 'add')
        response = self.testapp.get('/test_controller/:abcd')
        self.assertEqual(response.body, 'view')
        response = self.testapp.put('/test_controller/:abcd')
        self.assertEqual(response.body, 'edit')
        response = self.testapp.delete('/test_controller/:abcd', status=204)

    def testPrefixRoutes(self):
        response = self.testapp.get('/monster/test_controller')
        self.assertEqual(response.body, 'monster_list')

    def testRouteDecorator(self):
        # @route must work for methods wrapped by other decorators,
        # both with and without functools.wraps.
        response = self.testapp.get('/test_controller/monkey/3')
        self.assertEqual(response.body, 'monkey-3')
        response = self.testapp.get('/monster/test_controller/monkey/3')
        self.assertEqual(response.body, 'monster_monkey-3')

    def testRouteWithDecorator(self):
        response = self.testapp.get('/test_controller/monet')
        self.assertEqual(response.body, 'degas')

    def testUrlGeneration(self):
        # The controller itself asserts each uri() result and returns 'success'.
        response = self.testapp.get('/test_controller/urls')
        self.assertEqual(response.body, 'success')

    def testComponents(self):
        response = self.testapp.get('/test_controller/component')
        self.assertEqual(response.body, 'si')

    def testReturnValues(self):
        # String -> body, int -> status code, explicit content type honored,
        # returning self.response used verbatim, redirect() -> 302 + Location.
        response = self.testapp.get('/test_controller')
        assert 'text/html' in response.headers['Content-Type']
        self.assertEqual(response.body, 'list')
        response = self.testapp.get('/test_controller/numeric', status=401)
        response = self.testapp.get('/test_controller/custom_content', status=200)
        assert 'application/json' in response.headers['Content-Type']
        response = self.testapp.get('/test_controller/self_response', status=401)
        self.assertEqual(response.body, 'lolidk')
        response = self.testapp.get('/test_controller/do_redirect', status=302)
        self.assertEqual(response.headers['Location'], 'http://localhost/test_controller')

    def testTemplateNames(self):
        # Prefixed request: prefixed template is searched before the plain one.
        response = self.testapp.get('/monster/test_controller/template_names')
        self.assertEqual(response.body, str(['test_controller/monster_template_names.html', 'test_controller/template_names.html']))

    def testFormDataProcessor(self):
        # Known fields are parsed; unknown fields are dropped; JSON bodies
        # are accepted when the Content-Type is application/json.
        data = {'field2': u'f2', 'field1': u'f1'}
        response = self.testapp.post('/test_controller/form', data)
        self.assertEqual(response.body, str(data))
        data['field3'] = u'f3'
        response = self.testapp.post('/test_controller/form', data)
        self.assertNotEqual(response.body, str(data), 'Field3 should not be in data')
        del data['field3']
        data = json.dumps(data, cls=DatastoreEncoder)
        response = self.testapp.post('/test_controller/form', data, headers={'Content-Type': 'application/json'})
        self.assertTrue('f1' in response)
        self.assertTrue('f2' in response)
|
rudylee/expo
|
ios/Pods/boost-for-react-native/boost/metaparse/v1/next_char.hpp
|
#ifndef BOOST_METAPARSE_V1_NEXT_CHAR_HPP
#define BOOST_METAPARSE_V1_NEXT_CHAR_HPP
// Copyright <NAME> (<EMAIL>) 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <boost/metaparse/v1/fwd/next_char.hpp>
#include <boost/mpl/tag.hpp>
namespace boost
{
  namespace metaparse
  {
    namespace v1
    {
      // Metafunction: advances the parsing position P past the character Ch.
      // Dispatches on the tag of P's type to the appropriate next_char_impl
      // specialization (declared in fwd/next_char.hpp).
      template <class P, class Ch>
      struct next_char :
        next_char_impl<
          typename boost::mpl::tag<typename P::type>::type
        >::template apply<typename P::type, typename Ch::type>
      {};
    }
  }
}
#endif
|
sudheerDev/origin
|
mobile/src/components/disclaimer.js
|
<gh_stars>0
'use strict'
import React from 'react'
import { View, Text, StyleSheet } from 'react-native'
const Disclaimer = props => (
<View style={styles.container}>
<Text style={styles.text}>{props.children}</Text>
</View>
)
export default Disclaimer
const styles = StyleSheet.create({
container: {
fontSize: 14,
paddingVertical: 10
},
text: {
textAlign: 'center',
color: '#98a7b4',
fontFamily: 'Lato'
}
})
|
jscrdev/LeetCode-in-Java
|
src.save/main/java/g1201_1300/s1227_airplane_seat_assignment_probability/Solution.java
|
package g1201_1300.s1227_airplane_seat_assignment_probability;
// #Medium #Dynamic_Programming #Math #Brainteaser #Probability_and_Statistics
// #2022_03_12_Time_1_ms_(15.63%)_Space_41.7_MB_(12.95%)
public class Solution {
    /**
     * Probability that the n-th passenger ends up in the n-th seat
     * (LeetCode 1227). The first passenger always gets seat 1, so the
     * probability is 1.0 when {@code n == 1}; for every larger n the
     * answer collapses to exactly 0.5.
     *
     * @param n number of passengers/seats (n >= 1)
     * @return 1.0 if n == 1, otherwise 0.5
     */
    public double nthPersonGetsNthSeat(int n) {
        return n == 1 ? 1.0D : 0.5D;
    }
}
|
kumasento/deacon
|
src/com/custom_computing_ic/maxdeep/kernel/conv2d/lib/BinaryPackKernel.java
|
<reponame>kumasento/deacon
package com.custom_computing_ic.maxdeep.kernel.conv2d.lib;
import com.maxeler.maxcompiler.v2.kernelcompiler.Kernel;
import com.maxeler.maxcompiler.v2.kernelcompiler.KernelParameters;
import com.maxeler.maxcompiler.v2.kernelcompiler.types.base.DFEType;
import com.maxeler.maxcompiler.v2.kernelcompiler.types.base.DFEVar;
import com.maxeler.maxcompiler.v2.kernelcompiler.types.composite.DFEVector;
import com.maxeler.maxcompiler.v2.kernelcompiler.types.composite.DFEVectorType;
public class BinaryPackKernel extends Kernel {
  public static final String INP_NAME = "PACK_INP";
  public static final String OUT_NAME = "PACK_OUT";
  public static final int BIT_WIDTH = 8;

  /**
   * Kernel that packs BIT_WIDTH single-bit input lanes into one
   * BIT_WIDTH-bit unsigned output word.
   */
  public BinaryPackKernel(KernelParameters params) {
    super(params);

    // Input: a vector of BIT_WIDTH 1-bit lanes; output: one uint of BIT_WIDTH bits.
    DFEVectorType<DFEVar> bitVectorType =
        new DFEVectorType<DFEVar>(dfeUInt(1), BIT_WIDTH);
    DFEType packedType = dfeUInt(BIT_WIDTH);

    DFEVector<DFEVar> bits = io.input(INP_NAME, bitVectorType);
    DFEVar packed = bits.pack().cast(packedType);
    io.output(OUT_NAME, packedType).connect(packed);
  }
}
|
sarvex/StyleCop
|
Tools/VisualStudioSDK/2013.RTM/VisualStudioIntegration/Common/Source/CPP/VSL/MockInterfaces/VSLMockIVsLibrary2Ex.h
|
/***************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
This code is licensed under the Visual Studio SDK license terms.
THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
This code is a part of the Visual Studio Library.
***************************************************************************/
#ifndef IVSLIBRARY2EX_H_10C49CA1_2F46_11D3_A504_00C04F5E0BA5
#define IVSLIBRARY2EX_H_10C49CA1_2F46_11D3_A504_00C04F5E0BA5
#if _MSC_VER > 1000
#pragma once
#endif
#include "vsshell80.h"
#pragma warning(push)
#pragma warning(disable : 4510) // default constructor could not be generated
#pragma warning(disable : 4610) // can never be instantiated - user defined constructor required
#pragma warning(disable : 4512) // assignment operator could not be generated
#pragma warning(disable : 6011) // Dereferencing NULL pointer (a NULL derference is just another kind of failure for a unit test
namespace VSL
{
// Stub implementation of IVsLibrary2Ex: every method returns E_NOTIMPL
// (via VSL_STDMETHOD_NOTIMPL). Derive from this when a test only needs
// the interface to exist, not to behave.
class IVsLibrary2ExNotImpl :
	public IVsLibrary2Ex
{
	VSL_DECLARE_NONINSTANTIABLE_BASE_CLASS(IVsLibrary2ExNotImpl)

public:

	typedef IVsLibrary2Ex Interface;

	STDMETHOD(ProfileSettingsChanged)()VSL_STDMETHOD_NOTIMPL

	STDMETHOD(GetNavInfoContainerData)(
		/*[in]*/ IVsNavInfo* /*pNavInfo*/,
		/*[out]*/ VSCOMPONENTSELECTORDATA* /*pcsdComponent*/)VSL_STDMETHOD_NOTIMPL

	STDMETHOD(DoIdle)()VSL_STDMETHOD_NOTIMPL

	STDMETHOD(SetContainerAsUnchanging)(
		/*[in]*/ VSCOMPONENTSELECTORDATA* /*pcsdComponent*/,
		/*[in]*/ BOOL /*fUnchanging*/)VSL_STDMETHOD_NOTIMPL
};
// Mock implementation of IVsLibrary2Ex: each method validates its arguments
// against a caller-supplied <Method>ValidValues struct and returns the
// recorded retValue (standard VSL mock pattern; machine-generated layout).
class IVsLibrary2ExMockImpl :
	public IVsLibrary2Ex,
	public MockBase
{
	VSL_DECLARE_NONINSTANTIABLE_BASE_CLASS(IVsLibrary2ExMockImpl)

public:

	VSL_DEFINE_MOCK_CLASS_TYPDEFS(IVsLibrary2ExMockImpl)

	typedef IVsLibrary2Ex Interface;

	// Expected values for a ProfileSettingsChanged call (no arguments).
	struct ProfileSettingsChangedValidValues
	{
		HRESULT retValue;
	};

	STDMETHOD(ProfileSettingsChanged)()
	{
		VSL_DEFINE_MOCK_METHOD_NOARGS(ProfileSettingsChanged)

		VSL_RETURN_VALIDVALUES();
	}

	// Expected in-arg and the out-value to hand back for GetNavInfoContainerData.
	struct GetNavInfoContainerDataValidValues
	{
		/*[in]*/ IVsNavInfo* pNavInfo;
		/*[out]*/ VSCOMPONENTSELECTORDATA* pcsdComponent;
		HRESULT retValue;
	};

	STDMETHOD(GetNavInfoContainerData)(
		/*[in]*/ IVsNavInfo* pNavInfo,
		/*[out]*/ VSCOMPONENTSELECTORDATA* pcsdComponent)
	{
		VSL_DEFINE_MOCK_METHOD(GetNavInfoContainerData)

		VSL_CHECK_VALIDVALUE_INTERFACEPOINTER(pNavInfo);

		VSL_SET_VALIDVALUE(pcsdComponent);

		VSL_RETURN_VALIDVALUES();
	}

	// Expected values for a DoIdle call (no arguments).
	struct DoIdleValidValues
	{
		HRESULT retValue;
	};

	STDMETHOD(DoIdle)()
	{
		VSL_DEFINE_MOCK_METHOD_NOARGS(DoIdle)

		VSL_RETURN_VALIDVALUES();
	}

	// Expected arguments for SetContainerAsUnchanging.
	struct SetContainerAsUnchangingValidValues
	{
		/*[in]*/ VSCOMPONENTSELECTORDATA* pcsdComponent;
		/*[in]*/ BOOL fUnchanging;
		HRESULT retValue;
	};

	STDMETHOD(SetContainerAsUnchanging)(
		/*[in]*/ VSCOMPONENTSELECTORDATA* pcsdComponent,
		/*[in]*/ BOOL fUnchanging)
	{
		VSL_DEFINE_MOCK_METHOD(SetContainerAsUnchanging)

		VSL_CHECK_VALIDVALUE_POINTER(pcsdComponent);

		VSL_CHECK_VALIDVALUE(fUnchanging);

		VSL_RETURN_VALIDVALUES();
	}
};
} // namespace VSL
#pragma warning(pop)
#endif // IVSLIBRARY2EX_H_10C49CA1_2F46_11D3_A504_00C04F5E0BA5
|
simone-sanfratello/peekaboo-server
|
src/plugins/cors.js
|
<reponame>simone-sanfratello/peekaboo-server
'use strict'
/**
* @param {Fastify} - fastify instance
*/
/**
 * Registers the fastify-cors plugin, forwarding the `cors` section of the
 * application settings verbatim as the plugin options.
 *
 * @param {Fastify} fastify - fastify instance
 * @param {Object} settings - application settings; `settings.cors` holds the CORS options
 */
const cors = (fastify, settings) => {
  fastify.register(require('fastify-cors'), settings.cors)
}

module.exports = cors
|
dinvlad/single_cell_portal_core
|
app/controllers/users/omniauth_callbacks_controller.rb
|
class Users::OmniauthCallbacksController < Devise::OmniauthCallbacksController
  ###
  #
  # This is the OAuth2 endpoint for receiving callbacks from Google after successful authentication
  #
  ###

  def google_oauth2
    # You need to implement the method below in your model (e.g. app/models/user.rb)
    @user = User.from_omniauth(request.env["omniauth.auth"])
    if @user.persisted?
      # Rotate the API auth token and refresh the OAuth access token on every sign-in.
      @user.update(authentication_token: Devise.friendly_token(32))
      @user.generate_access_token
      # update a user's FireCloud status (runs asynchronously via delayed_job)
      @user.delay.update_firecloud_status
      sign_in(@user)
      # Users who have not accepted the Terms of Service are forced through
      # the ToS page before being returned to their original destination.
      if TosAcceptance.accepted?(@user)
        redirect_to request.env['omniauth.origin'] || site_path
      else
        redirect_to accept_tos_path(@user.id)
      end
    else
      # Authentication failed or the user record could not be saved.
      redirect_to new_user_session_path
    end
  end
end
|
sxxlearn2rock/AgileJavaStudy
|
src/cn/sxx/agilejava/courseinfo/CourseSessionTest.java
|
<reponame>sxxlearn2rock/AgileJavaStudy<gh_stars>0
package cn.sxx.agilejava.courseinfo;
import static org.junit.Assert.*;
import java.util.Date;
import org.junit.Before;
import org.junit.Test;
import cn.sxx.agilejava.studentinfo.Student;
import cn.sxx.agilejava.util.DateUtil;
/** Tests CourseSession via the shared SessionTest suite plus a date check. */
public class CourseSessionTest extends SessionTest {

    /** Supplies the concrete session type exercised by the inherited tests. */
    @Override
    protected Session createSession(Course course, Date startDate) {
        return CourseSession.create(course, startDate);
    }

    @Test
    public void testCourseDates() {
        Session session =
                createSession(createCourse(), DateUtil.createDate(2003, 1, 6));
        // A session starting 2003-01-06 must end on 2003-04-25.
        assertEquals(DateUtil.createDate(2003, 4, 25), session.getEndDate());
    }

    private Course createCourse() {
        return new Course("ENGL", "101");
    }
}
|
cfsengineering/tigl
|
thirdparty/boost_1_67_0/boost/predef/make.h
|
<reponame>cfsengineering/tigl
/*
Copyright <NAME> 2008-2015
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
*/
#include <boost/predef/detail/test.h>
#ifndef BOOST_PREDEF_MAKE_H
#define BOOST_PREDEF_MAKE_H

/*
Shorthands for the common version number formats used by vendors...
All macros below expand to a BOOST_VERSION_NUMBER(major, minor, patch) triple.
*/

/*`
[heading `BOOST_PREDEF_MAKE_..` macros]

These set of macros decompose common vendor version number
macros which are composed version, revision, and patch digits.

The naming convention indicates:

* The base of the specified version number. "`BOOST_PREDEF_MAKE_0X`" for
hexadecimal digits, and "`BOOST_PREDEF_MAKE_10`" for decimal digits.

* The format of the vendor version number. Where "`V`" indicates the version digits,
"`R`" indicates the revision digits, "`P`" indicates the patch digits, and "`0`"
indicates an ignored digit.

Macros are:
*/

/*` `BOOST_PREDEF_MAKE_0X_VRP(V)` */
#define BOOST_PREDEF_MAKE_0X_VRP(V) BOOST_VERSION_NUMBER((V&0xF00)>>8,(V&0xF0)>>4,(V&0xF))

/*` `BOOST_PREDEF_MAKE_0X_VVRP(V)` */
#define BOOST_PREDEF_MAKE_0X_VVRP(V) BOOST_VERSION_NUMBER((V&0xFF00)>>8,(V&0xF0)>>4,(V&0xF))

/*` `BOOST_PREDEF_MAKE_0X_VRPP(V)` */
#define BOOST_PREDEF_MAKE_0X_VRPP(V) BOOST_VERSION_NUMBER((V&0xF000)>>12,(V&0xF00)>>8,(V&0xFF))

/*` `BOOST_PREDEF_MAKE_0X_VVRR(V)` */
#define BOOST_PREDEF_MAKE_0X_VVRR(V) BOOST_VERSION_NUMBER((V&0xFF00)>>8,(V&0xFF),0)

/*` `BOOST_PREDEF_MAKE_0X_VRRPPPP(V)` */
#define BOOST_PREDEF_MAKE_0X_VRRPPPP(V) BOOST_VERSION_NUMBER((V&0xF000000)>>24,(V&0xFF0000)>>16,(V&0xFFFF))

/*` `BOOST_PREDEF_MAKE_0X_VVRRP(V)` */
#define BOOST_PREDEF_MAKE_0X_VVRRP(V) BOOST_VERSION_NUMBER((V&0xFF000)>>12,(V&0xFF0)>>4,(V&0xF))

/*` `BOOST_PREDEF_MAKE_0X_VRRPP000(V)` */
#define BOOST_PREDEF_MAKE_0X_VRRPP000(V) BOOST_VERSION_NUMBER((V&0xF0000000)>>28,(V&0xFF00000)>>20,(V&0xFF000)>>12)

/*` `BOOST_PREDEF_MAKE_0X_VVRRPP(V)` */
#define BOOST_PREDEF_MAKE_0X_VVRRPP(V) BOOST_VERSION_NUMBER((V&0xFF0000)>>16,(V&0xFF00)>>8,(V&0xFF))

/*` `BOOST_PREDEF_MAKE_10_VPPP(V)` */
#define BOOST_PREDEF_MAKE_10_VPPP(V) BOOST_VERSION_NUMBER(((V)/1000)%10,0,(V)%1000)

/*` `BOOST_PREDEF_MAKE_10_VRP(V)` */
#define BOOST_PREDEF_MAKE_10_VRP(V) BOOST_VERSION_NUMBER(((V)/100)%10,((V)/10)%10,(V)%10)

/*` `BOOST_PREDEF_MAKE_10_VRP000(V)` */
#define BOOST_PREDEF_MAKE_10_VRP000(V) BOOST_VERSION_NUMBER(((V)/100000)%10,((V)/10000)%10,((V)/1000)%10)

/*` `BOOST_PREDEF_MAKE_10_VRPPPP(V)` */
#define BOOST_PREDEF_MAKE_10_VRPPPP(V) BOOST_VERSION_NUMBER(((V)/100000)%10,((V)/10000)%10,(V)%10000)

/*` `BOOST_PREDEF_MAKE_10_VRPP(V)` */
#define BOOST_PREDEF_MAKE_10_VRPP(V) BOOST_VERSION_NUMBER(((V)/1000)%10,((V)/100)%10,(V)%100)

/*` `BOOST_PREDEF_MAKE_10_VRR(V)` */
#define BOOST_PREDEF_MAKE_10_VRR(V) BOOST_VERSION_NUMBER(((V)/100)%10,(V)%100,0)

/*` `BOOST_PREDEF_MAKE_10_VRRPP(V)` */
#define BOOST_PREDEF_MAKE_10_VRRPP(V) BOOST_VERSION_NUMBER(((V)/10000)%10,((V)/100)%100,(V)%100)

/*` `BOOST_PREDEF_MAKE_10_VRR000(V)` */
#define BOOST_PREDEF_MAKE_10_VRR000(V) BOOST_VERSION_NUMBER(((V)/100000)%10,((V)/1000)%100,0)

/*` `BOOST_PREDEF_MAKE_10_VV00(V)` */
#define BOOST_PREDEF_MAKE_10_VV00(V) BOOST_VERSION_NUMBER(((V)/100)%100,0,0)

/*` `BOOST_PREDEF_MAKE_10_VVRR(V)` */
#define BOOST_PREDEF_MAKE_10_VVRR(V) BOOST_VERSION_NUMBER(((V)/100)%100,(V)%100,0)

/*` `BOOST_PREDEF_MAKE_10_VVRRPP(V)` */
#define BOOST_PREDEF_MAKE_10_VVRRPP(V) BOOST_VERSION_NUMBER(((V)/10000)%100,((V)/100)%100,(V)%100)

/*` `BOOST_PREDEF_MAKE_10_VVRRPPP(V)` */
#define BOOST_PREDEF_MAKE_10_VVRRPPP(V) BOOST_VERSION_NUMBER(((V)/100000)%100,((V)/1000)%100,(V)%1000)

/*` `BOOST_PREDEF_MAKE_10_VVRR0PP00(V)` */
#define BOOST_PREDEF_MAKE_10_VVRR0PP00(V) BOOST_VERSION_NUMBER(((V)/10000000)%100,((V)/100000)%100,((V)/100)%100)

/*` `BOOST_PREDEF_MAKE_10_VVRR0PPPP(V)` */
#define BOOST_PREDEF_MAKE_10_VVRR0PPPP(V) BOOST_VERSION_NUMBER(((V)/10000000)%100,((V)/100000)%100,(V)%10000)

/*` `BOOST_PREDEF_MAKE_10_VVRR00PP00(V)` */
#define BOOST_PREDEF_MAKE_10_VVRR00PP00(V) BOOST_VERSION_NUMBER(((V)/100000000)%100,((V)/1000000)%100,((V)/100)%100)

/*`
[heading `BOOST_PREDEF_MAKE_*..` date macros]

Date decomposition macros return a date in the relative to the 1970
Epoch date. If the month is not available, January 1st is used as the month and day.
If the day is not available, but the month is, the 1st of the month is used as the day.
*/

/*` `BOOST_PREDEF_MAKE_DATE(Y,M,D)` */
#define BOOST_PREDEF_MAKE_DATE(Y,M,D) BOOST_VERSION_NUMBER((Y)%10000-1970,(M)%100,(D)%100)

/*` `BOOST_PREDEF_MAKE_YYYYMMDD(V)` */
#define BOOST_PREDEF_MAKE_YYYYMMDD(V) BOOST_PREDEF_MAKE_DATE(((V)/10000)%10000,((V)/100)%100,(V)%100)

/*` `BOOST_PREDEF_MAKE_YYYY(V)` */
#define BOOST_PREDEF_MAKE_YYYY(V) BOOST_PREDEF_MAKE_DATE(V,1,1)

/*` `BOOST_PREDEF_MAKE_YYYYMM(V)` */
#define BOOST_PREDEF_MAKE_YYYYMM(V) BOOST_PREDEF_MAKE_DATE((V)/100,(V)%100,1)

#endif
|
ChenHuaYou/oldlinux-cpp-version
|
linux/lib/malloc.cc
|
<gh_stars>0
/*
* malloc.c --- a general purpose kernel memory allocator for Linux.
*
* Written by <NAME> (<EMAIL>), 11/29/91
*
* This routine is written to be as fast as possible, so that it
* can be called from the interrupt level.
*
* Limitations: maximum size of memory we can allocate using this routine
* is 4k, the size of a page in Linux.
*
* The general game plan is that each page (called a bucket) will only hold
* objects of a given size. When all of the object on a page are released,
* the page can be returned to the general free pool. When malloc() is
* called, it looks for the smallest bucket size which will fulfill its
* request, and allocate a piece of memory from that bucket pool.
*
* Each bucket has as its control block a bucket descriptor which keeps
* track of how many objects are in use on that page, and the free list
* for that page. Like the buckets themselves, bucket descriptors are
* stored on pages requested from get_free_page(). However, unlike buckets,
* pages devoted to bucket descriptor pages are never released back to the
* system. Fortunately, a system should probably only need 1 or 2 bucket
* descriptor pages, since a page can hold 256 bucket descriptors (which
* corresponds to 1 megabyte worth of bucket pages.) If the kernel is using
* that much allocated memory, it's probably doing something wrong. :-)
*
* Note: malloc() and free() both call get_free_page() and free_page()
* in sections of code where interrupts are turned off, to allow
* malloc() and free() to be safely called from an interrupt routine.
* (We will probably need this functionality when networking code,
* particularily things like NFS, is added to Linux.) However, this
* presumes that get_free_page() and free_page() are interrupt-level
* safe, which they may not be once paging is added. If this is the
* case, we will need to modify malloc() to keep a few unused pages
* "pre-allocated" so that it can safely draw upon those pages if
* it is called from an interrupt routine.
*
* Another concern is that get_free_page() should not sleep; if it
* does, the code is carefully ordered so as to avoid any race
* conditions. The catch is that if malloc() is called re-entrantly,
* there is a chance that unecessary pages will be grabbed from the
* system. Except for the pages for the bucket descriptor page, the
* extra pages will eventually get released back to the system, though,
* so it isn't all that bad.
*/
#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/system.h>
/* One bucket: a page dedicated to objects of a single fixed size. */
struct bucket_desc {	/* 16 bytes */
	void			*page;		/* the data page this bucket manages */
	struct bucket_desc	*next;		/* next bucket in the same size class */
	void			*freeptr;	/* head of the intrusive free list inside the page */
	unsigned short		refcnt;		/* objects currently allocated from this page */
	unsigned short		bucket_size;	/* object size served by this bucket */
};

/* One size class: its object size and the chain of its buckets. */
struct _bucket_dir {	/* 8 bytes */
	int			size;
	struct bucket_desc	*chain;
};

/*
 * The following is the where we store a pointer to the first bucket
 * descriptor for a given size.
 *
 * If it turns out that the Linux kernel allocates a lot of objects of a
 * specific size, then we may want to add that specific size to this list,
 * since that will allow the memory to be allocated more efficiently.
 * However, since an entire page must be dedicated to each specific size
 * on this list, some amount of temperance must be exercised here.
 *
 * Note that this list *must* be kept in order (ascending sizes; malloc()
 * picks the first entry whose size fits the request).
 */
struct _bucket_dir bucket_dir[] = {
	{ 16,	(struct bucket_desc *) 0},
	{ 32,	(struct bucket_desc *) 0},
	{ 64,	(struct bucket_desc *) 0},
	{ 128,	(struct bucket_desc *) 0},
	{ 256,	(struct bucket_desc *) 0},
	{ 512,	(struct bucket_desc *) 0},
	{ 1024,	(struct bucket_desc *) 0},
	{ 2048, (struct bucket_desc *) 0},
	{ 4096, (struct bucket_desc *) 0},
	{ 0,    (struct bucket_desc *) 0}};	/* End of list marker */

/*
 * This contains a linked list of free bucket descriptor blocks
 */
struct bucket_desc *free_bucket_desc = (struct bucket_desc *) 0;
/*
* This routine initializes a bucket description page.
*/
/*
 * Allocates one page of fresh bucket descriptors and links them all onto
 * the free_bucket_desc list. Panics if no page is available.
 */
static inline void init_bucket_desc()
{
	struct bucket_desc	*bdesc, *first;
	int	i;

	first = bdesc = (struct bucket_desc *) get_free_page();
	if (!bdesc)
		panic("Out of memory in init_bucket_desc()");
	/* Chain every descriptor on the page to the one after it. */
	for (i = PAGE_SIZE/sizeof(struct bucket_desc); i > 1; i--) {
		bdesc->next = bdesc+1;
		bdesc++;
	}
	/*
	 * This is done last, to avoid race conditions in case
	 * get_free_page() sleeps and this routine gets called again....
	 */
	bdesc->next = free_bucket_desc;
	free_bucket_desc = first;
}
/*
 * Allocates len bytes (up to PAGE_SIZE) from the smallest bucket size that
 * fits. Interrupt-safe: the free-list manipulation runs with interrupts
 * disabled (cli/sti). Panics on oversized requests or out-of-memory.
 */
void *malloc(unsigned int len)
{
	struct _bucket_dir	*bdir;
	struct bucket_desc	*bdesc;
	void			*retval;

	/*
	 * First we search the bucket_dir to find the right bucket chain
	 * for this request.
	 */
	for (bdir = bucket_dir; bdir->size; bdir++)
		if (bdir->size >= (int)len)
			break;
	if (!bdir->size) {
		/* Hit the end-of-list marker: request exceeds the largest bucket. */
		printk("malloc called with impossibly large argument (%d)\n",
			len);
		panic("malloc: bad arg");
	}
	/*
	 * Now we search for a bucket descriptor which has free space
	 */
	cli();	/* Avoid race conditions */
	for (bdesc = bdir->chain; bdesc; bdesc = bdesc->next)
		if (bdesc->freeptr)
			break;
	/*
	 * If we didn't find a bucket with free space, then we'll
	 * allocate a new one.
	 */
	if (!bdesc) {
		char		*cp;
		int		i;

		if (!free_bucket_desc)
			init_bucket_desc();
		bdesc = free_bucket_desc;
		free_bucket_desc = bdesc->next;
		bdesc->refcnt = 0;
		bdesc->bucket_size = bdir->size;
		bdesc->page = bdesc->freeptr = (void *) (cp = (char *)get_free_page());
		if (!cp)
			panic("Out of memory in kernel malloc()");
		/* Set up the chain of free objects: each free slot's first
		   word points to the next slot on the page. */
		for (i=PAGE_SIZE/bdir->size; i > 1; i--) {
			*((char **) cp) = cp + bdir->size;
			cp += bdir->size;
		}
		*((char **) cp) = 0;	/* last slot terminates the chain */
		bdesc->next = bdir->chain;	/* OK, link it in! */
		bdir->chain = bdesc;
	}
	/* Pop the first free slot off the bucket's free list. */
	retval = (void *) bdesc->freeptr;
	bdesc->freeptr = *((void **) retval);
	bdesc->refcnt++;
	sti();	/* OK, we're safe again */
	return(retval);
}
/*
* Here is the free routine. If you know the size of the object that you
* are freeing, then free_s() will use that information to speed up the
* search for the bucket descriptor.
*
* We will #define a macro so that "free(x)" is becomes "free_s(x, 0)"
*/
/*
 * Frees obj back to its bucket. If size is non-zero, size classes smaller
 * than it are skipped during the search; passing 0 searches every class.
 * When a page's refcnt drops to zero, the page is returned to the system
 * and its descriptor goes back on the free descriptor list.
 * Panics if obj's page is not found in any bucket chain.
 */
void free_s(void *obj, int size)
{
	void		*page;
	struct _bucket_dir	*bdir;
	struct bucket_desc	*bdesc, *prev;
	bdesc = prev = 0;
	/* Calculate what page this object lives in */
	page = (void *)  ((unsigned long) obj & 0xfffff000);
	/* Now search the buckets looking for that page */
	for (bdir = bucket_dir; bdir->size; bdir++) {
		prev = 0;
		/* If size is zero then this conditional is always false */
		if (bdir->size < size)
			continue;
		for (bdesc = bdir->chain; bdesc; bdesc = bdesc->next) {
			if (bdesc->page == page)
				goto found;
			prev = bdesc;
		}
	}
	panic("Bad address passed to kernel free_s()");
found:
	cli(); /* To avoid race conditions */
	/* Push obj onto the bucket's intrusive free list. */
	*((void **)obj) = bdesc->freeptr;
	bdesc->freeptr = obj;
	bdesc->refcnt--;
	if (bdesc->refcnt == 0) {
		/*
		 * We need to make sure that prev is still accurate.  It
		 * may not be, if someone rudely interrupted us....
		 */
		if ((prev && (prev->next != bdesc)) ||
		    (!prev && (bdir->chain != bdesc)))
			for (prev = bdir->chain; prev; prev = prev->next)
				if (prev->next == bdesc)
					break;
		/* Unlink bdesc from its chain, then release the page and
		   recycle the descriptor. */
		if (prev)
			prev->next = bdesc->next;
		else {
			if (bdir->chain != bdesc)
				panic("malloc bucket chains corrupted");
			bdir->chain = bdesc->next;
		}
		free_page((unsigned long) bdesc->page);
		bdesc->next = free_bucket_desc;
		free_bucket_desc = bdesc;
	}
	sti();
	return;
}
|
danbulant/node-x11
|
examples/windowmanager/wm.js
|
<reponame>danbulant/node-x11
var x11 = require('../../lib');
var EventEmitter = require('events').EventEmitter;
var X, root, white;
var events = x11.eventMask.Button1Motion|x11.eventMask.ButtonPress|x11.eventMask.ButtonRelease|x11.eventMask.SubstructureNotify|x11.eventMask.SubstructureRedirect|x11.eventMask.Exposure;
var frames = {};
var dragStart = null;
/**
 * Take over management of client window `wid`: create a frame window with a
 * gradient title bar, reparent the client into it, and wire up drag-to-move.
 * Windows with the override-redirect attribute (menus, tooltips) are mapped
 * unmanaged, as the X11 convention requires.
 */
function ManageWindow(wid)
{
    console.log("MANAGE WINDOW: " + wid);
    X.GetWindowAttributes(wid, function(err, attrs) {
        // Robustness fix: on error `attrs` is undefined and reading attrs[8]
        // would throw inside the event loop.
        if (err || !attrs) {
            console.error("GetWindowAttributes failed for " + wid, err);
            return;
        }
        if (attrs[8]) // override-redirect flag
        {
            // don't manage
            console.log("don't manage");
            X.MapWindow(wid);
            return;
        }
        var fid = X.AllocID();
        frames[fid] = 1;
        // Place the frame at a random position on the root window.
        var winX, winY;
        winX = parseInt(Math.random()*300);
        winY = parseInt(Math.random()*300);
        X.GetGeometry(wid, function(err, clientGeom) {
            console.log("window geometry: ", clientGeom);
            // Frame is 2px wider on each side and 24px taller (title bar).
            var width = clientGeom.width + 4;
            var height = clientGeom.height + 24;
            console.log("CreateWindow", fid, root, winX, winY, width, height);
            X.CreateWindow(fid, root, winX, winY, width, height, 0, 0, 0, 0,
               {
                  backgroundPixel: white,
                  eventMask: events
               });
            // Gradient used to paint the 24px title-bar area.
            var bggrad = X.AllocID();
            X.Render.LinearGradient(bggrad, [0,0], [0,24],
                [
                     [0, [0,0,0xffff,0xffffff ] ],
                     [1, [0x00ff, 0xff00, 0, 0xffffff] ]
                ]);
            var framepic = X.AllocID();
            X.Render.CreatePicture(framepic, fid, X.Render.rgb24);
            var ee = new EventEmitter();
            X.event_consumers[fid] = ee;
            ee.on('event', function(ev)
            {
                console.log(['event', ev]);
                if (ev.type === 17) // DestroyNotify
                {
                    X.DestroyWindow(fid);
                } else if (ev.type == 4) { // ButtonPress: start drag
                    dragStart = { rootx: ev.rootx, rooty: ev.rooty, x: ev.x, y: ev.y, winX: winX, winY: winY };
                } else if (ev.type == 5) { // ButtonRelease: end drag
                    dragStart = null;
                } else if (ev.type == 6) { // MotionNotify: move frame
                    // Fix: a MotionNotify delivered after ButtonRelease (or
                    // before any press) used to dereference null dragStart.
                    if (!dragStart)
                        return;
                    winX = dragStart.winX + ev.rootx - dragStart.rootx;
                    winY = dragStart.winY + ev.rooty - dragStart.rooty;
                    X.MoveWindow(fid, winX, winY);
                } else if (ev.type == 12) { // Expose: repaint the title bar
                    X.Render.Composite(3, bggrad, 0, framepic, 0, 0, 0, 0, 0, 0, width, height);
                }
            });
            // Keep the client alive if this WM dies, then reparent it into
            // the frame below the title bar and show both windows.
            X.ChangeSaveSet(1, wid);
            X.ReparentWindow(wid, fid, 1, 21);
            console.log("MapWindow", fid);
            X.MapWindow(fid);
            X.MapWindow(wid);
        });
    });
}
// Connect to the X server, claim SubstructureRedirect on the root window
// (i.e. become the window manager), adopt already-existing top-level
// windows, and paint a gradient background on the root.
x11.createClient(function(err, display) {
    X = display.client;
    X.require('render', function(err, Render) {
        X.Render = Render;
        root = display.screen[0].root;
        white = display.screen[0].white_pixel;
        console.log('root = ' + root);
        X.ChangeWindowAttributes(root, { eventMask: x11.eventMask.Exposure|x11.eventMask.SubstructureRedirect }, function(err) {
            // Robustness fix: guard err before reading .error.  Error 10 is
            // BadAccess: another WM already owns SubstructureRedirect.
            if (err && err.error == 10)
            {
                console.error('Error: another window manager already running.');
                process.exit(1);
            }
        });
        // Adopt windows that existed before the WM started.
        X.QueryTree(root, function(err, tree) {
            tree.children.forEach(ManageWindow);
        });
        // Root background gradient (alternative gradients kept for reference).
        X.bggrad = X.AllocID();
        Render.LinearGradient(X.bggrad, [-10,0], [0,1000],
            //RenderRadialGradient(pic_grad, [0,0], [1000,100], 10, 1000,
            //RenderConicalGradient(pic_grad, [250,250], 360,
            [
                 [0, [0,0,0,0xffffff ] ],
                 //[0.1, [0xfff, 0, 0xffff, 0x1000] ] ,
                 //[0.25, [0xffff, 0, 0xfff, 0x3000] ] ,
                 //[0.5, [0xffff, 0, 0xffff, 0x4000] ] ,
                 [1, [0xffff, 0xffff, 0, 0xffffff] ]
            ]);
        X.rootpic = X.AllocID();
        Render.CreatePicture(X.rootpic, root, Render.rgb24);
    })
}).on('error', function(err) {
    console.error(err);
}).on('event', function(ev) {
    console.log(ev);
    if (ev.type === 20) // MapRequest
    {
        if (!frames[ev.wid])
            ManageWindow(ev.wid);
        return;
    } else if (ev.type === 23) // ConfigureRequest
    {
        X.ResizeWindow(ev.wid, ev.width, ev.height);
    } else if (ev.type === 12) { // Expose: repaint the root gradient
        X.Render.Composite(3, X.bggrad, 0, X.rootpic, 0, 0, 0, 0, 0, 0, 1000, 1000);
    }
    console.log(ev);
});
|
Apereo-Learning-Analytics-Initiative/Larissa
|
src/test/java/nl/uva/larissa/repository/couchdb/ITAgentQuery.java
|
<filename>src/test/java/nl/uva/larissa/repository/couchdb/ITAgentQuery.java<gh_stars>1-10
package nl.uva.larissa.repository.couchdb;
import static org.junit.Assert.*;
import java.net.MalformedURLException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import nl.uva.larissa.CouchDbConnectorFactory;
import nl.uva.larissa.json.model.Agent;
import nl.uva.larissa.json.model.IFI;
import nl.uva.larissa.json.model.StatementResult;
import nl.uva.larissa.repository.DuplicateIdException;
import nl.uva.larissa.repository.StatementFilter;
import nl.uva.larissa.repository.StatementFilterUtil;
import nl.uva.larissa.repository.UnknownStatementException;
import nl.uva.larissa.repository.VoidingTargetException;
import nl.uva.larissa.repository.couchdb.CouchDbStatementRepository.QueryStrategy;
import org.apache.abdera.i18n.iri.IRI;
import org.ektorp.http.HttpClient;
import org.ektorp.http.StdHttpClient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Integration test for the CouchDB-backed statement repository's
 * agent/activity query.  Requires a CouchDB server on localhost:5984;
 * a throwaway database is created before and deleted after each test.
 */
public class ITAgentQuery {
	static CouchDbStatementRepository repository;
	static HttpClient httpClient;
	// Server under test and the temporary database name used per run.
	private static String SERVER_URL = "http://localhost:5984";
	private static String DB_ID = "agentquerytest";
	@BeforeClass
	public static void beforeClass() throws MalformedURLException {
		httpClient = new StdHttpClient.Builder().url(SERVER_URL).build();
		repository = new CouchDbStatementRepository(
				new CouchDbConnectorFactory().createConnector(SERVER_URL,
						DB_ID, 20), new QueryResolver());
	}
	@AfterClass
	public static void afterClass() {
		repository.shutdown();
		httpClient.shutdown();
	}
	@Before
	public void createDb() {
		repository.create();
	}
	@After
	public void deleteDb() {
		httpClient.delete(SERVER_URL + "/" + DB_ID);
	}
	/**
	 * Exercises paging ("more" URLs), the since-filter, and ascending vs
	 * descending ordering of the agent/activity query.
	 */
	@Test
	public void testQuery() throws DuplicateIdException,
			VoidingTargetException, UnknownStatementException {
		// the order is important! determines stored-value ordering
		/*
		 * equivalent in CouchDB map [A,A,3] [A,K,1] [A,S,4] [A,K,5] [N,S,2]
		 */
		List<DbKey> dbKeys = Arrays.asList(new DbKey("<EMAIL>", "Klimt"),
				new DbKey("<EMAIL>", "Slingert"), new DbKey(
						"<EMAIL>", "Aapt"), new DbKey("<EMAIL>",
						"Slingert"), new DbKey("<EMAIL>", "Klimt"));
		populateDb(dbKeys);
		StatementResultQuery query = new AgentActivityQuery();
		StatementFilter filter = new StatementFilter();
		filter.setAgent(getAgent("<EMAIL>"));
		filter.setLimit(1);
		filter.setAscending(true);
		// First page: exactly one statement plus a "more" URL.
		StatementResult result = query.getQueryResult(
				repository.getConnector(), filter, QueryStrategy.NORMAL);
		assertEquals(1, result.getStatements().size());
		String firstMore = result.getMore();
		assertNotNull(firstMore);
		StatementFilter moreFilter;
		// Follow two intermediate pages; each still advertises more data.
		for (int i = 0; i < 2; i++) {
			moreFilter = StatementFilterUtil.fromMoreUrl(result.getMore());
			result = getResult(query, moreFilter);
			assertEquals(1, result.getStatements().size());
			assertNotNull(result.getMore());
		}
		// Final page: last statement and an empty "more" marker.
		moreFilter = StatementFilterUtil.fromMoreUrl(result.getMore());
		result = getResult(query, moreFilter);
		assertEquals(1, result.getStatements().size());
		assertEquals("", result.getMore());
		// Since-filter anchored at the third stored statement's timestamp.
		Date firstDbStatDate = repository
				.getStatement(dbKeys.get(2).getStatement().getId())
				.getStatements().get(0).getStored();
		filter.setSince(firstDbStatDate);
		filter.setLimit(null);
		result = getResult(query, filter);
		assertEquals(3, result.getStatements().size());
		assertEquals("order is ascending",
				dbKeys.get(4).getStatement().getId(), result.getStatements()
						.get(2).getId());
		assertEquals("", result.getMore());
		// Same window, reversed ordering.
		filter.setAscending(false);
		result = getResult(query, filter);
		assertEquals(3, result.getStatements().size());
		assertEquals("order is descending", dbKeys.get(2).getStatement()
				.getId(), result.getStatements().get(2).getId());
	}
	/** Runs `query` against the repository connector with the NORMAL strategy. */
	private StatementResult getResult(StatementResultQuery query,
			StatementFilter filter) {
		return query.getQueryResult(repository.getConnector(), filter,
				QueryStrategy.NORMAL);
	}
	/** Stores each DbKey's statement; insertion order fixes stored-value order. */
	private void populateDb(List<DbKey> asList) throws DuplicateIdException,
			VoidingTargetException, UnknownStatementException {
		for (DbKey dbKey : asList) {
			repository.storeStatement(dbKey.getStatement());
		}
	}
	/** Builds an Agent identified by the given mbox email address. */
	static Agent getAgent(String email) {
		Agent agent = new Agent();
		agent.setIdentifier(new IFI());
		agent.getIdentifier().setMbox(new IRI("mailto:" + email));
		return agent;
	}
}
|
thomas-mueller/clipl
|
clipl/analysis_modules/scaleerrors.py
|
# -*- coding: utf-8 -*-
import logging
import clipl.utility.logger as logger
log = logging.getLogger(__name__)
import ROOT
import clipl.analysisbase as analysisbase
class ScaleErrors(analysisbase.AnalysisBase):
	"""Scale the (statistical) errors of histograms or graphs in place.

	Each nick in --scale-error-nicks has its bin/point errors multiplied by
	the matching entry of --scale-error-factors.  The default factor 0.0
	deliberately zeroes all errors.

	NOTE(review): uses xrange, i.e. this module targets Python 2.
	"""
	def __init__(self):
		super(ScaleErrors, self).__init__()
	def modify_argument_parser(self, parser, args):
		# Register the module's command-line options.
		super(ScaleErrors, self).modify_argument_parser(parser, args)
		self.ScaleErrors_options = parser.add_argument_group("ScaleErrors options")
		self.ScaleErrors_options.add_argument("--scale-error-nicks", type=str, nargs="+",
				help="Nick names of the histograms/graphs to be modified in place. [Default: modify all histograms]")
		self.ScaleErrors_options.add_argument("--scale-error-factors", type=float, nargs="+", default=[0.0],
				help="Scale factors for errors. [Default: %(default)s]")
	def prepare_args(self, parser, plotData):
		# Default to all nicks, then zip-align nicks with factors
		# (prepare_list_args pads the shorter list).
		super(ScaleErrors, self).prepare_args(parser, plotData)
		if plotData.plotdict["scale_error_nicks"] is None:
			plotData.plotdict["scale_error_nicks"] = plotData.plotdict["nicks"]
		self.prepare_list_args(plotData, ["scale_error_nicks", "scale_error_factors"])
	def run(self, plotData=None):
		# Apply the per-nick scale factor to each registered ROOT object.
		super(ScaleErrors, self).run(plotData)
		for nick, scale_factor in zip(plotData.plotdict["scale_error_nicks"], plotData.plotdict["scale_error_factors"]):
			root_object = plotData.plotdict["root_objects"][nick]
			ScaleErrors.scale_errors(root_object, scale_factor)
	@staticmethod
	def scale_errors(root_object, scale_factor=0.0):
		"""Multiply every bin/point error of `root_object` by `scale_factor`.

		Supports TH1-family histograms (all three axes via global bins) and
		1D TGraph variants; TGraph2D is explicitly excluded.
		"""
		if isinstance(root_object, ROOT.TH1): # and not isinstance(root_object, ROOT.TProfile):
			for x_bin in xrange(1, root_object.GetNbinsX()+1):
				for y_bin in xrange(1, root_object.GetNbinsY()+1):
					for z_bin in xrange(1, root_object.GetNbinsZ()+1):
						global_bin = root_object.GetBin(x_bin, y_bin, z_bin)
						root_object.SetBinError(global_bin, scale_factor*root_object.GetBinError(global_bin))
		elif isinstance(root_object, ROOT.TGraph) and (not isinstance(root_object, ROOT.TGraph2D)):
			for point in xrange(root_object.GetN()):
				# Asymmetric errors first: TGraphAsymmErrors subclasses
				# TGraphErrors, so this isinstance order matters.
				if isinstance(root_object, ROOT.TGraphAsymmErrors):
					root_object.SetPointError(point, scale_factor*root_object.GetErrorXlow(point), scale_factor*root_object.GetErrorXhigh(point),
							scale_factor*root_object.GetErrorYlow(point), scale_factor*root_object.GetErrorYhigh(point))
				elif isinstance(root_object, ROOT.TGraphErrors):
					root_object.SetPointError(point, scale_factor*root_object.GetErrorX(point), scale_factor*root_object.GetErrorY(point))
|
sjj3086786/aliyun-openapi-java-sdk
|
aliyun-java-sdk-linkwan/src/main/java/com/aliyuncs/linkwan/transform/v20181230/ListOwnedJoinPermissionsResponseUnmarshaller.java
|
<filename>aliyun-java-sdk-linkwan/src/main/java/com/aliyuncs/linkwan/transform/v20181230/ListOwnedJoinPermissionsResponseUnmarshaller.java
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.linkwan.transform.v20181230;
import java.util.ArrayList;
import java.util.List;
import com.aliyuncs.linkwan.model.v20181230.ListOwnedJoinPermissionsResponse;
import com.aliyuncs.linkwan.model.v20181230.ListOwnedJoinPermissionsResponse.Data;
import com.aliyuncs.linkwan.model.v20181230.ListOwnedJoinPermissionsResponse.Data.JoinPermission;
import java.util.Map;
import com.aliyuncs.transform.UnmarshallerContext;
/**
 * Unmarshals a {@link ListOwnedJoinPermissionsResponse} from the parsed wire
 * representation held by the {@link UnmarshallerContext}.
 *
 * <p>Note: this class follows the SDK's generated-unmarshaller pattern; keep
 * any manual change in sync if the generator is re-run.
 */
public class ListOwnedJoinPermissionsResponseUnmarshaller {

	public static ListOwnedJoinPermissionsResponse unmarshall(ListOwnedJoinPermissionsResponse listOwnedJoinPermissionsResponse, UnmarshallerContext context) {

		listOwnedJoinPermissionsResponse.setRequestId(context.stringValue("ListOwnedJoinPermissionsResponse.RequestId"));
		listOwnedJoinPermissionsResponse.setSuccess(context.booleanValue("ListOwnedJoinPermissionsResponse.Success"));

		Data data = new Data();
		data.setTotalCount(context.longValue("ListOwnedJoinPermissionsResponse.Data.TotalCount"));

		// Hoist the invariant list length out of the loop condition:
		// lengthValue() re-reads the parsed response on every call.
		int length = context.lengthValue("ListOwnedJoinPermissionsResponse.Data.List.Length");
		// Presize the list; guard against a negative sentinel length.
		List<JoinPermission> list = new ArrayList<JoinPermission>(Math.max(length, 0));
		for (int i = 0; i < length; i++) {
			JoinPermission joinPermission = new JoinPermission();
			joinPermission.setJoinPermissionId(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].JoinPermissionId"));
			joinPermission.setRenterAliyunId(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].RenterAliyunId"));
			joinPermission.setJoinEui(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].JoinEui"));
			joinPermission.setFreqBandPlanGroupId(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].FreqBandPlanGroupId"));
			joinPermission.setClassMode(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].ClassMode"));
			joinPermission.setAuthState(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].AuthState"));
			joinPermission.setEnabled(context.booleanValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].Enabled"));
			joinPermission.setNodesCnt(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].NodesCnt"));
			joinPermission.setDataDispatchDestination(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].DataDispatchDestination"));
			joinPermission.setRxDailySum(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].RxDailySum"));
			joinPermission.setRxMonthSum(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].RxMonthSum"));
			joinPermission.setTxDailySum(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].TxDailySum"));
			joinPermission.setTxMonthSum(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].TxMonthSum"));
			joinPermission.setCreateMillis(context.longValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].CreateMillis"));
			joinPermission.setNodeGroupId(context.stringValue("ListOwnedJoinPermissionsResponse.Data.List["+ i +"].NodeGroupId"));
			list.add(joinPermission);
		}
		data.setList(list);
		listOwnedJoinPermissionsResponse.setData(data);

		return listOwnedJoinPermissionsResponse;
	}
}
|
ZMcursor/Daily
|
app/src/main/java/com/zmcursor/daily/UI/Activity/MainActivity.java
|
<filename>app/src/main/java/com/zmcursor/daily/UI/Activity/MainActivity.java
package com.zmcursor.daily.UI.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.FrameLayout;
import com.zmcursor.daily.Conf;
import com.zmcursor.daily.R;
import com.zmcursor.daily.UI.Fragment.TimelineFragment;
import com.zmcursor.daily.UI.ThemeManager;
import com.zmcursor.daily.Utils.Loger;
import com.zmcursor.daily.Utils.Utils;
/**
 * Main screen: hosts the story timeline fragment inside a drawer layout,
 * with a themed app bar.
 *
 * NOTE(review): {@code toolbar} is never assigned (initView() has no
 * findViewById for it), so setSupportActionBar() and the
 * ActionBarDrawerToggle below receive null -- confirm whether the active
 * layout defines a toolbar that should be looked up here.
 */
public class MainActivity extends AppCompatActivity {
    private static final String TAG = "MainActivity";
    private TimelineFragment timelineFragment; // story list shown in the content area
    private FrameLayout appbar;                // themed app-bar container
    private Toolbar toolbar;                   // never initialized -- see class note
    private DrawerLayout mDrawerLayout;
    private ActionBarDrawerToggle mDrawerToggle; // hamburger/arrow drawer toggle
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Loger.UI(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        // Layout resource id depends on the currently active theme.
        setContentView(ThemeManager.getThemeManager().getMain_layout());
        initView();
        // NOTE(review): fragments normally need a no-arg constructor so the
        // framework can recreate them after process death / rotation;
        // passing the activity here may break on recreation -- confirm.
        timelineFragment = new TimelineFragment(this);
        getFragmentManager().beginTransaction().replace(R.id.story_list_container, timelineFragment).commit();
    }
    @Override
    protected void onResume() {
        Loger.UI(TAG, "onResume");
        super.onResume();
        // NOTE(review): duplicate log line; presumably leftover debugging.
        Loger.UI(TAG, "onResume");
    }
    /** Binds views, applies theme colors, and wires up the navigation drawer. */
    private void initView() {
        appbar = (FrameLayout) findViewById(R.id.appbar);
        appbar.setBackgroundColor(ThemeManager.getThemeManager().getColorPrimary());
        Utils.setStatusBarPadding(appbar);
        // Re-tint the app bar whenever the theme changes.
        ThemeManager.getThemeManager().addView(appbar, (colorPrimary, colorPrimaryDark, colorAccent) -> appbar.setBackgroundColor(colorPrimary));
        setSupportActionBar(toolbar);
        mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
//        mDrawerToggle = new DrawerLayout.DrawerListener()
        mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, toolbar, R.string.app_name, R.string.app_name);
        mDrawerLayout.addDrawerListener(mDrawerToggle);
        mDrawerToggle.syncState();
    }
    /** Opens the reading screen for the story with the given id. */
    public void startReading(int id) {
        Intent intent = new Intent(this, ReadActivity.class);
//        Intent intent = new Intent(mainActivity, TestActivity.class);
        intent.putExtra(Conf.id, id);
        startActivity(intent);
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
//        Conf.getConf().getDataService().stopSelf();
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.more_menu, menu);
        return true;
    }
    /** Handles the overflow menu; both actions are currently unimplemented. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
//        int id = item.getItemId();
        switch (item.getItemId()) {
            case R.id.btn_night_mode:
                break;
            case R.id.btn_setting:
                break;
        }
        return true;
    }
}
|
BayShubar/new-latin
|
src/navigation/index.js
|
import React from 'react'
import {
createStackNavigator,
createAppContainer,
createBottomTabNavigator
} from 'react-navigation'
import { Ionicons } from '@expo/vector-icons';
import { ACTIVE_MAIN_COLOR,
INACTIVE_MAIN_COLOR,
BACKGROUND_MAIN_COLOR } from '../constants'
import ArticlesScreen from '../screens/article/ArticlesScreen'
import ArticleScreen from '../screens/article/ArticleScreen'
import AlphabetScreen from '../screens/article/AlphabetScreen'
import TranslationScreen from '../screens/translation/TranslationScreen'
import GamesScreen from '../screens/game/GamesScreen'
import TestPlay from '../modals/TestPlay'
import ObserverPlay from '../modals/ObserverPlay'
import LetterPlay from '../modals/LetterPlay'
import ConstructPlay from '../modals/ConstructPlay'
//==================ArticleStack======================
// Article list -> article detail flow.  All stacks below share the same
// themed header styling.
const ArticleStack = createAppContainer(createStackNavigator(
  {
    ArticlesScreen:{screen: ArticlesScreen,},
    ArticleScreen:{ screen: ArticleScreen, },
  },
  {
    headerMode :'float',
    initialRouteName: 'ArticlesScreen',
    navigationOptions:{
      headerStyle: {
        backgroundColor: BACKGROUND_MAIN_COLOR,
      },
      headerTintColor: ACTIVE_MAIN_COLOR,
      headerTitleStyle:{
        fontWeight: '300',
      }
    }
  },
))
//=================GameStack===================
// Single-screen stack for the games hub.
const GameStack = createAppContainer(createStackNavigator(
  {
    GamesScreen: { screen: GamesScreen, }
  },
  {
    initialRouteName: 'GamesScreen',
    navigationOptions:{
      headerStyle: {
        backgroundColor: BACKGROUND_MAIN_COLOR,
      },
      headerTintColor: ACTIVE_MAIN_COLOR,
      headerTitleStyle:{
        fontWeight: '300',
      }
    }
  }
))
//=================TranslationStack===================
// Single-screen stack for the translation tool.
const TranslationStack = createAppContainer(createStackNavigator(
  {
    TranslationScreen: { screen: TranslationScreen, }
  },
  {
    initialRouteName: 'TranslationScreen',
    navigationOptions:{
      headerStyle: {
        backgroundColor: BACKGROUND_MAIN_COLOR,
      },
      headerTintColor: ACTIVE_MAIN_COLOR,
      headerTitleStyle:{
        fontWeight: '300',
      }
    }
  }
))
//=================AlphabetStack===================
// Single-screen stack for the alphabet reference.
const AlphabetStack = createAppContainer(createStackNavigator(
  {
    AlphabetScreen:{ screen: AlphabetScreen, }
  },
  {
    initialRouteName: 'AlphabetScreen',
    navigationOptions:{
      headerStyle: {
        backgroundColor: BACKGROUND_MAIN_COLOR,
      },
      headerTintColor: ACTIVE_MAIN_COLOR,
      headerTitleStyle:{
        fontWeight: '300',
      }
    }
  }
))
//================TabBar===============================
// Bottom tab bar combining the four stacks; icons are chosen per route
// name and tinted by focus state.
const TabBar = createAppContainer(createBottomTabNavigator(
  {
    GameStack: { screen: GameStack,},
    ArticleStack: { screen: ArticleStack, },
    TranslationStack: { screen: TranslationStack,},
    AlphabetStack:{ screen: AlphabetStack, }
  },
  {
    initialRouteName: 'GameStack',
    defaultNavigationOptions: ({ navigation }) => ({
      tabBarIcon: ({ focused, horizontal, tintColor }) => {
        const { routeName } = navigation.state;
        let IconComponent = Ionicons;
        let iconName;
        // Map each tab route to its Ionicons glyph.
        if (routeName === 'ArticleStack') {
          iconName = `ios-paper`;
        }
        else if (routeName === 'TranslationStack') {
          iconName = `ios-globe`;
        }
        else if (routeName === 'GameStack') {
          iconName = `ios-jet`;
        }
        else if(routeName ==='AlphabetStack'){
          iconName = `ios-bulb`;
        }
        return <IconComponent name={iconName} size={30} color={tintColor} />;
      },
    }),
    tabBarOptions: {
      showLabel: false,
      activeTintColor: ACTIVE_MAIN_COLOR,
      inactiveTintColor: INACTIVE_MAIN_COLOR,
      inactiveBackgroundColor: BACKGROUND_MAIN_COLOR,
      activeBackgroundColor: BACKGROUND_MAIN_COLOR,
    },
  }
))
//========================Root===========================
// Root navigator: the tab bar plus the four play screens presented
// modally on top of it, without headers.
export default createAppContainer(createStackNavigator(
  {
    TabBar: { screen: TabBar },
    TestPlay: {screen: TestPlay},
    ObserverPlay: {screen: ObserverPlay},
    LetterPlay:{screen: LetterPlay},
    ConstructPlay:{screen: ConstructPlay},
  },
  {
    // Fix: react-navigation only recognizes the lowercase string 'modal';
    // the previous 'Modal' silently fell back to the default card transition.
    mode: 'modal',
    headerMode: 'none',
  }
))
|
oxelson/gempak
|
gempak/source/cgemlib/cap/cappsmarkint.c
|
#include "capcmn.h"
/*
 * Private helper: returns non-zero when two bounding boxes overlap.
 * Boxes are ordered (minx, maxx, miny, maxy).  Edges that merely touch
 * count as an intersection (closed intervals), which is equivalent to
 * the expanded comparison chains this replaces for well-formed boxes
 * (min <= max on both axes).
 */
static int cap_bboxovrlp(const float *a, const float *b)
{
    return (a[0] <= b[1] && b[0] <= a[1] &&
            a[2] <= b[3] && b[2] <= a[3]);
}

void cap_psmarkint(PlacementSet placements, CMDObjectSet objects,
                   float *bbox, int *found, float inf_bbox[4], int *iret)
/*****************************************************************************
 * cap_psmarkint
 *
 * Marks all the placement objects that are intersected by the area given
 * as not placed so they are updated on the next placement update.
 * Also determine the total extent of the area of impact for all the
 * objects that could be updated when placement runs again.
 *
 * Input parameters:
 *  placements  PlacementSet    Handle to PlacementSet to be checked
 *  objects     CMDObjectSet    Meta data for the PlacementSet
 *  *bbox       float           Bounding box for the area to check
 *                              in the order, minx, maxx, miny, maxy
 *
 * Output parameters:
 *  *found      int             Number of objects marked
 *  *inf_bbox   float           Bounding box for the area potentially
 *                              influenced by the updates, in the order,
 *                              minx, maxx, miny, maxy
 *  *iret       int             Return code
 *                              0 = Function successful
 *                              -1 = Invalid object
 **
 * Log:
 * S.Danz/AWC 07/06 Created
 ****************************************************************************/
{
    int index, ier;
    float tmp_bbox[4], pl_bbox[4], ref_bbox[4], max_dist;
    CMDObject obj;
    PlaceInfoContainer *placeinfo;
    PlacementSetContainer *p;
/*---------------------------------------------------------------------*/
    /* Validate handles; note iret itself is assumed non-NULL, as before. */
    if (!placements || !objects || !bbox || !found || !inf_bbox) {
        *iret = -1;
        return;
    }

    *found = 0;
    *iret = 0;
    /* Start the influenced area as a copy of the query area. */
    inf_bbox[0] = bbox[0];
    inf_bbox[1] = bbox[1];
    inf_bbox[2] = bbox[2];
    inf_bbox[3] = bbox[3];

    /*
     * First, for all the objects that are placed, see if their bounding box
     * intersects the area in question.  If so, mark them as not placed (so
     * they get updated) and include their area in the resulting total extent.
     */
    p = (PlacementSetContainer*)placements;
    for (index = 0; index < p->used; index++) {
        placeinfo = p->places[index];
        if (placeinfo->was_placed) {
            cmd_osgetob(objects, placeinfo->id, &obj, &ier);
            cmd_obgetbb(obj, &pl_bbox[0], &pl_bbox[1],
                        &pl_bbox[2], &pl_bbox[3], &ier);
            /* Shift the object's box by its current placement offset. */
            pl_bbox[0] += placeinfo->offset.delta_x;
            pl_bbox[1] += placeinfo->offset.delta_x;
            pl_bbox[2] += placeinfo->offset.delta_y;
            pl_bbox[3] += placeinfo->offset.delta_y;
            /*
             * The 'from' part of the arrow is in the center of the object,
             * just worry about the 'to' part.
             */
            pl_bbox[0] = G_MIN(pl_bbox[0], placeinfo->arrow_x[1]);
            pl_bbox[1] = G_MAX(pl_bbox[1], placeinfo->arrow_x[1]);
            pl_bbox[2] = G_MIN(pl_bbox[2], placeinfo->arrow_y[1]);
            pl_bbox[3] = G_MAX(pl_bbox[3], placeinfo->arrow_y[1]);
            /*
             * Just a bbox intersection, nothing too 'heavy'.
             */
            if (cap_bboxovrlp(bbox, pl_bbox)) {
                placeinfo->was_placed = 0;
                cap_mergebbox(inf_bbox, pl_bbox, inf_bbox);
                *found += 1;
            }
        }
    }

    /*
     * Now, starting from the area for the 'direct' impacts from the original
     * area, if any 'potential' positions (from the max extent of the object)
     * intersect this area, then include that 'max extent' in case the object
     * moves within that area to a new location during placement.
     */
    tmp_bbox[0] = inf_bbox[0];
    tmp_bbox[1] = inf_bbox[1];
    tmp_bbox[2] = inf_bbox[2];
    tmp_bbox[3] = inf_bbox[3];
    for (index = 0; index < p->used; index++) {
        placeinfo = p->places[index];
        if (!placeinfo->was_placed) {
            cmd_osgetob(objects, placeinfo->id, &obj, &ier);
            cmd_obgetbb(obj, &pl_bbox[0], &pl_bbox[1],
                        &pl_bbox[2], &pl_bbox[3], &ier);
            cap_psgetplmaxdist(placements, placeinfo, pl_bbox,
                               &max_dist, &ier);
            /* Expand the reference object's box by the maximum distance
             * the placed object may travel from it. */
            cmd_osgetob(objects, placeinfo->reference, &obj, &ier);
            cmd_obgetbb(obj, &ref_bbox[0], &ref_bbox[1],
                        &ref_bbox[2], &ref_bbox[3], &ier);
            ref_bbox[0] -= max_dist;
            ref_bbox[1] += max_dist;
            ref_bbox[2] -= max_dist;
            ref_bbox[3] += max_dist;
            /*
             * Again, just a bbox intersection, nothing too 'heavy'.
             */
            if (cap_bboxovrlp(tmp_bbox, ref_bbox)) {
                cap_mergebbox(inf_bbox, ref_bbox, inf_bbox);
                *found += 1;
            }
        }
    }
    return;
}
|
bitcaster-io/bitcaster
|
src/bitcaster/dispatchers/handlers/twilio.py
|
<gh_stars>1-10
from logging import getLogger
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from twilio.rest import Client
from bitcaster.api.fields import PhoneNumberField
from bitcaster.dispatchers.base import (CoreDispatcher, DispatcherOptions,
MessageType, SubscriptionOptions,)
from bitcaster.dispatchers.registry import dispatcher_registry
from bitcaster.exceptions import PluginSendError
logger = getLogger(__name__)
# Message type used by the Twilio dispatcher; no fields beyond the core
# MessageType contract.
class Message(MessageType):
    pass
class TwilioSubscription(SubscriptionOptions):
    # Destination phone number for this subscription.
    recipient = PhoneNumberField()
class TwilioOptions(DispatcherOptions):
    # Twilio account SID and auth token (from the Twilio console), plus the
    # sending phone number.
    sid = serializers.CharField(allow_blank=False, required=True)
    token = serializers.CharField(allow_blank=False, required=True)
    sender = PhoneNumberField()
@dispatcher_registry.register
class Twilio(CoreDispatcher):
    """Dispatcher that delivers messages as SMS via the Twilio REST API."""
    __help__ = _("""
You need a valid [Twilio](https://www.twilio.com/) account to use this service.
- Get your token at https://www.twilio.com/console
- Get twilio number at https://www.twilio.com/console/phone-numbers/incoming
""")
    name = 'SMS (Twilio)'
    subscription_class = TwilioSubscription
    options_class = TwilioOptions
    # NOTE(review): message_class is the base MessageType, not the local
    # Message subclass defined above -- confirm which is intended.
    message_class = MessageType
    def _get_connection(self) -> Client:
        # Build a Twilio REST client from the stored sid/token config.
        return Client(self.config['sid'],
                      self.config['token'])
    def emit(self, address: str, subject: str, message: str,
             connection=None, *args, **kwargs) -> str:
        """Send `message` as an SMS to `address` and return the address.

        `subject` is accepted for dispatcher-interface compatibility but not
        used for SMS.  Raises PluginSendError on any underlying failure.
        """
        try:
            connection = connection or self._get_connection()
            connection.messages.create(
                # to=address.encode('utf8'),
                to=address,
                # NOTE(review): `from_` is encoded to bytes while `to` stays a
                # str (its encode() was commented out above) -- the Twilio
                # client normally expects str; confirm this asymmetry.
                from_=self.config['sender'].encode('utf8'),
                body=message
            )
            return address
        except Exception as e:  # pragma: no cover
            logger.exception(e)
            raise PluginSendError(e)
    def test_connection(self, raise_exception=False):
        # Cheap credential check: list the account's signing keys.
        connection = self._get_connection()
        return connection.api.signing_keys(self.config['sid'])
|
boltjs/bolt
|
projects/base/src/util/Globals.js
|
<reponame>boltjs/bolt<gh_stars>10-100
define(
  'bolt.base.util.Globals',

  [
  ],

  function () {
    // The host environment's global object (window in browsers, global in
    // node), obtained without referencing either name directly.
    var theGlobal = Function('return this')();

    // Walk `segments` downwards from `root`, stopping early as soon as a
    // level is missing; returns the value found, or undefined.
    var walk = function (segments, root) {
      var current = root;
      var i = 0;
      while (i < segments.length && current !== undefined) {
        current = current[segments[i]];
        i++;
      }
      return current;
    };

    // Look up a dotted name (e.g. 'a.b.c') in `scope`, defaulting to the
    // global object.
    var resolve = function (name, scope) {
      return walk(name.split('.'), scope || theGlobal);
    };

    // Delete the final property of a dotted name from `scope` (or the
    // global object): remove('a.b.c') performs `delete a.b.c`.
    var remove = function (name, scope) {
      var segments = name.split('.');
      var owner = walk(segments.slice(0, -1), scope || theGlobal);
      delete owner[segments[segments.length - 1]];
    };

    return {
      global: theGlobal,
      resolve: resolve,
      remove: remove
    };
  }
);
|
tenbirds/PKMS
|
src/com/pkms/pkgmg/access/service/AccessService.java
|
package com.pkms.pkgmg.access.service;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.annotation.Resource;

import org.apache.log4j.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.springframework.stereotype.Service;

import com.pkms.common.workSystem.model.WorkSystemModel;
import com.pkms.common.workSystem.service.WorkSystemServiceIf;
import com.pkms.pkgmg.access.dao.AccessDAO;
import com.pkms.pkgmg.pkg.model.PkgEquipmentModel;
import com.pkms.pkgmg.pkg.service.PkgEquipmentServiceIf;
import com.pkms.pkgmg.pkg21.controller.Pkg21Controller;
import com.pkms.pkgmg.pkg21.model.Pkg21Model;
import com.wings.util.WingsStringUtil;
// Spring service handling package "access" workflows: checklist questions
// and answers, status rows, and related equipment/file records.
@Service("AccessService")
public class AccessService implements AccessServiceIf{
	// Main data-access object used by every method of this service.
	@Resource(name="AccessDAO")
	private AccessDAO accessDAO;
	// Equipment lookups for getPkgEquipment().
	@Resource(name = "PkgEquipmentService")
	private PkgEquipmentServiceIf pkgEquipmentService;
	@Resource(name = "WorkSystemService")
	private WorkSystemServiceIf workSystemService;
	// NOTE(review): logger is tagged with Pkg21Controller.class, not
	// AccessService.class -- looks copy-pasted; confirm before relying on
	// log origin when filtering.
	static Logger logger = Logger.getLogger(Pkg21Controller.class);
	/** Returns one page of packages; also stores the total row count on the model. */
	@Override
	public List<Pkg21Model> readList(Pkg21Model pkg21Model) throws Exception {
		pkg21Model.setTotalCount(accessDAO.readTotalCount(pkg21Model));
		List<Pkg21Model> resultList = (List<Pkg21Model>) accessDAO.readList(pkg21Model);
		return resultList;
	}
	/** Loads a single package by the key fields set on the model. */
	@Override
	public Pkg21Model read(Pkg21Model pkg21Model) throws Exception {
		return accessDAO.read(pkg21Model);
	}
	/** Inserts a new package and writes the generated sequence back onto the model. */
	@Override
	public void create(Pkg21Model pkg21Model) throws Exception {
		pkg21Model.setPkg_seq(accessDAO.create(pkg21Model));
	}
	/** Updates an existing package row. */
	@Override
	public void update(Pkg21Model pkg21Model) throws Exception {
		accessDAO.update(pkg21Model);
	}
	/** Updates only the package status.
	 *  NOTE(review): no @Override -- confirm it is intentionally absent
	 *  from AccessServiceIf. */
	public void pkg_status_update(Pkg21Model pkg21Model) throws Exception {
		accessDAO.pkg_status_update(pkg21Model);
	}
@Override
public List<Pkg21Model> getChkList(Pkg21Model pkg21Model,String chk_type) throws Exception {
List<Pkg21Model> questList = null;
List<Pkg21Model> valueList = null;
List<Pkg21Model> returnList = new ArrayList<Pkg21Model>();
Pkg21Model result = null;
pkg21Model.setChk_type(chk_type);
questList = accessDAO.questList(pkg21Model);
valueList = accessDAO.valueList(pkg21Model);
for(Pkg21Model qModel : questList){
result = new Pkg21Model();
result.setVol_no(qModel.getVol_no());
result.setChk_seq(qModel.getChk_seq());
result.setTitle(qModel.getTitle());
if(valueList != null){
for(Pkg21Model vModel : valueList){
if(vModel.getChk_seq().equals(qModel.getChk_seq())){
result.setChk_result(vModel.getChk_result());
}
}
}
returnList.add(result);
}
return returnList;
}
@Override
public void pkg_chk_create(Pkg21Model pkg21Model) throws Exception {
Pkg21Model pkg21 = new Pkg21Model();
String[] chkSeqs = pkg21Model.getChk_seqs();
String[] chk_results = pkg21Model.getChk_results();
String[] cchk_results = WingsStringUtil.getNotNullStringArray(chk_results);
for(int i = 0; i < chkSeqs.length; i++) {
pkg21 = new Pkg21Model();
pkg21.setPkg_seq(pkg21Model.getPkg_seq());
pkg21.setChk_seq(chkSeqs[i]);
pkg21.setChk_result(cchk_results[i]);
accessDAO.pkg_chk_create(pkg21);
}
}
@Override
public void pkg_chk_delete(Pkg21Model pkg21Model) throws Exception {
Pkg21Model pkg21 = new Pkg21Model();
String[] chkSeqs = pkg21Model.getChk_seqs();
String[] chk_results = pkg21Model.getChk_results();
String[] cchk_results = WingsStringUtil.getNotNullStringArray(chk_results);
for(int i = 0; i < chkSeqs.length; i++) {
pkg21 = new Pkg21Model();
pkg21.setPkg_seq(pkg21Model.getPkg_seq());
pkg21.setChk_seq(chkSeqs[i]);
pkg21.setChk_result(cchk_results[i]);
accessDAO.pkg_chk_delete(pkg21);
}
}
@Override
public void pkg_chk_delete_all(Pkg21Model pkg21Model) throws Exception {
accessDAO.pkg_chk_delete_all(pkg21Model);
}
@Override
public List<PkgEquipmentModel> getPkgEquipment(Pkg21Model pkg21Model, String work_gubun) throws Exception {
List<PkgEquipmentModel> pkgEquipmentModelList = new ArrayList<PkgEquipmentModel>();
PkgEquipmentModel pkgEModel = new PkgEquipmentModel();
pkgEModel.setPkg_seq(pkg21Model.getPkg_seq());
pkgEModel.setWork_gubun(work_gubun);
pkgEquipmentModelList = pkgEquipmentService.readAccess(pkgEModel);
return pkgEquipmentModelList;
}
@Override
public void equipment_delete_file(Pkg21Model pkg21Model) throws Exception {
accessDAO.equipment_delete_file(pkg21Model);
}
@Override
public void not_like_delete_file(Pkg21Model pkg21Model) throws Exception {
accessDAO.not_like_delete_file(pkg21Model);
}
@Override
public void pkg_status_delete(Pkg21Model pkg21Model) throws Exception {
accessDAO.pkg_status_delete(pkg21Model);
}
@Override
public void pkg_status_delete_like(Pkg21Model pkg21Model) throws Exception {
accessDAO.pkg_status_delete_like(pkg21Model);
}
@Override
public Pkg21Model copy_cnt(Pkg21Model pkg21Model) throws Exception {
return accessDAO.copy_cnt(pkg21Model);
}
@Override
public void copy_file(Pkg21Model pkg21Model) throws Exception {
accessDAO.copy_file(pkg21Model);
}
@Override
public void copy_file_delete(Pkg21Model pkg21Model) throws Exception {
accessDAO.copy_file_delete(pkg21Model);
}
@Override
public void tangoWork(Pkg21Model pkg21Model, String work_gubun) throws Exception {
WorkSystemModel tangoMainMdl = new WorkSystemModel();
//기본정보 중 없는 것은 여기에서
Pkg21Model p21Model = new Pkg21Model();
p21Model = accessDAO.read(pkg21Model);
tangoMainMdl.setNo(pkg21Model.getPkg_seq());
tangoMainMdl.setMaster_file_id(pkg21Model.getMaster_file_id()); //첨부파일 연동
tangoMainMdl.setImpo("Y"); //pkms_main
tangoMainMdl.setBackground_target("PKMS의 작업절차서 파일 참조"); //pkms_main
tangoMainMdl.setJob_bunya("1"); //pkms_main
tangoMainMdl.setJob_gubun1("SW 작업"); //pkms_main
if("F".equals(p21Model.getVer_gubun())){ //전체
tangoMainMdl.setJob_gubun2("PKG적용-Full");
}else { //부분
tangoMainMdl.setJob_gubun2("PKG적용-Patch");
}
SimpleDateFormat DateFormat = new SimpleDateFormat ( "yyyyMMdd", Locale.KOREA );
Date current = new Date();
String c_date = DateFormat.format (current);
tangoMainMdl.setIns_date(c_date);//pkms_main
tangoMainMdl.setC_date(c_date);//pkms_sub
tangoMainMdl.setMaster_date_s(c_date);//pkms_sub
tangoMainMdl.setGojang_step("원복");//pkms_main
tangoMainMdl.setWork_effect("소통저조");//pkms_main
tangoMainMdl.setWork_rank("L");//pkms_main
tangoMainMdl.setIns_name(pkg21Model.getSession_user_name()); //pkms_main
tangoMainMdl.setIns_id(pkg21Model.getSession_user_id()); //pkms_main
tangoMainMdl.setIns_sosok(pkg21Model.getSession_user_group_name()); //pkms_main
tangoMainMdl.setIns_sosok_code(pkg21Model.getSession_user_group_id()); //pkms_sub
tangoMainMdl.setWork_phone(pkg21Model.getSession_user_mobile_phone());
tangoMainMdl.setWork_result_date_s(c_date); //pkms_sub
tangoMainMdl.setSystem_seq(pkg21Model.getSystem_seq());
WorkSystemModel sysUserMdl = new WorkSystemModel();
sysUserMdl.setSystem_seq(pkg21Model.getSystem_seq());
if(null != workSystemService.read_Sys_User_Info(sysUserMdl)){
sysUserMdl = workSystemService.read_Sys_User_Info(sysUserMdl);
tangoMainMdl.setJob_man(sysUserMdl.getJob_man());
tangoMainMdl.setJob_man_post(sysUserMdl.getJob_man_post());
tangoMainMdl.setTarget_system(sysUserMdl.getTarget_system());
tangoMainMdl.setWork_summary(sysUserMdl.getTarget_system());
}
WorkSystemModel WorkSysModel = new WorkSystemModel();
int seq = 0;
String w_sTime ="";
String w_eTime ="";
String team_code = pkg21Model.getSession_user_group_id();
WorkSysModel = workSystemService.read_SeqMax_Main(WorkSysModel);
seq = WorkSysModel.getSeq() + 1;
w_sTime = pkg21Model.getStart_date()+" "+pkg21Model.getStart_time1()+":"+pkg21Model.getStart_time2();
w_eTime = pkg21Model.getEnd_date()+" "+pkg21Model.getEnd_time1()+":"+pkg21Model.getEnd_time2();
sysUserMdl.setMaster_team_code(team_code.substring(4, 8));
tangoMainMdl.setSeq(seq); //SEQ
tangoMainMdl.setTeam_code(team_code);
tangoMainMdl.setWork_plandate_s(w_sTime); // 작업시작시간 WORK_PLANDATE_S 2013-03-25 02:00
tangoMainMdl.setWork_plandate_e(w_eTime); // 작업종료시간 WORK_PLANDATE_E 2013-03-25 07:00
tangoMainMdl.setSystem_name(p21Model.getSystem_name_real());
tangoMainMdl.setWork_sosok(pkg21Model.getSession_user_group_name());
tangoMainMdl.setService_effect_area(""); // 서비스 영향 지역 - 없음
tangoMainMdl.setFull_system_name(p21Model.getSystem_name_real());
String ver = "";
String title = "";
ver = p21Model.getVer();
title = p21Model.getSystem_name_real()+" "+pkg21Model.getPatch_title()+" "+ver;
String content = "■ 시스템 : " + p21Model.getSystem_name_real()+"<br/>" +
"■ 제목 : "+ p21Model.getTitle() + "<br/>" +
"■ 대상시스템 : ";
if("1S".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("S");
title = title + " 1차초도";
content = content + "1차초도";
}else if("2S".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("S");
title = title + " 2차초도";
content = content + "2차초도";
}else if("3S".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("S");
title = title + " 3차초도";
content = content + "3차초도";
}else if("1C".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("C");
title = title + " 1차상용";
content = content + "1차상용";
}else if("2C".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("C");
title = title + " 2차상용";
content = content + "2차상용";
}else if("1E".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("E");
title = title + " 확대";
content = content + "확대";
}
content = content + "■ CVT대표담당자 : "+ p21Model.getSystem_user_name()+"M <br/>"
+ "■ DVT대표담당자 : "+ p21Model.getDev_system_user_name()+"M <br/>"
+ "■ PKG버젼 : " +ver + "<br/>"
+ "■ 버전 구분 : ";
if("F".equals(p21Model.getVer_gubun())){
content = content + "Full <br/>";
}else{
content = content + "Patch <br/>";
}
content = content +"■ 서비스중단시간 : " + p21Model.getSer_downtime() + "(분) <br/>";
tangoMainMdl.setWork_content(content); //pkms_main
tangoMainMdl.setTitle(title);
workSystemService.create_pkms_main(tangoMainMdl);
String work_seq = String.valueOf(seq);
tangoMainMdl.setWork_seq(work_seq);//pkms_sub, pkms_target_system
tangoMainMdl.setState("미승인");//승인 or 미승인
tangoMainMdl.setWork_realdate_s(w_sTime);
tangoMainMdl.setWork_realdate_e(w_eTime);
tangoMainMdl.setWork_result("양호"); //pkms_sub
tangoMainMdl.setWork_result_id(pkg21Model.getSession_user_id()); //pkms_sub
tangoMainMdl.setWork_result_name(pkg21Model.getSession_user_name()); //pkms_sub
tangoMainMdl.setOrg_gubun("SKT"); //pkms_sub
//------------------------공통부분 끝--------------------
sysUserMdl.setSystem_seq(p21Model.getSystem_seq());
if("1S".equals(work_gubun)) {
sysUserMdl.setWork_gubun("DA");
}else if("2S".equals(work_gubun)) {
sysUserMdl.setWork_gubun("DA");
}else if("3S".equals(work_gubun)) {
sysUserMdl.setWork_gubun("DA");
}else if("1C".equals(work_gubun)) {
sysUserMdl.setWork_gubun("AU");
}else if("2C".equals(work_gubun)) {
sysUserMdl.setWork_gubun("AU");
}else if("1E".equals(work_gubun)) {
sysUserMdl.setWork_gubun("LA");
}
List<WorkSystemModel> readTJList = workSystemService.readTJacc(sysUserMdl);
int sub_seq =0;
WorkSystemModel workSeqModel = new WorkSystemModel();
workSeqModel = workSystemService.read_SeqMax_Sub(workSeqModel);
sub_seq = workSeqModel.getSeq() + 1;
WorkSystemModel work = new WorkSystemModel();
work.setSystem_seq(p21Model.getSystem_seq());
work = workSystemService.read_Work_Info(work);
tangoMainMdl.setWork_name(work.getWork_name());
tangoMainMdl.setWork_phone(work.getWork_phone());
tangoMainMdl.setWork_sosok(work.getWork_sosok());
tangoMainMdl.setWork_confirm_name(null);
if((readTJList != null) && (readTJList.size() > 0)){
for(WorkSystemModel tjModel : readTJList){
tangoMainMdl.setMaster_id(tjModel.getMaster_id());
tangoMainMdl.setMaster_name(tjModel.getConfirm_name());
tangoMainMdl.setMaster_team_code("0000"+tjModel.getMaster_team_code());
tangoMainMdl.setMaster_team_name(tjModel.getMaster_team_name());
tangoMainMdl.setMaster_movetel(tjModel.getMaster_movetel());
tangoMainMdl.setWork_result("");
tangoMainMdl.setWork_result_date_s(null);
tangoMainMdl.setState("미승인");
tangoMainMdl.setSeq(sub_seq);
workSystemService.create_pkms_sub_acc(tangoMainMdl);
sub_seq++;
}
}else{
tangoMainMdl.setMaster_id(pkg21Model.getSession_user_id());
tangoMainMdl.setMaster_name(pkg21Model.getSession_user_name());
tangoMainMdl.setMaster_team_code(pkg21Model.getSession_user_group_id());
tangoMainMdl.setMaster_team_name(pkg21Model.getSession_user_group_name());
tangoMainMdl.setMaster_movetel(pkg21Model.getSession_user_mobile_phone());
tangoMainMdl.setWork_result("");
tangoMainMdl.setWork_result_date_s(null);
tangoMainMdl.setState("미승인");
tangoMainMdl.setSeq(sub_seq);
workSystemService.create_pkms_sub_acc(tangoMainMdl);
}
}
/*@Override
public void tangoWork(Pkg21Model pkg21Model, String work_gubun, String max_ord) throws Exception {
WorkSystemModel tangoMainMdl = new WorkSystemModel();
//기본정보 중 없는 것은 여기에서
Pkg21Model p21Model = new Pkg21Model();
p21Model = accessDAO.read(pkg21Model);
tangoMainMdl.setNo(pkg21Model.getPkg_seq());
tangoMainMdl.setMaster_file_id(pkg21Model.getMaster_file_id()); //첨부파일 연동
tangoMainMdl.setImpo("Y"); //pkms_main
tangoMainMdl.setBackground_target("PKMS의 작업절차서 파일 참조"); //pkms_main
tangoMainMdl.setJob_bunya("1"); //pkms_main
tangoMainMdl.setJob_gubun1("SW 작업"); //pkms_main
if("F".equals(p21Model.getVer_gubun())){ //전체
tangoMainMdl.setJob_gubun2("PKG적용-Full");
}else { //부분
tangoMainMdl.setJob_gubun2("PKG적용-Patch");
}
SimpleDateFormat DateFormat = new SimpleDateFormat ( "yyyyMMdd", Locale.KOREA );
Date current = new Date();
String c_date = DateFormat.format (current);
tangoMainMdl.setIns_date(c_date);//pkms_main
tangoMainMdl.setC_date(c_date);//pkms_sub
tangoMainMdl.setMaster_date_s(c_date);//pkms_sub
tangoMainMdl.setGojang_step("원복");//pkms_main
tangoMainMdl.setWork_effect("소통저조");//pkms_main
tangoMainMdl.setWork_rank("L");//pkms_main
tangoMainMdl.setIns_name(pkg21Model.getSession_user_name()); //pkms_main
tangoMainMdl.setIns_id(pkg21Model.getSession_user_id()); //pkms_main
tangoMainMdl.setIns_sosok(pkg21Model.getSession_user_group_name()); //pkms_main
tangoMainMdl.setIns_sosok_code(pkg21Model.getSession_user_group_id()); //pkms_sub
tangoMainMdl.setWork_phone(pkg21Model.getSession_user_mobile_phone());
tangoMainMdl.setWork_result_date_s(c_date); //pkms_sub
tangoMainMdl.setSystem_seq(pkg21Model.getSystem_seq());
WorkSystemModel sysUserMdl = new WorkSystemModel();
sysUserMdl.setSystem_seq(pkg21Model.getSystem_seq());
if(null != workSystemService.read_Sys_User_Info(sysUserMdl)){
sysUserMdl = workSystemService.read_Sys_User_Info(sysUserMdl);
tangoMainMdl.setJob_man(sysUserMdl.getJob_man());
tangoMainMdl.setJob_man_post(sysUserMdl.getJob_man_post());
tangoMainMdl.setTarget_system(sysUserMdl.getTarget_system());
tangoMainMdl.setWork_summary(sysUserMdl.getTarget_system());
}
WorkSystemModel WorkSysModel = new WorkSystemModel();
int seq = 0;
String w_sTime ="";
String w_eTime ="";
String team_code = pkg21Model.getSession_user_group_id();
WorkSysModel = workSystemService.read_SeqMax_Tango(WorkSysModel);
// WorkSysModel = workSystemService.read_SeqMax_Main(WorkSysModel);
// seq = WorkSysModel.getSeq() + 1;
seq = WorkSysModel.getSeq();
w_sTime = pkg21Model.getStart_date()+" "+pkg21Model.getStart_time1()+":"+pkg21Model.getStart_time2();
w_eTime = pkg21Model.getEnd_date()+" "+pkg21Model.getEnd_time1()+":"+pkg21Model.getEnd_time2();
sysUserMdl.setMaster_team_code(team_code.substring(4, 8));
tangoMainMdl.setSeq(seq); //SEQ
tangoMainMdl.setTeam_code(team_code);
tangoMainMdl.setWork_plandate_s(w_sTime); // 작업시작시간 WORK_PLANDATE_S 2013-03-25 02:00
tangoMainMdl.setWork_plandate_e(w_eTime); // 작업종료시간 WORK_PLANDATE_E 2013-03-25 07:00
tangoMainMdl.setSystem_name(p21Model.getSystem_name_real());
tangoMainMdl.setWork_sosok(pkg21Model.getSession_user_group_name());
tangoMainMdl.setService_effect_area(""); // 서비스 영향 지역 - 없음
tangoMainMdl.setFull_system_name(p21Model.getSystem_name_real());
String ver = "";
String title = "";
ver = p21Model.getVer();
title = p21Model.getSystem_name_real()+" "+pkg21Model.getPatch_title()+" "+ver;
String content = "■ 시스템 : " + p21Model.getSystem_name_real()+"<br/>" +
"■ 제목 : "+ p21Model.getTitle() + "<br/>" +
"■ 대상시스템 : ";
if("1S".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("S");
title = title + " 1차초도";
content = content + "1차초도";
}else if("2S".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("S");
title = title + " 2차초도";
content = content + "2차초도";
}else if("3S".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("S");
title = title + " 3차초도";
content = content + "3차초도";
}else if("1C".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("C");
title = title + " 1차상용";
content = content + "1차상용";
}else if("2C".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("C");
title = title + " 2차상용";
content = content + "2차상용";
}else if("1E".equals(work_gubun)) {
tangoMainMdl.setWork_gubun("E");
title = title + " 확대";
content = content + "확대";
}
content = content + "■ CVT대표담당자 : "+ p21Model.getSystem_user_name()+"M <br/>"
+ "■ DVT대표담당자 : "+ p21Model.getDev_system_user_name()+"M <br/>"
+ "■ PKG버젼 : " +ver + "<br/>"
+ "■ 버전 구분 : ";
if("F".equals(p21Model.getVer_gubun())){
content = content + "Full <br/>";
}else{
content = content + "Patch <br/>";
}
content = content +"■ 서비스중단시간 : " + p21Model.getSer_downtime() + "(분) <br/>";
tangoMainMdl.setWork_content(content); //pkms_main
tangoMainMdl.setTitle(title);
// workSystemService.create_pkms_main(tangoMainMdl);
JSONObject jsonObject = new JSONObject();
jsonObject.put("lnkgSystemNm", "PKMS");
jsonObject.put("lnkgSystemWorkId", String.valueOf(seq));
jsonObject.put("dmnDivCd", "101");
if("1S".equals(work_gubun)) {
if("F".equals(p21Model.getVer_gubun())){ //전체
jsonObject.put("workTypCd", "101020101");
}else{ //패치
jsonObject.put("workTypCd", "101020301");
}
}else if("2S".equals(work_gubun)) {
if("F".equals(p21Model.getVer_gubun())){ //전체
jsonObject.put("workTypCd", "101020101");
}else{ //패치
jsonObject.put("workTypCd", "101020301");
}
}else if("3S".equals(work_gubun)) {
if("F".equals(p21Model.getVer_gubun())){ //전체
jsonObject.put("workTypCd", "101020101");
}else{ //패치
jsonObject.put("workTypCd", "101020301");
}
}else if("1C".equals(work_gubun)) {
if("F".equals(p21Model.getVer_gubun())){ //전체
jsonObject.put("workTypCd", "101020102");
}else{ //패치
jsonObject.put("workTypCd", "101020302");
}
}else if("2C".equals(work_gubun)) {
if("F".equals(p21Model.getVer_gubun())){ //전체
jsonObject.put("workTypCd", "101020102");
}else{ //패치
jsonObject.put("workTypCd", "101020302");
}
}else if("1E".equals(work_gubun)) {
if("F".equals(p21Model.getVer_gubun())){ //전체
jsonObject.put("workTypCd", "101020102");
}else{ //패치
jsonObject.put("workTypCd", "101020302");
}
}
jsonObject.put("workNm", tangoMainMdl.getTitle());
jsonObject.put("genWorkTerrLstCd", "");
jsonObject.put("purpWorkCd", "");
jsonObject.put("rstrWayCtt", tangoMainMdl.getWork_summary());
jsonObject.put("attnMtrCtt", tangoMainMdl.getBackground_target());
jsonObject.put("workDtlCtt", content);
jsonObject.put("workStaDate", tangoMainMdl.getWork_plandate_s());
jsonObject.put("workEndDate", tangoMainMdl.getWork_plandate_e());
jsonObject.put("srvcStopTime", p21Model.getSer_downtime());
jsonObject.put("rstrSrvcStopTime", "");
String reqpId = "SKT" + pkg21Model.getSession_user_id();
jsonObject.put("reqpId", reqpId);
jsonObject.put("reqDate", c_date);
if("주간".equals(pkg21Model.getAmpm())){
jsonObject.put("dytmWorkYn", "Y");
jsonObject.put("dytmWorkCd", "");
}else{
jsonObject.put("dytmWorkYn", "N");
jsonObject.put("dytmWorkCd", "");
}
jsonObject.put("cmdWorkYn", "N");
jsonObject.put("svrWorkYn", "N");
jsonObject.put("emsWorkYn", "N");
jsonObject.put("workReqOrgId", pkg21Model.getSession_user_group_id());
String workRegrtId = "SKT" + tangoMainMdl.getIns_id();
jsonObject.put("workRegrtId", workRegrtId);
jsonObject.put("workInfuRegrtSelCd", "");
jsonObject.put("frstRegDate", tangoMainMdl.getIns_date());
String frstRegUserId = "SKT" + tangoMainMdl.getIns_id();
jsonObject.put("frstRegUserId", frstRegUserId);
jsonObject.put("lastChgDate", "");
jsonObject.put("lastChgUserId", "");
String work_seq = String.valueOf(seq);
tangoMainMdl.setWork_seq(work_seq);//pkms_sub, pkms_target_system
tangoMainMdl.setState("미승인");//승인 or 미승인
tangoMainMdl.setWork_realdate_s(w_sTime);
tangoMainMdl.setWork_realdate_e(w_eTime);
tangoMainMdl.setWork_result("양호"); //pkms_sub
tangoMainMdl.setWork_result_id(pkg21Model.getSession_user_id()); //pkms_sub
tangoMainMdl.setWork_result_name(pkg21Model.getSession_user_name()); //pkms_sub
tangoMainMdl.setOrg_gubun("SKT"); //pkms_sub
//------------------------공통부분 끝--------------------
sysUserMdl.setSystem_seq(p21Model.getSystem_seq());
if("1S".equals(work_gubun)) {
sysUserMdl.setWork_gubun("DA");
}else if("2S".equals(work_gubun)) {
sysUserMdl.setWork_gubun("DA");
}else if("3S".equals(work_gubun)) {
sysUserMdl.setWork_gubun("DA");
}else if("1C".equals(work_gubun)) {
sysUserMdl.setWork_gubun("AU");
}else if("2C".equals(work_gubun)) {
sysUserMdl.setWork_gubun("AU");
}else if("1E".equals(work_gubun)) {
sysUserMdl.setWork_gubun("LA");
}
List<WorkSystemModel> readTJList = workSystemService.readTJacc(sysUserMdl);
int sub_seq =0;
WorkSystemModel workSeqModel = new WorkSystemModel();
workSeqModel = workSystemService.read_SeqMax_Sub(workSeqModel);
sub_seq = workSeqModel.getSeq() + 1;
WorkSystemModel work = new WorkSystemModel();
work.setSystem_seq(p21Model.getSystem_seq());
work = workSystemService.read_Work_Info(work);
tangoMainMdl.setWork_name(work.getWork_name());
tangoMainMdl.setWork_phone(work.getWork_phone());
tangoMainMdl.setWork_sosok(work.getWork_sosok());
tangoMainMdl.setWork_confirm_name(null);
if((readTJList != null) && (readTJList.size() > 0)){
for(WorkSystemModel tjModel : readTJList){
tangoMainMdl.setMaster_id(tjModel.getMaster_id());
tangoMainMdl.setMaster_name(tjModel.getConfirm_name());
tangoMainMdl.setMaster_team_code("0000"+tjModel.getMaster_team_code());
tangoMainMdl.setMaster_team_name(tjModel.getMaster_team_name());
tangoMainMdl.setMaster_movetel(tjModel.getMaster_movetel());
tangoMainMdl.setWork_result("");
tangoMainMdl.setWork_result_date_s(null);
tangoMainMdl.setState("미승인");
tangoMainMdl.setSeq(sub_seq);
// workSystemService.create_pkms_sub_acc(tangoMainMdl);
sub_seq++;
}
}else{
tangoMainMdl.setMaster_id(pkg21Model.getSession_user_id());
tangoMainMdl.setMaster_name(pkg21Model.getSession_user_name());
tangoMainMdl.setMaster_team_code(pkg21Model.getSession_user_group_id());
tangoMainMdl.setMaster_team_name(pkg21Model.getSession_user_group_name());
tangoMainMdl.setMaster_movetel(pkg21Model.getSession_user_mobile_phone());
tangoMainMdl.setWork_result("");
tangoMainMdl.setWork_result_date_s(null);
tangoMainMdl.setState("미승인");
tangoMainMdl.setSeq(sub_seq);
// workSystemService.create_pkms_sub_acc(tangoMainMdl);
}
JSONArray workDept = new JSONArray();
JSONObject workDeptInfo_main = new JSONObject();//시행부서
workDeptInfo_main.put("orgDivCd", "1");
workDeptInfo_main.put("orgId", tangoMainMdl.getMaster_team_code());
workDeptInfo_main.put("workProgStatCd", "002002");
workDeptInfo_main.put("workStatChgDate", tangoMainMdl.getIns_date());
workDeptInfo_main.put("realWorkStaDate", tangoMainMdl.getWork_plandate_s());
workDeptInfo_main.put("realWorkEndDate", tangoMainMdl.getWork_plandate_e());
String workChrrId = "SKT" + tangoMainMdl.getIns_id();
workDeptInfo_main.put("workChrrId", workChrrId);
workDeptInfo_main.put("workRsltCd", "1");
workDeptInfo_main.put("workRsltDtlCtt", "");
workDeptInfo_main.put("realWorkStaChrrId", "");
workDeptInfo_main.put("realWorkEndChrrId", "");
workDeptInfo_main.put("workRstrStaDate", "");
workDeptInfo_main.put("workRstrEndDate", "");
workDeptInfo_main.put("frstRegDate", tangoMainMdl.getC_date());
String getMaster_id = "SKT" + tangoMainMdl.getMaster_id();
workDeptInfo_main.put("frstRegUserId", getMaster_id);
workDeptInfo_main.put("lastChgDate", "");
workDeptInfo_main.put("lastChgUserId", "");
workDept.add(workDeptInfo_main);
List<WorkSystemModel> deptInfo_sub = workSystemService.sub_Team(tangoMainMdl);
for(WorkSystemModel diMdl : deptInfo_sub){
JSONObject workDeptInfo_sub = new JSONObject();//시행부서
workDeptInfo_sub.put("orgDivCd", "2");
workDeptInfo_sub.put("orgId", diMdl.getMaster_team_code());
workDeptInfo_sub.put("workProgStatCd", "002002");
workDeptInfo_sub.put("workStatChgDate", tangoMainMdl.getIns_date());
workDeptInfo_sub.put("realWorkStaDate", tangoMainMdl.getWork_plandate_s());
workDeptInfo_sub.put("realWorkEndDate", tangoMainMdl.getWork_plandate_e());
workDeptInfo_sub.put("workChrrId", workChrrId);
workDeptInfo_sub.put("workRsltCd", "1");
workDeptInfo_sub.put("workRsltDtlCtt", "");
workDeptInfo_sub.put("realWorkStaChrrId", "");
workDeptInfo_sub.put("realWorkEndChrrId", "");
workDeptInfo_sub.put("workRstrStaDate", "");
workDeptInfo_sub.put("workRstrEndDate", "");
workDeptInfo_sub.put("frstRegDate", c_date);
workDeptInfo_sub.put("frstRegUserId", getMaster_id);
workDeptInfo_sub.put("lastChgDate", "");
workDeptInfo_sub.put("lastChgUserId", "");
workDept.add(workDeptInfo_sub);
}
jsonObject.put("workDept", workDept);
// 승인정보 (검토/승인 정보)
JSONArray workAprvHst = new JSONArray();
JSONObject workAprvHst_main = new JSONObject();
workAprvHst_main.put("orgId", tangoMainMdl.getMaster_team_code());
workAprvHst_main.put("workProgStatCd", "002002");
workAprvHst_main.put("rvAprvDivVal", "1");
workAprvHst_main.put("aprvrId", getMaster_id);
workAprvHst_main.put("aprvDate", c_date);
workAprvHst_main.put("rvOponCtt", "");
workAprvHst_main.put("aprvYn", "N");//승인
workAprvHst_main.put("frstRegDate", c_date);
workAprvHst_main.put("frstRegUserId", getMaster_id);
workAprvHst_main.put("lastChgDate", "");
workAprvHst_main.put("lastChgUserId", "");
workAprvHst.add(workAprvHst_main);
jsonObject.put("workAprvHst", workAprvHst);
//작업 부가정보
JSONObject workAddInfo = new JSONObject();
workAddInfo.put("lnkgEqpCtt", "");
workAddInfo.put("refcOrgCd", "");
workAddInfo.put("chrInfuYn", "N");
workAddInfo.put("roamLnkgYn", "N");
workAddInfo.put("testEndNotiYn", "");
// workAprvHst_main.put("cmdregYn", "");
// workAprvHst_main.put("cmdInclYn", "");
workAddInfo.put("eqpPkgVerVal", p21Model.getVer());
if("F".equals(p21Model.getVer_gubun())){
workAddInfo.put("eqpPkgDivCd", "F");
}else{
workAddInfo.put("eqpPkgDivCd", "P");
}
workAddInfo.put("workOnrCd", "");
workAddInfo.put("copWorkMeansCd", "");
workAddInfo.put("frstRegDate", c_date);
workAddInfo.put("frstRegUserId", frstRegUserId);
workAddInfo.put("lastChgDate", "");
workAddInfo.put("lastChgUserId", "");
jsonObject.put("workAddInfo", workAddInfo);
// 첨부파일
JSONArray attchFile = new JSONArray();
WorkSystemModel fileModel = new WorkSystemModel();
fileModel.setMaster_file_id(p21Model.getMaster_file_id());
fileModel.setFile_name("AccPKGStd");
int file_tango = 0;
if(null != workSystemService.readFileTango(fileModel)){
List<WorkSystemModel>readFilecvtP = workSystemService.readFileTango(fileModel);
for(WorkSystemModel cvtP : readFilecvtP){
file_tango++;
JSONObject attchFile_p = new JSONObject();
attchFile_p.put("atflClCd", "001");
attchFile_p.put("atflNm", cvtP.getFile_org_name());
attchFile_p.put("fileUrl", "http://pkms.sktelecom.com/");
attchFile_p.put("frstRegDate", c_date);
attchFile_p.put("frstRegUserId", frstRegUserId);
attchFile_p.put("lastChgDate", "");
attchFile_p.put("lastChgUserId", "");
attchFile.add(attchFile_p);
}
}
fileModel.setFile_name("AccSvtResult");
if(null != workSystemService.readFileTango(fileModel)){
List<WorkSystemModel>readFilecvtS = workSystemService.readFileTango(fileModel);
for(WorkSystemModel cvtS : readFilecvtS){
file_tango++;
JSONObject attchFile_s = new JSONObject();
attchFile_s.put("atflClCd", "002");
attchFile_s.put("atflNm", cvtS.getFile_org_name());
attchFile_s.put("fileUrl", "http://pkms.sktelecom.com/");
attchFile_s.put("frstRegDate", c_date);
attchFile_s.put("frstRegUserId", frstRegUserId);
attchFile_s.put("lastChgDate", "");
attchFile_s.put("lastChgUserId", "");
attchFile.add(attchFile_s);
}
}
fileModel.setFile_name("AccSvtSupstate");
if(null != workSystemService.readFileTango(fileModel)){
List<WorkSystemModel>readFilecvtR = workSystemService.readFileTango(fileModel);
for(WorkSystemModel cvtR : readFilecvtR){
file_tango++;
JSONObject attchFile_r = new JSONObject();
attchFile_r.put("atflClCd", "003");
attchFile_r.put("atflNm", cvtR.getFile_org_name());
attchFile_r.put("fileUrl", "http://pkms.sktelecom.com/");
attchFile_r.put("frstRegDate", c_date);
attchFile_r.put("frstRegUserId", frstRegUserId);
attchFile_r.put("lastChgDate", "");
attchFile_r.put("lastChgUserId", "");
attchFile.add(attchFile_r);
}
}
if(file_tango == 0){
JSONObject attchFile_main = new JSONObject();
attchFile_main.put("atflClCd", "");
attchFile_main.put("atflNm", "");
attchFile_main.put("fileUrl", "");
attchFile_main.put("frstRegDate", "");
attchFile_main.put("frstRegUserId", "");
attchFile_main.put("lastChgDate", "");
attchFile_main.put("lastChgUserId", "");
attchFile.add(attchFile_main);
}
jsonObject.put("attchFile", attchFile);
// 장비정보
JSONArray eqpInfo = new JSONArray();
WorkSystemModel eqModel = new WorkSystemModel();
eqModel.setPkg_seq(pkg21Model.getPkg_seq());
eqModel.setWork_gubun(work_gubun);
eqModel.setOrd(max_ord);
List<WorkSystemModel>readEqAcc = workSystemService.readEqAcc(fileModel);
for(WorkSystemModel eq : readEqAcc){
JSONObject eqpInfo_main = new JSONObject();
eqpInfo_main.put("eqpId", eq.getTango_id());
eqpInfo_main.put("eqpNm", eq.getEquipment_name());
eqpInfo_main.put("svrIp", "");
eqpInfo_main.put("svrCnntAcntgId", "");
eqpInfo_main.put("rootAcntgUseYn", "");
eqpInfo_main.put("workRegrtId", "");
eqpInfo_main.put("oprrId", "");
eqpInfo_main.put("secureGwOprrId", "");
eqpInfo_main.put("frstRegDate", c_date);
eqpInfo_main.put("frstRegUserId", frstRegUserId);
eqpInfo_main.put("lastChgDate", "");
eqpInfo_main.put("lastChgUserId", "");
eqpInfo.add(eqpInfo_main);
}
jsonObject.put("eqpInfo", eqpInfo);
//장비정보안의 사용자정보, 명령어정보, 스크립트 정보는 존재하지 않음
//장비정보안의 사용자정보, 명령어정보, 스크립트 정보는 존재하지 않음
//장비정보안의 사용자정보, 명령어정보, 스크립트 정보는 존재하지 않음
//추가 필요 항목
JSONObject etcInfo = new JSONObject();
JSONObject etcItm1 = new JSONObject();
etcItm1.put("pkgSrno", pkg21Model.getPkg_seq());
etcInfo.put("etcItm1",etcItm1);
JSONObject etcItm2 = new JSONObject();
etcItm2.put("dmnCd", "101");
etcItm2.put("workPrjNm", pkg21Model.getTitle());
etcItm2.put("workDgr", "");
etcItm2.put("workSchdStaDt", w_sTime);
etcItm2.put("workSchdEndDt", w_eTime);
etcItm2.put("reqOrgId", tangoMainMdl.getMaster_team_code());
etcItm2.put("regOrgId", tangoMainMdl.getMaster_team_code());
etcItm2.put("regUserId", getMaster_id);
etcItm2.put("workDtlCtt", pkg21Model.getContent());
etcItm2.put("prjProgStatCd", "04");
etcItm2.put("workProgGdCd", "01");
if("주간".equals(pkg21Model.getAmpm())){
etcItm2.put("dytmWorkYn", "Y");
}else{
etcItm2.put("dytmWorkYn", "N");
}
etcItm2.put("workPurpCtt", "");
etcItm2.put("frstRegDate", c_date);
etcItm2.put("frstRegUserId", frstRegUserId);
etcInfo.put("etcItm2",etcItm2);
JSONArray etcItm3 = new JSONArray();
for(WorkSystemModel eq2 : readEqAcc){
JSONObject etcItm3_main = new JSONObject();
etcItm3_main.put("eqpId", eq2.getTango_id());
etcItm3_main.put("eqpNm", eq2.getEquipment_name());
etcItm3.add(etcItm3_main);
}
etcInfo.put("etcItm3",etcItm3);
jsonObject.put("etcInfo", etcInfo);
String url_seq = String.valueOf(seq);
String url = "https://openapistg.tango.sktelecom.com/tango-operation-owm-biz/workManagement/lnkgWorkInfo/workCmdInfo/PKMS/" + url_seq;
// String url = "https://openapi.tango.sktelecom.com/tango-operation-owm-biz/workManagement/lnkgWorkInfo/workCmdInfo/PKMS/" + url_seq;
String jason = jsonObject.toString();
System.out.println("=== TANGO DATA START====");
System.out.println(jason);
System.out.println("=== TANGO DATA END====");
//HTTP
HttpClient httpclient = HttpClients.createDefault();
HttpPost httpPost = new HttpPost(url);
StringEntity input = new StringEntity(jason, "UTF-8");
input.setContentType("application/json");
httpPost.setEntity(input);
//개발
httpPost.addHeader("X-Auth-token", "<KEY>");
//상용
httpPost.addHeader("X-Auth-token", "<KEY>"); // SECURITY: a hard-coded production API token was committed here (even though this method is commented out); it has been redacted — rotate the token and load it from secure configuration, never source.
httpPost.addHeader("X-Data-Type", "");
HttpResponse response = httpclient.execute(httpPost);
String serverResponse ="";
if(response.getStatusLine().getStatusCode() != 200){
logger.error("\n========Failed========\n HTTP error code : " + response.getStatusLine().getStatusCode() +
"\n HTTP error reason : " + response.getStatusLine().getReasonPhrase());
if(response.getStatusLine().getStatusCode() != 400)
logger.error(new String(("{\"error\":\""+response.getStatusLine().getStatusCode() + response.getStatusLine().getReasonPhrase() +"\"}").getBytes(), "UTF-8"));
}
BufferedReader br = new BufferedReader(
new InputStreamReader((response.getEntity().getContent()), "UTF-8"));
String output;
logger.debug("Output from Server .... \n");
while((output = br.readLine()) != null){
serverResponse += output;
logger.debug(output);
}
logger.debug(serverResponse);
//SPRING REST
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
//개발
headers.add("X-Auth-token", "<KEY>");
//상용
headers.add("X-Auth-token", "<KEY>");
headers.add("X-Data-Type", "");
HttpEntity param= new HttpEntity(JSONInput, headers);
RestTemplate restTemplate = new RestTemplate();
String result = restTemplate.postForObject(url, param, String.class);
System.out.println("=== TANGO RESULT====");
System.out.println(result);
System.out.println("=== TANGO RESULT====");
}*/
}
|
CloudBPM/SwinFlowCloud-CloudSide
|
fm/src/main/webapp/js/models/datetimeinput.js
|
/**
* @author <NAME> created 2017-08 and last updated at 10:21 on 2019-03-05
*/
function DateTimeInput() {
this.id = null;
this.parent = null; // parent Id
this.currOwner = null; // form Id
// basic properties
this.title = "日期时间";
this.placeholder = "请输入占位描述...";
this.help = "我的帮助及提示"; // description
this.required = 1; // 0: not required; 1: required;
this.tabIndex = "-1";
this.initValue = "";
this.readOnly = 0; // 0:false; 1:true; 如果只读,就只显示内容而不显示文本框
this.disabled = 0; // 0:false; 1:true
this.hidden = 0; // 0: visible; 1: invisible
// Three types: DateTime; Date; Time
this.datatype = "DateTime";
this.prependType = "Icon"; // No; Text; Icon prepend;
this.prepend = "fa-calendar-o"; //fa-clock-o
this.appendType = "No"; // No; Text; Icon prepend;
this.append = null;
this.classtypename = "DateTimeInput";
this.varId = null; // variable Id
this.ac = null; // accessControl, 0: read only; 1: writable
this.evn = 0; // environment: 0: design time; 1; runtime
this.rules = []; // list of rules between the associated components.
};
DateTimeInput.prototype = new UIComponent();
// for previewing
DateTimeInput.prototype.clone = function () {
var t = new DateTimeInput();
t.id = this.id;
t.parent = this.parent; // parent Id
t.currOwner = this.currOwner; // form Id
t.title = this.title;
t.placeholder = this.placeholder;
t.help = this.help; // description
t.required = this.required;
t.tabIndex = this.tabIndex;
t.initValue = this.initValue;
t.prependType = this.prependType; // No; Text; Icon prepend;
t.prepend = this.prepend;
t.datatype = this.datatype;
t.appendType = this.appendType; // No; Text; Icon prepend;
t.append = this.append;
t.readOnly = this.readOnly; // 0:false; 1:true
t.disabled = this.disabled; // 0:false; 1:true
t.hidden = this.hidden;
t.varId = this.varId;
t.ac = this.ac;
t.evn = this.evn;
t.toDomForHTML();
return t;
};
// for previewing
DateTimeInput.prototype.cloneRules = function (owner, old) {
this.f = owner;
var o = old.seekObjectByID(this.id);
if (o.rules.length > 0) {
for (var i = 0; i < o.rules.length; i++) {
this.rules.push(o.rules[i].clone(old));
}
}
};
// for previewing
DateTimeInput.prototype.toDomForHTML = function(parent) {
this.dom = document.createElement("DIV");
parent.appendChild(this.dom);
this.dom.id = this.id;
this.dom.className = "form-group";
this.dom.tabIndex = "-1";
this.updateDom();
return this.dom;
};
// Build the design-time DOM (form group with remove button and drag
// listeners) and render the component's contents into it.
DateTimeInput.prototype.toDom = function (parent) {
    this.toDomforFormGroup(parent);
    this.updateDom();
};
// (Re)build the component's inner DOM from its current properties.
// Clears existing children (keeping the designer's remove button "rm<id>"),
// then renders: the title label, the picker input (or a plain DIV when
// read-only), optional prepend/append input-group add-ons, and the help block.
DateTimeInput.prototype.updateDom = function () {
    while (this.dom.hasChildNodes()) { // clear dom, but keep the remove button
        if (this.dom.lastChild.id != "rm" + this.id) {
            this.dom.removeChild(this.dom.lastChild);
        } else if (this.dom.children.length == 1) {
            break;
        }
    }
    var titlelabel = document.createElement("LABEL");
    titlelabel.className = "control-label";
    this.dom.appendChild(titlelabel);
    var titleNode = document.createTextNode(this.title);
    titlelabel.appendChild(titleNode);
    var that = this;
    if (this.readOnly == 0) {
        var input = document.createElement("INPUT");
        input.id = "datetime" + this.id;
        input.className = "form-control";
        input.type = "button";
        input.style.textAlign = "left";
        // FIX: these branches were inverted. Per the constructor contract
        // (0: not required; 1: required) the red asterisk and the HTML
        // "required" attribute belong to required == 1.
        if (this.required == 1) { // required: mark with a red asterisk
            var requiredSpan = document.createElement("SPAN");
            requiredSpan.style.color = "red";
            requiredSpan.innerHTML = " *";
            titlelabel.appendChild(requiredSpan);
            input.setAttribute("required", "");
        } else { // not required (no-op on a fresh element)
            input.removeAttribute("required");
        }
        input.value = this.initValue;// write
        // Attach the matching jQuery picker widget; onClose copies the picked
        // value back into this.initValue.
        if (this.datatype == "DateTime") {
            $(input).datetimepicker({
                dateFormat: "yy-mm-dd",
                timeFormat: "HH:mm:ss",
                onClose: function () {
                    that.initValue = this.value;// read
                },
            });
        } else if (this.datatype == "Date") {
            $(input).datepicker({
                dateFormat: "yy-mm-dd",
                onClose: function () {
                    that.initValue = this.value;// read
                },
            });
        } else if (this.datatype == "Time") {
            $(input).timepicker({
                timeFormat: "HH:mm:ss",
                onClose: function () {
                    that.initValue = this.value;// read
                },
            });
        }
        input.tabIndex = this.tabIndex;
        if (this.disabled == 1)
            input.disabled = true;
        input.setAttribute("placeholder", this.placeholder);
        // The component object itself is the listener (handleEvent protocol).
        input.addEventListener("focus", this, false);
        input.addEventListener("blur", this, false);
        input.addEventListener("change", this, false);
        input.addEventListener("input", this, false);
        input.addEventListener("select", this, false);
        if (this.prependType != "No" || this.appendType != "No") {
            // Wrap the input in a bootstrap input-group with add-ons.
            var inputGroup = document.createElement("DIV");
            inputGroup.className = "input-group";
            this.dom.appendChild(inputGroup);
            if (this.prependType == "Text") {
                var groupPreAddOn = document.createElement("DIV");
                groupPreAddOn.className = "input-group-addon";
                inputGroup.appendChild(groupPreAddOn);
                var textPrepend = document.createTextNode("");
                groupPreAddOn.appendChild(textPrepend);
                if (this.prepend != null) {
                    textPrepend.textContent = this.prepend;
                } else {
                    textPrepend.textContent = "";
                }
            } else if (this.prependType == "Icon") {
                var groupPreAddOn = document.createElement("DIV");
                groupPreAddOn.className = "input-group-addon";
                inputGroup.appendChild(groupPreAddOn);
                var preIcon = document.createElement("I");
                groupPreAddOn.appendChild(preIcon);
                if (this.prepend != null) {
                    preIcon.className = "fa " + this.prepend;
                } else {
                    preIcon.className = "";
                }
            }
            inputGroup.appendChild(input);
            if (this.appendType == "Text") {
                var groupPostAddOn = document.createElement("DIV");
                groupPostAddOn.className = "input-group-addon";
                inputGroup.appendChild(groupPostAddOn);
                var textAppend = document.createTextNode("");
                groupPostAddOn.appendChild(textAppend);
                if (this.append != null) {
                    textAppend.textContent = this.append;
                } else {
                    textAppend.textContent = "";
                }
            } else if (this.appendType == "Icon") {
                var groupPostAddOn = document.createElement("DIV");
                groupPostAddOn.className = "input-group-addon";
                inputGroup.appendChild(groupPostAddOn);
                var postIcon = document.createElement("I");
                groupPostAddOn.appendChild(postIcon);
                if (this.append != null) {
                    postIcon.className = "fa " + this.append;
                } else {
                    postIcon.className = "";
                }
            }
        } else {
            this.dom.appendChild(input);
        }
        if (this.help != null && this.help != "") {
            var helpNode = document.createElement("SPAN");
            helpNode.className = "help-block";
            helpNode.textContent = this.help;
            helpNode.id = "hlp" + this.id;
            this.dom.appendChild(helpNode);
        }
    } else {
        // Read-only: render the value as plain text instead of an input.
        var input = document.createElement("DIV");
        input.id = "datetime" + this.id;
        input.innerText = this.initValue;
        this.dom.appendChild(input);
    }
    if (this.hidden == 0)
        this.dom.style.display = "";
    else
        this.dom.style.display = "none";
};
// Returns the component's current value (the raw initValue string).
// (Removed an unused local `format` variable.)
DateTimeInput.prototype.fetchValue = function () {
    return this.initValue;
};
// Pushes initValue back into the rendered input element at runtime.
DateTimeInput.prototype.updateValueforRuntime = function () {
    var o = document.getElementById("datetime" + this.id);
    o.value = this.initValue;
};
// Toggle edit mode on the rendered input: "0" makes it writable,
// anything else makes it read-only.
DateTimeInput.prototype.editable = function (e) {
    var input = document.getElementById("datetime" + this.id);
    input.readOnly = (e != "0");
};
// Show ("1") or hide (anything else) this component's DOM node.
// When an owning form (this.f) is known, resolve the node through it;
// otherwise fall back to a direct document lookup by id.
DateTimeInput.prototype.show = function (d) {
    var display = (d == "1") ? "" : "none";
    if (this.f != undefined) {
        this.f.seekObjectByID(this.id).dom.style.display = display;
    } else {
        document.getElementById(this.id).style.display = display;
    }
};
// Enable ("1") or disable (anything else) the rendered input element.
// Resolves through the owning form when available, else by element id.
DateTimeInput.prototype.enable = function (e) {
    var disabled = (e != "1");
    if (this.f != undefined) {
        this.f.seekObjectByID(this.id).input.disabled = disabled;
    } else {
        document.getElementById("datetime" + this.id).disabled = disabled;
    }
};
// Build the design-time container: a draggable bootstrap form-group with a
// remove ("x") button, wired to this component via the handleEvent protocol.
DateTimeInput.prototype.toDomforFormGroup = function (parent) {
    this.dom = document.createElement("DIV");
    parent.appendChild(this.dom);
    this.dom.id = this.id;
    this.dom.className = "form-group comp_outline";
    this.dom.tabIndex = "-1";
    this.dom.draggable = "true";
    // dragged component
    this.dom.addEventListener("dragstart", this, false);
    this.dom.addEventListener("drag", this, false);
    this.dom.addEventListener("dragend", this, false);
    this.dom.addEventListener("click", this, false);
    this.dom.addEventListener("focus", this, false);
    this.dom.addEventListener("blur", this, false);
    // Remove button; its id "rm<id>" is what updateDom() preserves on rebuild.
    var remove = document.createElement("A");
    remove.id = "rm" + this.id;
    remove.className = "remove";
    this.dom.appendChild(remove);
    var removeSpan = document.createElement("i");
    remove.appendChild(removeSpan);
    removeSpan.className = "glyphicon glyphicon-remove";
    removeSpan.addEventListener("click", this, false);
};
// Read the live value out of the rendered input, or null when the element
// has not been created (or has been removed).
DateTimeInput.prototype.toRealValue = function () {
    var input = document.getElementById("datetime" + this.id);
    return (input == null) ? null : input.value;
};
// Central DOM event dispatcher (EventListener handleEvent protocol):
// routes each event type to the matching do* handler; unknown types no-op.
DateTimeInput.prototype.handleEvent = function (e) {
    var handlers = {
        dragstart: this.doDragStart, // fired on the draggable source element
        drag: this.doDrag,
        dragend: this.doDragEnd,
        click: this.doClick,
        focus: this.doFocus,
        blur: this.doBlur,
        select: this.doSelect,
        change: this.doChange,
        input: this.doInput
    };
    var handler = handlers[e.type];
    if (handler) {
        handler.call(this, e);
    }
};
// Design-time click handler (evn == 0): a click on the remove icon deletes
// the row via the undo stack; any other click selects this component and
// refreshes the designer's property sheet. Runtime clicks (evn == 1) no-op.
DateTimeInput.prototype.doClick = function (evt) {
    if (this.evn == 0) {
        if (evt.target.className == "glyphicon glyphicon-remove") {
            if (map[this.currOwner] != null
                    && map[this.currOwner].currObject instanceof Form) {
                // Undoable removal of the enclosing row.
                map[this.currOwner].stack.execute(new FMRemoveRowCmd(
                        evt.target.parentNode.parentNode.id,
                        map[this.currOwner].currObject));
            }
        } else {
            evt.target.focus();
            map[this.currOwner].selected = this;
            map[this.currOwner].enableEditButtons();
            map[this.currOwner].setPropertySheet();
        }
        Utils.stopBubble(evt);
    } else if (this.evn == 1) {
    }
};
// Focus handler: run rules with behavior code 4, then highlight the input.
// NOTE(review): bgcache is assigned without declaration (implicit global),
// presumably shared designer state — confirm against the rest of the app.
DateTimeInput.prototype.doFocus = function (evt) {
    this.doHandle(evt, 4);
    bgcache = evt.target.style.backgroundColor;
    evt.target.style.backgroundColor = Utils.highLight();
    Utils.stopBubble(evt);
};
// Blur handler: run rules with behavior code 5 and restore the background.
DateTimeInput.prototype.doBlur = function (evt) {
    this.doHandle(evt, 5);
    evt.target.style.backgroundColor = bgcache;
    bgcache = null;
    Utils.stopBubble(evt);
};
// Change handler: design time runs rules (code 2); runtime records the value
// change on the undo stack.
DateTimeInput.prototype.doChange = function (evt) {
    if (this.evn == 0) {
        this.doHandle(evt, 2);
    } else if (this.evn == 1) {
        map[this.currOwner].stack.execute(new CLValueChangedCmd(this,
                "initValue", evt, this.currOwner));
    }
    Utils.stopBubble(evt);
};
// Select handler: run rules with behavior code 3.
DateTimeInput.prototype.doSelect = function (evt) {
    this.doHandle(evt, 3);
    Utils.stopBubble(evt);
};
// Input handler: run rules with behavior code 1.
DateTimeInput.prototype.doInput = function (evt) {
    this.doHandle(evt, 1);
    Utils.stopBubble(evt);
};
// Render a feedback message in the component's help block.
// cmd selects the bootstrap alert style: 10 success, 11 info, 12 warning,
// 13 danger. Any other cmd value is ignored.
DateTimeInput.prototype.message = function (evt, cmd, msg) {
    var alertClasses = {
        10: "alert-success",
        11: "alert-info",
        12: "alert-warning",
        13: "alert-danger"
    };
    var alertClass = alertClasses[cmd];
    if (alertClass == undefined) {
        return;
    }
    var t = document.getElementById("hlp" + this.id);
    if (t == null) { // lazily create the help block when missing
        t = document.createElement("SPAN");
        this.dom.appendChild(t);
        t.className = "help-block";
        t.id = "hlp" + this.id;
    }
    t.classList.add(alertClass);
    t.textContent = msg;
};
// Apply a propagation-rule action `cmd` to target component `o`.
// e is this component's input element; val is the evaluated expression value.
// Action codes: 1 hide, 2 show, 3 writable, 4 read-only, 5 enable,
// 6 disable, 7 clear, 8 update value, 9 verify (not implemented).
DateTimeInput.prototype.act = function (evt, cmd, o, e, val) {
    if (cmd == 1) {
        o.show("0"); // hidden
    } else if (cmd == 2) {
        o.show("1"); // show
    } else if (cmd == 3) {
        if (o.editable)
            o.editable("0"); // writable
    } else if (cmd == 4) {
        if (o.editable)
            o.editable("1"); // read only
    } else if (cmd == 5) {
        if (o.enable)
            o.enable("1"); // enabled
    } else if (cmd == 6) {
        if (o.enable)
            o.enable("0"); // disabled
    } else if (cmd == 7) { // clear both the DOM value and the model value
        e.value = "";
        o.initValue = "";
    } else if (cmd == 8) { // update: only text components accept a new value
        if (o instanceof SingleLineText || o instanceof MultipleLineText) {
            o.initValue = val;
            o.updateDom();
        }
    } else if (cmd == 9) { // verify
    }
};
// Execute a rule's "false branch" command (eCom*): target id 0 means show a
// message on this component; otherwise resolve the target by id and act on it.
// NOTE(review): eval() on rule expressions executes designer-authored code;
// acceptable only if rule definitions are trusted input.
DateTimeInput.prototype.fCmd = function (evt, r, e) {
    if (r.eComId == null || r.eComId == "") {
        return;
    }
    if (r.eComId == 0) {
        this.message(evt, r.eComAction, (r.eComExpressions == null ? ""
                : eval(r.eComExpressions.toRealValue())));
    } else {
        var o = map[this.currOwner].currObject.seekObjectByID(r.eComId);
        this.act(evt, r.eComAction, o, e, (r.eComExpressions == null ? ""
                : eval(r.eComExpressions.toRealValue())));
    }
};
// Execute a rule's "true branch" command (tCom*); mirror image of fCmd.
DateTimeInput.prototype.tCmd = function (evt, r, e) {
    if (r.tComId == null || r.tComId == "") {
        return;
    }
    if (r.tComId == 0) {
        this.message(evt, r.tComAction, (r.tComExpressions == null ? ""
                : eval(r.tComExpressions.toRealValue())));
    } else {
        var o = map[this.currOwner].currObject.seekObjectByID(r.tComId);
        this.act(evt, r.tComAction, o, e, (r.tComExpressions == null ? ""
                : eval(r.tComExpressions.toRealValue())));
    }
};
// Run every propagation rule whose behavior code matches `cmd`.
// Rules without a condition run both branches; conditional rules run the
// true branch (tCmd) or false branch (fCmd) depending on eval of the
// condition expression. Errors are logged, never propagated.
DateTimeInput.prototype.doHandle = function (evt, cmd) {
    try {
        var e = document.getElementById("datetime" + this.id);
        if (e == undefined || e == null) {
            return; // component is not rendered; nothing to do
        }
        for (var i = 0; i < this.rules.length; i++) {
            if (this.rules[i].behavior == cmd) {
                if (this.rules[i].conditions == null
                        || this.rules[i].conditions.isNull()) {
                    // execute two commands:
                    // the first command:
                    this.tCmd(evt, this.rules[i], e);
                    // the second command:
                    this.fCmd(evt, this.rules[i], e);
                } else {
                    if (eval(this.rules[i].conditions.toRealValue())) {
                        this.tCmd(evt, this.rules[i], e);
                    } else {
                        this.fCmd(evt, this.rules[i], e);
                    }
                }
            }
        }
    } catch (exception) {
        console.log(exception);
    }
};
// Drag start (design time only): dim the dragged node and stash its id in
// the shared `copyclip` so the drop target knows what is being moved.
DateTimeInput.prototype.doDragStart = function (evt) {
    if (this.evn == 0) {
        if (map[this.currOwner] != null
                && map[this.currOwner].currObject instanceof Form) {
            // this / e.target is the source node.
            evt.target.style.opacity = '0.7';
            evt.dataTransfer.effectAllowed = 'move';
            copyclip = evt.target.id; // critical
        }
    } else if (this.evn == 1) {
    }
    Utils.stopBubble(evt);
};
// Drag move: nothing to do beyond stopping propagation.
DateTimeInput.prototype.doDrag = function (evt) {
    Utils.stopBubble(evt);
};
// Drag end (design time only): restore the node's opacity.
DateTimeInput.prototype.doDragEnd = function (evt) {
    if (this.evn == 0) {
        if (map[this.currOwner] != null
                && map[this.currOwner].currObject instanceof Form) {
            evt.target.style.opacity = '1';
        }
        Utils.stopBubble(evt);
    } else if (this.evn == 1) {
    }
};
// Populate this component from its serialized JSON form.
// evn selects the environment (0: design time; 1: runtime); propagation
// rules are reconstructed as PropagateRule instances.
DateTimeInput.prototype.parseFromJSON = function (json, evn) {
    this.id = json.id;
    this.parent = json.parent; // parent Id
    this.currOwner = json.currOwner; // form Id
    this.title = json.title;
    this.placeholder = json.placeholder;
    this.help = json.help; // description
    this.required = json.required; // 0: not required; 1: required;
    this.tabIndex = json.tabIndex;
    this.initValue = json.initValue;
    this.readOnly = json.readOnly; // 0:false; 1:true
    this.disabled = json.disabled; // 0:false; 1:true
    this.hidden = json.hidden;
    this.datatype = json.datatype;
    this.prependType = json.prependType; // No; Text; Icon prepend;
    this.prepend = json.prepend;
    this.appendType = json.appendType; // No; Text; Icon prepend;
    this.append = json.append;
    this.varId = json.varId;
    this.ac = json.ac;
    this.evn = evn;
    // parsing propagation rules firstly.
    if (json.rules != undefined && json.rules != null && json.rules.length > 0) {
        for (var i = 0; i < json.rules.length; i++) {
            var r = new PropagateRule();
            r.parseFromJSON(json.rules[i]);
            this.rules.push(r);
        }
    }
};
// Resolve the expression strings of every propagation rule against `owner`.
DateTimeInput.prototype.parseExpressions = function (owner) {
    var rules = this.rules;
    for (var i = 0, n = rules.length; i < n; i++) {
        rules[i].parseExpressions(owner);
    }
};
// Node descriptor for the designer's component tree view.
// The `data` field encodes "<type label>|<current value>"; the label string
// is a runtime value and must not be changed.
DateTimeInput.prototype.toTree = function () {
    return {
        id: this.id,
        text: this.title,
        icon: "fa fa-file-text-o",
        data: "日期时间|" + (this.initValue == null ? "" : this.initValue),
    }
};
// Return the 1-based `number`-th propagation rule, or null when the index
// is out of range or not a whole number. (Replaces an O(n) linear scan with
// a direct, bounds-checked array index.)
DateTimeInput.prototype.fetchRuleByNum = function (number) {
    if (number >= 1 && number <= this.rules.length && number % 1 === 0) {
        return this.rules[number - 1];
    }
    return null;
};
// Display name for the designer UI (falls back to the default title).
DateTimeInput.prototype.toString = function () {
    return this.title == null ? "日期时间" : this.title;
};
// Append a propagation rule.
DateTimeInput.prototype.addRule = function (r) {
    this.rules.push(r);
};
// Insert a propagation rule at the given position.
DateTimeInput.prototype.insertRule = function (r, index) {
    this.rules.splice(index, 0, r);
};
// Remove the first occurrence of the given rule, if present.
DateTimeInput.prototype.removeRule = function (r) {
    var idx = this.rules.indexOf(r);
    if (idx !== -1) {
        this.rules.splice(idx, 1);
    }
};
|
ScalablyTyped/SlinkyTyped
|
m/maxim_mazurok__gapi_dot_client_dot_iap/src/main/scala/typingsSlinky/maximMazurokGapiClientIap/gapi/client/iap/ListIdentityAwareProxyClientsResponse.scala
|
<reponame>ScalablyTyped/SlinkyTyped
package typingsSlinky.maximMazurokGapiClientIap.gapi.client.iap
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
@js.native
trait ListIdentityAwareProxyClientsResponse extends StObject {
  /** Clients existing in the brand. */
  var identityAwareProxyClients: js.UndefOr[js.Array[IdentityAwareProxyClient]] = js.native
  /** A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted, there are no subsequent pages. */
  var nextPageToken: js.UndefOr[String] = js.native
}
/** Companion object: empty-literal factory plus a mutable-builder syntax for setting fields on [[ListIdentityAwareProxyClientsResponse]]. */
object ListIdentityAwareProxyClientsResponse {
  @scala.inline
  def apply(): ListIdentityAwareProxyClientsResponse = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[ListIdentityAwareProxyClientsResponse]
  }
  /** Fluent setters that mutate the underlying JS object in place. */
  @scala.inline
  implicit class ListIdentityAwareProxyClientsResponseMutableBuilder[Self <: ListIdentityAwareProxyClientsResponse] (val x: Self) extends AnyVal {
    @scala.inline
    def setIdentityAwareProxyClients(value: js.Array[IdentityAwareProxyClient]): Self = StObject.set(x, "identityAwareProxyClients", value.asInstanceOf[js.Any])
    @scala.inline
    def setIdentityAwareProxyClientsUndefined: Self = StObject.set(x, "identityAwareProxyClients", js.undefined)
    @scala.inline
    def setIdentityAwareProxyClientsVarargs(value: IdentityAwareProxyClient*): Self = StObject.set(x, "identityAwareProxyClients", js.Array(value :_*))
    @scala.inline
    def setNextPageToken(value: String): Self = StObject.set(x, "nextPageToken", value.asInstanceOf[js.Any])
    @scala.inline
    def setNextPageTokenUndefined: Self = StObject.set(x, "nextPageToken", js.undefined)
  }
}
|
nspotrepka/ofxPDSP
|
src/DSP/utility/LinToDB.h
|
// LinToDB.h
// ofxPDSP
// <NAME>, MIT License, 2016
#ifndef PDSP_UTIL_LINTODB_H_INCLUDED
#define PDSP_UTIL_LINTODB_H_INCLUDED
#include "../pdspCore.h"
namespace pdsp{
/**
@brief Converts linear values into dB values
*/
class LinToDB : public Formula {
public:
    /**
    @brief static function that performs linear to dB operation on a given value and returns the results
    @param[in] value value to convert
    */
    static float eval(float value);
private:
    // Per-sample (control-rate) linear->dB conversion; Formula override.
    float formula(const float &x) noexcept override;
    // Buffer-wide (audio-rate) linear->dB conversion; Formula override.
    void formulaAudioRate(float* &output, const float* &input, const int &bufferSize) noexcept override;
};
}//END NAMESPACE
#endif // LINTODB_H_INCLUDED
|
lboobl/TinyIM
|
Code/Client/ClientUI/Source/SkinLib/SkinEdit.h
|
<filename>Code/Client/ClientUI/Source/SkinLib/SkinEdit.h
#pragma once
#include "SkinManager.h"
// Scrollbar skinning is not supported.
// Image-background support is incomplete.
// The edit control must have the ES_MULTILINE style for SetMarginsEx to take
// effect; ES_MULTILINE apparently cannot be changed dynamically and can only
// be specified when the control is created.
// Skinned WTL edit control: draws custom backgrounds/borders/icon and
// supports placeholder ("default") text and custom inner margins.
class CSkinEdit : public CWindowImpl<CSkinEdit, CEdit>
{
public:
    CSkinEdit(void);
    ~CSkinEdit(void);
    BEGIN_MSG_MAP_EX(CSkinEdit)
        MSG_WM_CREATE(OnCreate)
        MSG_WM_NCCALCSIZE(OnNcCalcSize)
        MSG_WM_ERASEBKGND(OnEraseBkgnd)
        MSG_WM_NCPAINT(OnNcPaint)
        MSG_OCM_CTLCOLOREDIT(OnCtlColor)
        MSG_WM_LBUTTONDOWN(OnLButtonDown)
        MSG_WM_LBUTTONUP(OnLButtonUp)
        MSG_WM_MOUSEMOVE(OnMouseMove)
        MSG_WM_MOUSELEAVE(OnMouseLeave)
        MSG_WM_SETFOCUS(OnSetFocus)
        MSG_WM_KILLFOCUS(OnKillFocus)
        MSG_WM_SIZE(OnSize)
        MSG_WM_DESTROY(OnDestroy)
        DEFAULT_REFLECTION_HANDLER()
    END_MSG_MAP()
public:
    // Background image for the normal state; lpNinePart enables 9-slice scaling.
    BOOL SetBgNormalPic(LPCTSTR lpszFileName, RECT* lpNinePart = NULL);
    // Background image for the hot (hover/focus) state.
    BOOL SetBgHotPic(LPCTSTR lpszFileName, RECT* lpNinePart = NULL);
    BOOL SetIconPic(LPCTSTR lpszFileName);
    // When transparent, the parent's background DC is used for painting.
    void SetTransparent(BOOL bTransparent, HDC hBgDC);
    // Placeholder text shown while the control holds no user input.
    void SetDefaultText(LPCTSTR lpszText);
    BOOL IsDefaultText();
    void SetDefaultTextMode(BOOL bIsDefText);
    void SetMarginsEx(int nLeft, int nTop, int nRight, int nBottom); // set left/top/right/bottom inner margins
    void SetMultiLine(BOOL bMultiLine = TRUE);
    BOOL SubclassWindow(HWND hWnd);
private:
    int OnCreate(LPCREATESTRUCT lpCreateStruct);
    LRESULT OnNcCalcSize(BOOL bCalcValidRects, LPARAM lParam);
    BOOL OnEraseBkgnd(CDCHandle dc);
    void OnNcPaint(CRgnHandle rgn);
    HBRUSH OnCtlColor(CDCHandle dc, CEdit edit);
    void OnLButtonDown(UINT nFlags, CPoint point);
    void OnLButtonUp(UINT nFlags, CPoint point);
    void OnMouseMove(UINT nFlags, CPoint point);
    void OnMouseLeave();
    void OnSetFocus(CWindow wndOld);
    void OnKillFocus(CWindow wndFocus);
    void OnSize(UINT nType, CSize size);
    void OnDestroy();
    BOOL StartTrackMouseLeave();
    void CalcCenterRect(CRect& rcDest, int cx, int cy, CRect& rcCenter);
    void SetCenterInSingleLine();
    void DrawParentWndBg(HDC hDC);
private:
    CImageEx* m_lpBgImgN;   // normal-state background image
    CImageEx* m_lpBgImgH;   // hot-state background image
    CImageEx* m_lpIconImg;  // optional leading icon
    int m_nIconWidth;
    BOOL m_bFocus, m_bPress, m_bHover, m_bMouseTracking;
    BOOL m_bTransparent;
    HDC m_hBgDC;            // parent background DC used in transparent mode
    BOOL m_bIsDefText;      // currently showing the placeholder text
    CString m_strDefText;   // placeholder text
    TCHAR m_cPwdChar;       // saved password character
    BOOL m_bMultiLine;
};
|
fmahebert/ufo
|
src/ufo/filters/DifferenceCheck.h
|
/*
* (C) Copyright 2019 UCAR
*
* This software is licensed under the terms of the Apache Licence Version 2.0
* which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
*/
#ifndef UFO_FILTERS_DIFFERENCECHECK_H_
#define UFO_FILTERS_DIFFERENCECHECK_H_
#include <memory>
#include <ostream>
#include <string>
#include <vector>
#include "oops/util/ObjectCounter.h"
#include "oops/util/parameters/OptionalParameter.h"
#include "oops/util/parameters/RequiredParameter.h"
#include "ufo/filters/FilterBase.h"
#include "ufo/filters/QCflags.h"
#include "ufo/filters/Variable.h"
#include "ufo/utils/parameters/ParameterTraitsVariable.h"
namespace eckit {
class Configuration;
}
namespace ioda {
template <typename DATATYPE> class ObsDataVector;
class ObsSpace;
}
namespace ufo {
/// Parameters controlling the operation of the DifferenceCheck filter.
/// Parameters controlling the operation of the DifferenceCheck filter.
class DifferenceCheckParameters : public FilterParametersBase {
  OOPS_CONCRETE_PARAMETERS(DifferenceCheckParameters, FilterParametersBase)
 public:
  /// Name of the reference variable (YAML key "reference").
  oops::RequiredParameter<Variable> ref{"reference", this};
  /// Name of the test variable (YAML key "value").
  oops::RequiredParameter<Variable> val{"value", this};
  /// The filter will flag observations for which the difference `test - reference` is below
  /// `minvalue`.
  oops::OptionalParameter<float> minvalue{"minvalue", this};
  /// The filter will flag observations for which the difference `test - reference` is above
  /// `maxvalue`.
  oops::OptionalParameter<float> maxvalue{"maxvalue", this};
  /// If the `threshold` option is specified, the filter behaves as if `minvalue` was set to
  /// `-threshold` and `maxvalue` was set to `threshold` (overriding any values of these options
  /// specified independently).
  oops::OptionalParameter<float> threshold{"threshold", this};
};
/// A filter that compares the difference between a test variable and a reference variable and
/// flags observations for which this difference is outside of a prescribed range.
///
/// See DifferenceCheckParameters for the documentation of the parameters controlling this filter.
class DifferenceCheck : public FilterBase,
                        private util::ObjectCounter<DifferenceCheck> {
 public:
  /// The type of parameters accepted by the constructor of this filter.
  /// This typedef is used by the FilterFactory.
  typedef DifferenceCheckParameters Parameters_;
  static const std::string classname() {return "ufo::DifferenceCheck";}
  /// Construct from the obs space, parsed parameters, and shared QC-flag /
  /// obs-error vectors.
  DifferenceCheck(ioda::ObsSpace &, const Parameters_ &,
                  std::shared_ptr<ioda::ObsDataVector<int> >,
                  std::shared_ptr<ioda::ObsDataVector<float> >);
  ~DifferenceCheck();
 private:
  void print(std::ostream &) const override;
  /// Flag observations whose test-minus-reference difference is out of range.
  void applyFilter(const std::vector<bool> &, const Variables &,
                   std::vector<std::vector<bool>> &) const override;
  /// QC flag recorded for observations rejected by this filter.
  int qcFlag() const override {return QCflags::diffref;}
  Parameters_ parameters_;
};
} // namespace ufo
#endif // UFO_FILTERS_DIFFERENCECHECK_H_
|
lingxiankong/patrole
|
patrole_tempest_plugin/tests/api/compute/test_server_consoles_rbac.py
|
# Copyright 2017 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.lib import decorators
from patrole_tempest_plugin import rbac_rule_validation
from patrole_tempest_plugin.tests.api.compute import rbac_base
CONF = config.CONF
class ServerConsolesRbacTest(rbac_base.BaseV2ComputeRbacTest):
    """RBAC test for the Nova ``os-console-output`` policy (all versions)."""

    @classmethod
    def skip_checks(cls):
        """Skip the class when console output is disabled in config."""
        super(ServerConsolesRbacTest, cls).skip_checks()
        if not CONF.compute_feature_enabled.console_output:
            raise cls.skipException('Console output not available.')

    @classmethod
    def setup_credentials(cls):
        # The shared test server needs full network resources to boot.
        cls.set_network_resources(network=True, subnet=True, router=True)
        super(ServerConsolesRbacTest, cls).setup_credentials()

    @classmethod
    def resource_setup(cls):
        # One ACTIVE server shared by every test in this class.
        super(ServerConsolesRbacTest, cls).resource_setup()
        cls.server_id = cls.create_test_server(wait_until='ACTIVE')['id']

    @rbac_rule_validation.action(
        service="nova",
        rules=["os_compute_api:os-console-output"])
    @decorators.idempotent_id('90fd80f6-456c-11e7-a919-92ebcb67fe33')
    def test_get_console_output(self):
        """Fetching console output must be gated by the policy rule."""
        with self.override_role():
            self.servers_client.get_console_output(self.server_id)
class ServerConsolesMaxV25RbacTest(rbac_base.BaseV2ComputeRbacTest):
    """RBAC test for ``os-remote-consoles`` via the legacy VNC API (<= 2.5)."""

    max_microversion = '2.5'

    @classmethod
    def skip_checks(cls):
        """Skip the class when console output is disabled in config."""
        super(ServerConsolesMaxV25RbacTest, cls).skip_checks()
        if not CONF.compute_feature_enabled.console_output:
            raise cls.skipException('Console output not available.')

    @classmethod
    def setup_credentials(cls):
        # The shared test server needs full network resources to boot.
        cls.set_network_resources(network=True, subnet=True, router=True)
        super(ServerConsolesMaxV25RbacTest, cls).setup_credentials()

    @classmethod
    def resource_setup(cls):
        # One ACTIVE server shared by every test in this class.
        super(ServerConsolesMaxV25RbacTest, cls).resource_setup()
        cls.server_id = cls.create_test_server(wait_until='ACTIVE')['id']

    @rbac_rule_validation.action(
        service="nova",
        rules=["os_compute_api:os-remote-consoles"])
    @decorators.idempotent_id('b0a72c02-9b15-4dcb-b186-efe8753370ab')
    def test_get_vnc_console_output(self):
        """Fetching a noVNC console must be gated by the policy rule."""
        with self.override_role():
            self.servers_client.get_vnc_console(self.server_id, type="novnc")
class ServerConsolesV26RbacTest(rbac_base.BaseV2ComputeRbacTest):
    """RBAC test for ``os-remote-consoles`` via the unified API (>= 2.6)."""

    min_microversion = '2.6'
    max_microversion = 'latest'

    @classmethod
    def skip_checks(cls):
        """Skip the class when console output is disabled in config."""
        super(ServerConsolesV26RbacTest, cls).skip_checks()
        if not CONF.compute_feature_enabled.console_output:
            raise cls.skipException('Console output not available.')

    @classmethod
    def setup_credentials(cls):
        # The shared test server needs full network resources to boot.
        cls.set_network_resources(network=True, subnet=True, router=True)
        super(ServerConsolesV26RbacTest, cls).setup_credentials()

    @classmethod
    def resource_setup(cls):
        # One ACTIVE server shared by every test in this class.
        super(ServerConsolesV26RbacTest, cls).resource_setup()
        cls.server_id = cls.create_test_server(wait_until='ACTIVE')['id']

    @rbac_rule_validation.action(
        service="nova",
        rules=["os_compute_api:os-remote-consoles"])
    @decorators.idempotent_id('879597de-87e0-4da9-a60a-28c8088dc508')
    def test_get_remote_console_output(self):
        """Fetching a remote console must be gated by the policy rule."""
        with self.override_role():
            self.servers_client.get_remote_console(self.server_id,
                                                   "novnc", "vnc")
|
masker-Lee/explorer_es
|
src/main/java/com/nemtool/explorer/pojo/Accountmosaics.java
|
<reponame>masker-Lee/explorer_es
package com.nemtool.explorer.pojo;
import java.io.Serializable;
/**
 * Persistence POJO mapping the "accountmosaics" table: the quantity of a
 * given mosaic (asset) held by a NEM account address.
 */
public class Accountmosaics implements Serializable{
    /** Serialization version for this POJO. */
    private static final long serialVersionUID = 1L;
    // surrogate primary key
    private Integer id;
    // NEM account address
    private String address;
    // mosaic (asset) identifier
    private String mosaicid;
    // quantity of the mosaic held by the address
    private Long quantity;
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getAddress() {
        return address;
    }
    public void setAddress(String address) {
        // null-safe trim to normalize incoming values
        this.address = address == null ? null : address.trim();
    }
    public String getMosaicid() {
        return mosaicid;
    }
    public void setMosaicid(String mosaicid) {
        // null-safe trim to normalize incoming values
        this.mosaicid = mosaicid == null ? null : mosaicid.trim();
    }
    public Long getQuantity() {
        return quantity;
    }
    public void setQuantity(Long quantity) {
        this.quantity = quantity;
    }
    @Override
    public String toString() {
        return "Accountmosaics [id=" + id + ", address=" + address + ", mosaicid=" + mosaicid + ", quantity=" + quantity
                + "]";
    }
}
|
an9elkiss/api-super-manager
|
src/main/java/com/an9elkiss/api/manager/api/CodeReviewController.java
|
<filename>src/main/java/com/an9elkiss/api/manager/api/CodeReviewController.java
package com.an9elkiss.api.manager.api;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.an9elkiss.api.manager.command.CodeReviewCommand;
import com.an9elkiss.api.manager.command.CodeReviewInfoCommand;
import com.an9elkiss.api.manager.service.CodeReviewService;
import com.an9elkiss.commons.auth.spring.Access;
import com.an9elkiss.commons.command.ApiResponseCmd;
import com.google.gson.Gson;
@Controller
public class CodeReviewController implements CodeReviewApi{
@Autowired
private CodeReviewService codeReviewService;
private Gson gson = new Gson();
/**
* 创建codeReview信息接口
*/
@Override
@Access("API_CODE_REVIEW_CREATE")
@RequestMapping(value = "/codeReview",produces = { "application/json" },method = RequestMethod.POST)
public ResponseEntity<ApiResponseCmd<CodeReviewCommand>> saveCodeReviewInfo(CodeReviewCommand codeReviewCommand,BindingResult result){
return ResponseEntity.ok(codeReviewService.createCodeReviewInfo(codeReviewCommand));
}
/**
* 根据userId查到改用户所有的codeReview
*/
@Override
@Access("API_CODE_REVIEW_GET")
@RequestMapping(value = "/codeReview/{userId}",produces = { "application/json" },method = RequestMethod.GET)
public ResponseEntity<ApiResponseCmd<List<CodeReviewCommand>>> findCodeReviews(@PathVariable("userId") Integer userId){
return ResponseEntity.ok(codeReviewService.findCodeReviewsByUserId(userId));
}
/***
* 根据codeReviewId查到详细的codeReview信息
*/
@Override
@Access("API_CODE_REVIEW_INFO_GET")
@RequestMapping(value = "/codeReview/codeReviewInfo/{codeReviewId}",produces = { "application/json" },method = RequestMethod.GET)
public ResponseEntity<ApiResponseCmd<List<CodeReviewInfoCommand>>> findCodeReviewInfos(@PathVariable("codeReviewId") Integer codeReviewId){
return ResponseEntity.ok(codeReviewService.findCodeReviewInfosByCodeReviewId(codeReviewId));
}
/***
* 根据codeReviewId逻辑删除所有的codereview
*/
@Override
@Access("API_CODE_REVIEW_DELETE")
@RequestMapping(value = "/codeReview/delete/{codeReviewId}",produces = { "application/json" },method = RequestMethod.DELETE)
public ResponseEntity<ApiResponseCmd<Integer>> deleteCodeReviewInfos(@PathVariable("codeReviewId") Integer codeReviewId){
return ResponseEntity.ok(codeReviewService.deleteCodeReview(codeReviewId));
}
/***
* 修改codereview信息数据
*/
@Override
@Access("API_CODE_REVIEW_UPDATE")
@RequestMapping(value = "/codeReview/codeReviewInfo/update",produces = { "application/json" },method = RequestMethod.POST)
public ResponseEntity<ApiResponseCmd<CodeReviewCommand>> updateCodeReviewInfosByCodeReviewId(CodeReviewCommand codeReviewCommand,BindingResult result){
if (null == codeReviewCommand.getId()){
return ResponseEntity.ok(null);
}
codeReviewService.updateCodeReview(codeReviewCommand);
ApiResponseCmd<CodeReviewCommand> findCodeReviewsById = codeReviewService.findCodeReviewsById(codeReviewCommand.getId());
ApiResponseCmd<List<CodeReviewInfoCommand>> findCodeReviewInfosByCodeReviewId = codeReviewService.findCodeReviewInfosByCodeReviewId(codeReviewCommand.getId());
CodeReviewCommand codeReview = findCodeReviewsById.getData();
List<CodeReviewInfoCommand> data = findCodeReviewInfosByCodeReviewId.getData();
if (null == data || data.isEmpty()){
return ResponseEntity.ok(null);
}
String jsonstring = gson.toJson(data);
codeReview.setCodeReviewInfos(jsonstring);
return ResponseEntity.ok(findCodeReviewsById);
}
@Override
@Access("API_CODE_REVIEW_STATISTICAL_GROUP")
@RequestMapping(value = "/codeReview/statistical/group",produces = { "application/json" },method = RequestMethod.GET)
public ResponseEntity<ApiResponseCmd<Map<String, List<Integer>>>> statisticalCodeReviewByGroup(HttpServletRequest request){
return ResponseEntity.ok(codeReviewService.statisticalCodeReviewByGroup(request.getParameter("token")));
}
@Override
@Access("API_CODE_REVIEW_STATISTICAL_GROUP_INFO")
@RequestMapping(value = "/codeReview/statistical/group/info",produces = { "application/json" },method = RequestMethod.GET)
public ResponseEntity<ApiResponseCmd<Map<String, List<CodeReviewCommand>>>> statisticalCodeReviewByGroupInfo(HttpServletRequest request,Integer month,String groupManagerIds){
return ResponseEntity.ok(codeReviewService.statisticalCodeReviewByGroupInfo(request.getParameter("token"), month, groupManagerIds));
}
/**
 * Fetches a single code review by its id.
 *
 * @param id primary key of the code review
 * @return the review wrapped in the standard API envelope
 */
@Override
@Access("API_CODE_REVIEW_ID_GET")
@RequestMapping(value = "/codeReview",produces = { "application/json" },method = RequestMethod.GET)
public ResponseEntity<ApiResponseCmd<CodeReviewCommand>> findCodeReview(Integer id){
    return ResponseEntity.ok(codeReviewService.findCodeReviewsById(id));
}
}
|
hugonoro/my-dessert
|
MyDessert/Tests/specs/dessertsService.spec.js
|
// Jasmine unit tests for dessertsService (part of the app.dataServices layer).
// toggleCurrentlyBaking enforces a 5-recipe "currently baking" limit; these
// specs exercise the limit and the flags returned on success.
describe("app.dataServices", function () {
    "use strict";
    var $rootScope, dessertsService, sessionService;
    // Fake route provider and inject needed dependencies
    beforeEach(function () {
        module('app', function ($provide) {
            specHelper.fakeRouteProvider($provide);
        });
        inject(function (_$rootScope_, _dessertsService_, _sessionService_) {
            $rootScope = _$rootScope_;
            dessertsService = _dessertsService_;
            sessionService = _sessionService_;
        });
    });
    afterEach(function () {
    });
    describe('toggleCurrentlyBaking', function () {
        it('should return error message when trying to add more than 5 recipes to currently baking', function () {
            var response;
            // Fill the list to its 5-recipe limit, then attempt a sixth.
            dessertsService.toggleCurrentlyBaking(1);
            dessertsService.toggleCurrentlyBaking(2);
            dessertsService.toggleCurrentlyBaking(3);
            dessertsService.toggleCurrentlyBaking(4);
            dessertsService.toggleCurrentlyBaking(5);
            dessertsService.toggleCurrentlyBaking(6).then(function (result) {
                response = result;
            });
            // Flush the digest cycle so promise callbacks run.
            $rootScope.$apply();
            expect(response.Message).toBe("You have exceed the maximum number of recipes. Please unselect at least one before proceeding");
        });
        it('should return addToShopping flag true when is baking less than 5 and is not duplicate', function () {
            var response;
            dessertsService.toggleCurrentlyBaking(1);
            dessertsService.toggleCurrentlyBaking(2);
            dessertsService.toggleCurrentlyBaking(3);
            dessertsService.toggleCurrentlyBaking(6).then(function (result) {
                response = result;
            });
            $rootScope.$apply();
            expect(response.data.addToShoppingList).toBeTruthy();
        });
        it('should return isCurrentlyBaking flag true when is baking less than 5 and is not duplicate', function () {
            var response;
            dessertsService.toggleCurrentlyBaking(5);
            dessertsService.toggleCurrentlyBaking(6);
            dessertsService.toggleCurrentlyBaking(7);
            dessertsService.toggleCurrentlyBaking(9).then(function (result) {
                response = result;
            });
            $rootScope.$apply();
            expect(response.data.isCurrentlyBaking).toBeTruthy();
        });
    });
});
|
pm-deepak-dhage/prebid-server-java
|
src/main/java/org/prebid/server/handler/info/BidderDetailsHandler.java
|
<gh_stars>1-10
package org.prebid.server.handler.info;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.vertx.core.Handler;
import io.vertx.core.json.Json;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import io.vertx.ext.web.RoutingContext;
import org.prebid.server.bidder.BidderCatalog;
import org.prebid.server.util.HttpUtil;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Handles {@code /info/bidders/{bidderName}} requests: responds with the
 * json-encoded bidder metadata captured at construction time. Alias bidders
 * get an extra {@code "aliasOf"} property naming the bidder they alias;
 * unknown bidders get a 404.
 */
public class BidderDetailsHandler implements Handler<RoutingContext> {

    private static final Logger logger = LoggerFactory.getLogger(BidderDetailsHandler.class);

    /** Bidder name -> json-encoded bidder info, precomputed once at startup. */
    private final Map<String, String> bidderInfos;
    /** Catalog used to resolve aliases at request time; assigned once in the ctor, so final. */
    private final BidderCatalog bidderCatalog;

    public BidderDetailsHandler(BidderCatalog bidderCatalog) {
        this.bidderCatalog = Objects.requireNonNull(bidderCatalog);
        bidderInfos = createBidderInfos(bidderCatalog);
    }

    /**
     * Returns a map with bidder name as a key and json-encoded bidder info as a value.
     */
    private static Map<String, String> createBidderInfos(BidderCatalog bidderCatalog) {
        return bidderCatalog.names().stream()
                .collect(Collectors.toMap(Function.identity(),
                        bidderName -> Json.encode(bidderCatalog.bidderInfoByName(bidderName))));
    }

    @Override
    public void handle(RoutingContext context) {
        final String bidderName = context.request().getParam("bidderName");
        if (bidderInfos.containsKey(bidderName)) {
            final String bidderInfoAsString = bidderInfos.get(bidderName);
            final String response;
            if (bidderCatalog.isAlias(bidderName)) {
                try {
                    // Add alias to the existing json object
                    final ObjectNode node = (ObjectNode) Json.mapper.readTree(bidderInfoAsString);
                    node.set("aliasOf", new TextNode(bidderCatalog.nameByAlias(bidderName)));
                    response = Json.encode(node);
                } catch (IOException e) {
                    logger.warn("Error occurred while parsing bidder info for {0}", e, bidderName);
                    context.response()
                            .setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code())
                            .end("Parsing bidder info error");
                    return;
                }
            } else {
                response = bidderInfoAsString;
            }
            context.response()
                    .putHeader(HttpUtil.CONTENT_TYPE_HEADER, HttpHeaderValues.APPLICATION_JSON)
                    .end(response);
        } else {
            // Unknown bidder: empty 404 body.
            context.response()
                    .setStatusCode(HttpResponseStatus.NOT_FOUND.code())
                    .end();
        }
    }
}
|
play14team/play14-community-gatsby
|
src/components/Photos/PhotoGallery.js
|
<reponame>play14team/play14-community-gatsby
import React from 'react'
import {Link} from 'gatsby'
import Lightbox from 'react-image-lightbox'
import { GatsbyImage, getImage } from "gatsby-plugin-image"
const PhotoGallery = (props) => {
const [photoIndex, setPhotoIndex] = React.useState(0);
const [isOpenImage, setIsOpenImage] = React.useState(false);
const {images} = props
return (
<div className="gallery-area pt-100 pb-70">
<div className="container">
<div className="row">
{
images.map((image, i) => {
return (
<div key={`col${i}`} className="col-lg-4 col-md-6 col-sm-6">
<div key={`item${i}`} className="single-gallery-item">
<Link
key={`link${i}`}
to="#"
onClick={e => {
e.preventDefault();
setIsOpenImage(true);
setPhotoIndex(0);}
}
>
<GatsbyImage key={`image${i}`} image={getImage(image)} alt={image.name} />
</Link>
</div>
</div>
)
})
}
</div>
</div>
{/* Lightbox */}
{isOpenImage && (
<Lightbox
mainSrc={images[photoIndex].childImageSharp.gatsbyImageData.images.fallback.src}
nextSrc={images[(photoIndex + 1) % images.length].childImageSharp.gatsbyImageData.images.fallback.src}
prevSrc={images[(photoIndex + images.length - 1) % images.length].childImageSharp.gatsbyImageData.images.fallback.src}
onCloseRequest={() => setIsOpenImage(false)}
onMovePrevRequest={() =>
setPhotoIndex((photoIndex + images.length - 1) % images.length)
}
onMoveNextRequest={() =>
setPhotoIndex((photoIndex + 1) % images.length)
}
/>
)}
</div>
);
}
export default PhotoGallery
|
wuweiweiwu/babel
|
packages/babel-plugin-proposal-throw-expressions/test/fixtures/stupid-fixture-nesting-folder/function-sent-in-argument/output.js
|
<reponame>wuweiweiwu/babel
// NOTE(review): generated babel test fixture output — the exact bytes are the
// test oracle; confirm the fixture harness tolerates comments before keeping
// this note. Expected expansion of a throw-expression wrapping `function.sent`.
function* test() {
  (function (e) {
    throw e;
  })(new Error(function.sent));
}
|
jfoley-yw/scrabble
|
dqn/dqn_scrabble_environment.py
|
import os
from dqn.dqn_simulation import DQNSimulation
from scrabbler.player import Player
class DQNScrabbleObservation:
    """Plain container for a single environment observation.

    Bundles the board state, the list of currently available action indices,
    and a per-action mask (used to invalidate unavailable actions).
    """

    def __init__(self, state, actions, action_mask):
        # Store everything as-is; this class adds no behavior of its own.
        self.action_mask = action_mask
        self.actions = actions
        self.state = state
# An MDP environment for Scrabble
class DQNScrabbleEnvironment:
    """MDP-style environment wrapping a self-play Scrabble simulation.

    Actions are integer indices into the game dictionary (one index per word).
    Observations bundle the board, the indices of currently playable words,
    and an action mask (-inf for unplayable actions, 0 for playable ones).
    """

    def __init__(self):
        """Load the dictionary and build the word -> action-index mapping."""
        self.simulation = None
        self.num_actions = 0
        self.player = None
        self.current_possible_moves = None
        self.action_index_mapping = dict()
        script_path = os.path.dirname(__file__)
        dictionary_path = os.path.join(script_path, '../resources/wwf4/dictionary.txt')
        # Use a context manager so the file is closed even if reading fails,
        # and rstrip('\n') instead of line[:-1] so the final word is not
        # truncated when the file lacks a trailing newline.
        with open(dictionary_path, 'r') as dictionary_file:
            for line in dictionary_file:
                self.action_index_mapping[line.rstrip('\n')] = self.num_actions
                self.num_actions += 1

    def reset(self):
        """Start a fresh game; returns (observation, done).

        done is True immediately when the opening rack has no valid moves.
        """
        self.player = Player(None)
        # The same player object is used for both seats (self-play).
        self.simulation = DQNSimulation(self.player, self.player)
        observation = self.get_observation()
        done = (len(observation.actions) == 0)
        return observation, done

    def step(self, action):
        """Apply the move for `action`; returns (observation, reward, done, score).

        reward is the raw move score; done when the rack empties or no
        valid moves remain.
        """
        move = self.current_possible_moves[action]
        reward = move.score
        self.simulation.simulate_step(move)
        observation = self.get_observation()
        done = (self.simulation.is_rack_empty() or len(observation.actions) == 0)
        return (observation, reward, done, self.player.get_score())

    def get_observation(self):
        """Build the current observation and refresh the action->move cache."""
        game = self.simulation.game
        board = game.board
        valid_moves = game.find_valid_moves(self.player.get_rack())
        # -inf mask entries make invalid actions unselectable after adding
        # the mask to Q-values; valid entries contribute 0.
        action_mask = [float('-inf')] * self.num_actions
        actions = []
        self.current_possible_moves = dict()
        for move in valid_moves:
            action_index = self.action_index_mapping[move.word.lower()]
            action_mask[action_index] = 0
            actions.append(action_index)
            self.current_possible_moves[action_index] = move
        return DQNScrabbleObservation(board, actions, action_mask)
|
smagill/opensphere-desktop
|
open-sphere-base/core/src/main/java/io/opensphere/core/debug/package-info.java
|
/**
* A set of classes used to help debug a running instance of the application.
*/
package io.opensphere.core.debug;
|
BoostryJP/ibet-Prime
|
app/model/schema/file.py
|
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
import base64
from datetime import datetime
from typing import (
Optional,
List,
Dict,
Any
)
from pydantic import (
BaseModel,
validator,
Field
)
from config import MAX_UPLOAD_FILE_SIZE
from .types import ResultSet
############################
# REQUEST
############################
class UploadFileRequest(BaseModel):
    """Upload File schema (Request).

    `content` carries the file body as a Base64-encoded string; the decoded
    size must not exceed MAX_UPLOAD_FILE_SIZE.
    """
    relation: Optional[str] = Field(None, max_length=50)
    file_name: str = Field(..., max_length=256)
    content: str
    description: Optional[str] = Field(None, max_length=1000)

    @validator("content")
    def content_is_less_than_max_upload_file_size(cls, v):
        try:
            data = base64.b64decode(v)
        except Exception:
            raise ValueError("content is not a Base64-encoded string")
        # Bug fix: the original used `>=`, rejecting a file of exactly
        # MAX_UPLOAD_FILE_SIZE bytes even though the error message promises
        # "less than or equal to". `>` matches the documented contract.
        if len(data) > MAX_UPLOAD_FILE_SIZE:
            raise ValueError(
                f"file size(Base64-decoded size) must be less than or equal to {MAX_UPLOAD_FILE_SIZE}")
        return v

    class Config:
        @staticmethod
        def schema_extra(schema: Dict[str, Any], _) -> None:
            # Enrich the generated OpenAPI schema for the `content` field.
            notice_code_schema = schema["properties"]["content"]
            notice_code_schema["description"] = "Base64-encoded content.\n" \
                                                f"Max length of binary data before encoding is {MAX_UPLOAD_FILE_SIZE}."
############################
# RESPONSE
############################
class FileResponse(BaseModel):
    """File schema (Response).

    Metadata-only view of an uploaded file (no content body).
    """
    file_id: str
    issuer_address: str
    relation: Optional[str]
    file_name: str
    content_size: int  # decoded (binary) size in bytes
    description: Optional[str]
    created: datetime
class ListAllFilesResponse(BaseModel):
    """List All Files schema (Response): paginated result set plus file metadata."""
    result_set: ResultSet
    files: List[FileResponse]
class DownloadFileResponse(BaseModel):
    """Download File schema (Response).

    Same as FileResponse but includes the Base64-encoded file body.
    """
    file_id: str
    issuer_address: str
    relation: Optional[str]
    file_name: str
    content: str  # Base64-encoded body
    content_size: int  # decoded (binary) size in bytes
    description: Optional[str]

    class Config:
        @staticmethod
        def schema_extra(schema: Dict[str, Any], _) -> None:
            # Document the `content` encoding in the generated OpenAPI schema.
            notice_code_schema = schema["properties"]["content"]
            notice_code_schema["description"] = "Base64-encoded content"
|
uruzahe/carla
|
Co-Simulation/Sumo/sumo-1.7.0/src/microsim/MSEdgeControl.cpp
|
<reponame>uruzahe/carla
/****************************************************************************/
// Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo
// Copyright (C) 2001-2020 German Aerospace Center (DLR) and others.
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0/
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License 2.0 are satisfied: GNU General Public License, version 2
// or later which is available at
// https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
/****************************************************************************/
/// @file MSEdgeControl.cpp
/// @author <NAME>
/// @author <NAME>
/// @author <NAME>
/// @author <NAME>
/// @date Mon, 09 Apr 2001
///
// Stores edges and lanes, performs moving of vehicle
/****************************************************************************/
#include <config.h>
#include <iostream>
#include <queue>
#include <vector>
#include "MSEdgeControl.h"
#include "MSVehicleControl.h"
#include "MSGlobals.h"
#include "MSEdge.h"
#include "MSLane.h"
#include "MSVehicle.h"
#define PARALLEL_PLAN_MOVE
#define PARALLEL_EXEC_MOVE
//#define PARALLEL_CHANGE_LANES
//#define LOAD_BALANCING
// ===========================================================================
// member method definitions
// ===========================================================================
// Builds the per-lane usage table for all edges. Edges without a lane changer
// register only their first lane (haveNeighbors = false); edges with one
// register every lane and get a lane-change timestamp slot.
MSEdgeControl::MSEdgeControl(const std::vector< MSEdge* >& edges)
    : myEdges(edges),
      myLanes(MSLane::dictSize()),
      myWithVehicles2Integrate(MSGlobals::gNumSimThreads > 1),
      myLastLaneChange(MSEdge::dictSize()),
      myInactiveCheckCollisions(MSGlobals::gNumSimThreads > 1),
      myMinLengthGeometryFactor(1.) {
    // build the usage definitions for lanes
    for (MSEdge* const edge : myEdges) {
        const std::vector<MSLane*>& lanes = edge->getLanes();
        if (!edge->hasLaneChanger()) {
            const int pos = lanes.front()->getNumericalID();
            myLanes[pos].lane = lanes.front();
            myLanes[pos].amActive = false;
            myLanes[pos].haveNeighbors = false;
            myMinLengthGeometryFactor = MIN2(edge->getLengthGeometryFactor(), myMinLengthGeometryFactor);
        } else {
            for (MSLane* const l : lanes) {
                const int pos = l->getNumericalID();
                myLanes[pos].lane = l;
                myLanes[pos].amActive = false;
                myLanes[pos].haveNeighbors = true;
                myMinLengthGeometryFactor = MIN2(l->getLengthGeometryFactor(), myMinLengthGeometryFactor);
            }
            // -1 marks "no lane change performed yet" for this edge.
            myLastLaneChange[edge->getNumericalID()] = -1;
        }
    }
#ifdef HAVE_FOX
    // NOTE(review): pool sizing gates on gNumThreads while the rest of this
    // file gates on gNumSimThreads — confirm the distinction is intentional.
    if (MSGlobals::gNumThreads > 1) {
        while (myThreadPool.size() < MSGlobals::gNumThreads) {
            new WorkerThread(myThreadPool);
        }
    }
#endif
}
// Tears down the worker-thread pool when FOX threading was compiled in.
MSEdgeControl::~MSEdgeControl() {
#ifdef HAVE_FOX
    myThreadPool.clear();
#endif
}
void
MSEdgeControl::patchActiveLanes() {
for (std::set<MSLane*, ComparatorNumericalIdLess>::iterator i = myChangedStateLanes.begin(); i != myChangedStateLanes.end(); ++i) {
LaneUsage& lu = myLanes[(*i)->getNumericalID()];
// if the lane was inactive but is now...
if (!lu.amActive && (*i)->getVehicleNumber() > 0) {
// ... add to active lanes and mark as such
if (lu.haveNeighbors) {
myActiveLanes.push_front(*i);
} else {
myActiveLanes.push_back(*i);
}
lu.amActive = true;
}
}
myChangedStateLanes.clear();
}
// Plans the next move for every vehicle on every active lane. Lanes that
// became empty are deactivated and dropped from the active list in the same
// pass. With FOX and multiple sim threads the per-lane work is dispatched to
// the thread pool and joined at the end.
void
MSEdgeControl::planMovements(SUMOTime t) {
#ifdef LOAD_BALANCING
    // Reset the RNG load heap: (negative load, rng index) pairs so the
    // least-loaded RNG is always on top.
    myRNGLoad = std::priority_queue<std::pair<int, int> >();
    for (int i = 0; i < MSLane::getNumRNGs(); i++) {
        myRNGLoad.emplace(0, i);
    }
#endif
    for (std::list<MSLane*>::iterator i = myActiveLanes.begin(); i != myActiveLanes.end();) {
        const int vehNum = (*i)->getVehicleNumber();
        if (vehNum == 0) {
            // Empty lane: deactivate and erase (erase returns the next iterator).
            myLanes[(*i)->getNumericalID()].amActive = false;
            i = myActiveLanes.erase(i);
        } else {
#ifdef LOAD_BALANCING
            // Assign this lane to the currently least-loaded RNG.
            std::pair<int, int> minRNG = myRNGLoad.top();
            (*i)->setRNGIndex(minRNG.second);
            myRNGLoad.pop();
            minRNG.first -= vehNum;
            myRNGLoad.push(minRNG);
#endif
#ifdef HAVE_FOX
            if (MSGlobals::gNumSimThreads > 1) {
                // Parallel path: enqueue and continue without planning inline.
                myThreadPool.add((*i)->getPlanMoveTask(t), (*i)->getRNGIndex() % myThreadPool.size());
                ++i;
                continue;
            }
#endif
            (*i)->planMovements(t);
            ++i;
        }
    }
#ifdef HAVE_FOX
    if (MSGlobals::gNumSimThreads > 1) {
        myThreadPool.waitAll(false);
    }
#endif
}
void
MSEdgeControl::setJunctionApproaches(SUMOTime t) {
    // Let every active lane register its vehicles' junction approaches.
    for (MSLane* const activeLane : myActiveLanes) {
        activeLane->setJunctionApproaches(t);
    }
}
// Executes the previously planned moves on all active lanes (serially or via
// the thread pool), prunes lanes that became empty, refreshes length sums,
// removes finished vehicles, and finally re-integrates lanes that received
// new vehicles back into the active list.
void
MSEdgeControl::executeMovements(SUMOTime t) {
    // Snapshot the active set before pruning so every lane that was
    // processed this step still gets its length sum updated below.
    std::vector<MSLane*> wasActive(myActiveLanes.begin(), myActiveLanes.end());
    myWithVehicles2Integrate.clear();
#ifdef HAVE_FOX
#ifdef PARALLEL_EXEC_MOVE
    if (MSGlobals::gNumSimThreads > 1) {
#ifdef LOAD_BALANCING
        // Reset the RNG load heap (least-loaded RNG on top).
        myRNGLoad = std::priority_queue<std::pair<int, int> >();
        for (int i = 0; i < MSLane::getNumRNGs(); i++) {
            myRNGLoad.emplace(0, i);
        }
#endif
        for (MSLane* const lane : myActiveLanes) {
#ifdef LOAD_BALANCING
            std::pair<int, int> minRNG = myRNGLoad.top();
            lane->setRNGIndex(minRNG.second);
            myRNGLoad.pop();
            minRNG.first -= lane->getVehicleNumber();
            myRNGLoad.push(minRNG);
#endif
            myThreadPool.add(lane->getExecuteMoveTask(t), lane->getRNGIndex() % myThreadPool.size());
        }
        myThreadPool.waitAll(false);
    }
#endif
#endif
    for (std::list<MSLane*>::iterator i = myActiveLanes.begin(); i != myActiveLanes.end();) {
        // Serial path only: with parallel execution the work already
        // happened above, so this branch is skipped.
        if (
#ifdef PARALLEL_EXEC_MOVE
            MSGlobals::gNumSimThreads <= 1 &&
#endif
            (*i)->getVehicleNumber() > 0) {
            (*i)->executeMovements(t);
        }
        if ((*i)->getVehicleNumber() == 0) {
            myLanes[(*i)->getNumericalID()].amActive = false;
            i = myActiveLanes.erase(i);
        } else {
            ++i;
        }
    }
    for (MSLane* lane : wasActive) {
        lane->updateLengthSum();
    }
    MSNet::getInstance()->getVehicleControl().removePending();
    std::vector<MSLane*>& toIntegrate = myWithVehicles2Integrate.getContainer();
    std::sort(toIntegrate.begin(), toIntegrate.end(), ComparatorIdLess());
    /// @todo: sorting only needed to account for lane-ordering dependencies.
    //This should disappear when parallelization is working. Until then it would
    //be better to use ComparatorNumericalIdLess instead of ComparatorIdLess
    myWithVehicles2Integrate.unlock();
    for (MSLane* const lane : toIntegrate) {
        const bool wasInactive = lane->getVehicleNumber() == 0;
        lane->integrateNewVehicles();
        if (wasInactive && lane->getVehicleNumber() > 0) {
            // Lane just gained its first vehicle: reactivate it (lanes with
            // neighbors go to the front, as elsewhere).
            LaneUsage& lu = myLanes[lane->getNumericalID()];
            if (!lu.amActive) {
                if (lu.haveNeighbors) {
                    myActiveLanes.push_front(lane);
                } else {
                    myActiveLanes.push_back(lane);
                }
                lu.amActive = true;
            }
        }
    }
}
// Performs lane changing on every edge that has an active lane with
// neighbors, at most once per time step per edge. Relies on active lanes
// with neighbors being at the front of myActiveLanes (hence the early break).
void
MSEdgeControl::changeLanes(const SUMOTime t) {
    std::vector<MSLane*> toAdd;
#ifdef PARALLEL_CHANGE_LANES
    std::vector<const MSEdge*> recheckLaneUsage;
#endif
    // Flag lane-change computation for code that behaves differently there.
    MSGlobals::gComputeLC = true;
    for (const MSLane* const l : myActiveLanes) {
        if (myLanes[l->getNumericalID()].haveNeighbors) {
            const MSEdge& edge = l->getEdge();
            if (myLastLaneChange[edge.getNumericalID()] != t) {
                // First active lane of this edge seen this step.
                myLastLaneChange[edge.getNumericalID()] = t;
#ifdef PARALLEL_CHANGE_LANES
                if (MSGlobals::gNumSimThreads > 1) {
                    MSLane* lane = edge.getLanes()[0];
                    myThreadPool.add(lane->getLaneChangeTask(t), lane->getRNGIndex() % myThreadPool.size());
                    recheckLaneUsage.push_back(&edge);
                } else {
#endif
                    edge.changeLanes(t);
                    // Activate any lane of the edge that received vehicles.
                    for (MSLane* const lane : edge.getLanes()) {
                        LaneUsage& lu = myLanes[lane->getNumericalID()];
                        //if ((*i)->getID() == "disabled") {
                        //    std::cout << SIMTIME << " vehicles=" << toString((*i)->getVehiclesSecure()) << "\n";
                        //    (*i)->releaseVehicles();
                        //}
                        if (lane->getVehicleNumber() > 0 && !lu.amActive) {
                            toAdd.push_back(lane);
                            lu.amActive = true;
                        }
                    }
#ifdef PARALLEL_CHANGE_LANES
                }
#endif
            }
        } else {
            // Lanes without neighbors follow at the back; nothing left to do.
            break;
        }
    }
#ifdef PARALLEL_CHANGE_LANES
    if (MSGlobals::gNumSimThreads > 1) {
        // Join the workers, then redo the activation scan that the serial
        // path performed inline.
        myThreadPool.waitAll(false);
        for (const MSEdge* e : recheckLaneUsage) {
            for (MSLane* const l : e->getLanes()) {
                LaneUsage& lu = myLanes[l->getNumericalID()];
                if (l->getVehicleNumber() > 0 && !lu.amActive) {
                    toAdd.push_back(l);
                    lu.amActive = true;
                }
            }
        }
    }
#endif
    MSGlobals::gComputeLC = false;
    // Newly active lanes have neighbors, so they go to the front.
    for (std::vector<MSLane*>::iterator i = toAdd.begin(); i != toAdd.end(); ++i) {
        myActiveLanes.push_front(*i);
    }
    if (MSGlobals::gLateralResolution > 0) {
        // sort maneuver reservations
        for (LaneUsageVector::iterator it = myLanes.begin(); it != myLanes.end(); ++it) {
            (*it).lane->sortManeuverReservations();
        }
    }
}
// Runs collision detection on every active lane that requests it, plus any
// inactive lanes explicitly registered via checkCollisionForInactive().
void
MSEdgeControl::detectCollisions(SUMOTime timestep, const std::string& stage) {
    // Detections is made by the edge's lanes, therefore hand over.
    for (MSLane* lane : myActiveLanes) {
        if (lane->needsCollisionCheck()) {
            lane->detectCollisions(timestep, stage);
        }
    }
    if (myInactiveCheckCollisions.size() > 0) {
        for (MSLane* lane : myInactiveCheckCollisions.getContainer()) {
            lane->detectCollisions(timestep, stage);
        }
        // One-shot registrations: clear and release the container lock.
        myInactiveCheckCollisions.clear();
        myInactiveCheckCollisions.unlock();
    }
}
// Records that a lane received vehicles; patchActiveLanes() will activate it.
void
MSEdgeControl::gotActive(MSLane* l) {
    myChangedStateLanes.insert(l);
}
// Registers an inactive lane for a one-shot collision check in the next
// detectCollisions() call.
void
MSEdgeControl::checkCollisionForInactive(MSLane* l) {
    myInactiveCheckCollisions.insert(l);
}
void
MSEdgeControl::setAdditionalRestrictions() {
for (MSEdgeVector::const_iterator i = myEdges.begin(); i != myEdges.end(); ++i) {
const std::vector<MSLane*>& lanes = (*i)->getLanes();
for (std::vector<MSLane*>::const_iterator j = lanes.begin(); j != lanes.end(); ++j) {
(*j)->initRestrictions();
}
}
}
/****************************************************************************/
|
d0sadata/studio
|
contentcuration/contentcuration/frontend/shared/vuex/draggablePlugin/module/submodule/getters.js
|
<filename>contentcuration/contentcuration/frontend/shared/vuex/draggablePlugin/module/submodule/getters.js
import { bitMaskToObject, DraggableIdentityHelper } from '../utils';
import { DraggableFlags } from 'shared/vuex/draggablePlugin/module/constants';
/**
 * ID of the draggable currently being dragged within this submodule.
 * @return {String|null}
 */
export function activeDraggableId(state) {
  const { activeDraggable } = state;
  return activeDraggable.id;
}
/**
 * ID of the draggable currently hovered over, if any.
 * @return {String|null}
 */
export function hoverDraggableId(state) {
  const { hoverDraggable } = state;
  return hoverDraggable.id;
}
export function isHoverDraggableAncestor(state, getters) {
  /**
   * True when the given identity is an ancestor of the hovered draggable.
   * @param {Object} identity
   * @return {Boolean}
   */
  return ({ id, type }) => Boolean(getters.getHoverAncestor({ id, type }));
}
export function getHoverAncestor(state) {
  /**
   * @param {Object} match - An object with which it will test for match with ancestor
   */
  return match => {
    const helper = new DraggableIdentityHelper(state.hoverDraggable);
    return helper.findClosestAncestor(match);
  };
}
/**
 * Determines the section of which we should register potential placement of
 * the current draggable items.
 *
 * Returns a DraggableFlags bitmask combining a vertical (TOP/BOTTOM) and a
 * horizontal (LEFT/RIGHT) target side, or NONE when nothing is hovered.
 */
export function hoverDraggableTarget(state, getters, rootState) {
  let section = DraggableFlags.NONE;
  const { lastHoverDraggableId, hoverDraggableSection, lastHoverDraggableSection } = state;
  const { draggableDirection } = rootState.draggable;
  // If no section is being hovered over, so no target section should be displayed
  if (hoverDraggableSection === DraggableFlags.NONE) {
    return section;
  }
  // Get all booleans for hover section and direction flags
  const hoverSection = bitMaskToObject(hoverDraggableSection);
  const direction = bitMaskToObject(draggableDirection);
  // When the user has dragged from outside of the universe, or is dragging from the area of the
  // item that has just started dragging.
  if (!lastHoverDraggableId) {
    // If it's an entrance, we want to target the same section that the user
    // has entered through dragging
    // (^= toggles the flag in; section starts at NONE so this just sets it)
    section ^= hoverSection.top ? DraggableFlags.TOP : DraggableFlags.BOTTOM;
    section ^= hoverSection.left ? DraggableFlags.LEFT : DraggableFlags.RIGHT;
  } else {
    // We'll use the last hover section to determine if the user has changed directions
    // within a dropzone and therefore to retarget appropriately when they cross the
    // boundary of a section
    const lastHoverSection = bitMaskToObject(lastHoverDraggableSection);
    if (lastHoverSection.bottom || (!lastHoverSection.any && hoverSection.bottom && direction.up)) {
      section ^= DraggableFlags.TOP;
    } else if (
      lastHoverSection.top ||
      (!lastHoverSection.any && hoverSection.top && direction.down)
    ) {
      section ^= DraggableFlags.BOTTOM;
    }
    // Horizontal retargeting mirrors the vertical logic above.
    if (lastHoverSection.right || (hoverSection.right && direction.left)) {
      section ^= DraggableFlags.LEFT;
    } else if (lastHoverSection.left || (hoverSection.left && direction.right)) {
      section ^= DraggableFlags.RIGHT;
    }
  }
  return section;
}
|
renzog6/sae-entiy
|
src/main/java/ar/nex/jpa/UsuarioEventoJpaController.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ar.nex.jpa;
import ar.nex.entity.UsuarioEvento;
import java.io.Serializable;
import javax.persistence.Query;
import javax.persistence.EntityNotFoundException;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import ar.nex.entity.UsuarioHistorial;
import ar.nex.jpa.exceptions.NonexistentEntityException;
import ar.nex.jpa.exceptions.PreexistingEntityException;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
/**
 * Generated JPA CRUD controller for {@link UsuarioEvento}, managing the
 * bidirectional one-to-many relation to {@link UsuarioHistorial}. The merge
 * bookkeeping in create/edit keeps both sides of the relation consistent.
 *
 * @author Renzo
 */
public class UsuarioEventoJpaController implements Serializable {
    public UsuarioEventoJpaController(EntityManagerFactory emf) {
        this.emf = emf;
    }
    private EntityManagerFactory emf = null;
    /** Opens a fresh EntityManager; callers are responsible for closing it. */
    public EntityManager getEntityManager() {
        return emf.createEntityManager();
    }
    /**
     * Persists a new event, attaching its historial entries and re-pointing
     * each entry's owning side to this event.
     *
     * @throws PreexistingEntityException when an event with the same id already exists
     */
    public void create(UsuarioEvento usuarioEvento) throws PreexistingEntityException, Exception {
        if (usuarioEvento.getUsrHistorialList() == null) {
            usuarioEvento.setUsrHistorialList(new ArrayList<UsuarioHistorial>());
        }
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            // Replace detached historial entries with managed references.
            List<UsuarioHistorial> attachedUsrHistorialList = new ArrayList<UsuarioHistorial>();
            for (UsuarioHistorial usrHistorialListUsuarioHistorialToAttach : usuarioEvento.getUsrHistorialList()) {
                usrHistorialListUsuarioHistorialToAttach = em.getReference(usrHistorialListUsuarioHistorialToAttach.getClass(), usrHistorialListUsuarioHistorialToAttach.getIdHistorial());
                attachedUsrHistorialList.add(usrHistorialListUsuarioHistorialToAttach);
            }
            usuarioEvento.setUsrHistorialList(attachedUsrHistorialList);
            em.persist(usuarioEvento);
            // Re-point the owning side and detach each entry from its old event.
            for (UsuarioHistorial usrHistorialListUsuarioHistorial : usuarioEvento.getUsrHistorialList()) {
                UsuarioEvento oldEventoOfUsrHistorialListUsuarioHistorial = usrHistorialListUsuarioHistorial.getEvento();
                usrHistorialListUsuarioHistorial.setEvento(usuarioEvento);
                usrHistorialListUsuarioHistorial = em.merge(usrHistorialListUsuarioHistorial);
                if (oldEventoOfUsrHistorialListUsuarioHistorial != null) {
                    oldEventoOfUsrHistorialListUsuarioHistorial.getUsrHistorialList().remove(usrHistorialListUsuarioHistorial);
                    oldEventoOfUsrHistorialListUsuarioHistorial = em.merge(oldEventoOfUsrHistorialListUsuarioHistorial);
                }
            }
            em.getTransaction().commit();
        } catch (Exception ex) {
            if (findUsuarioEvento(usuarioEvento.getIdEvento()) != null) {
                throw new PreexistingEntityException("UsuarioEvento " + usuarioEvento + " already exists.", ex);
            }
            throw ex;
        } finally {
            if (em != null) {
                em.close();
            }
        }
    }
    /**
     * Merges changes into an existing event, diffing the old and new
     * historial lists to null-out removed entries and claim added ones.
     *
     * @throws NonexistentEntityException when the event no longer exists
     */
    public void edit(UsuarioEvento usuarioEvento) throws NonexistentEntityException, Exception {
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            UsuarioEvento persistentUsuarioEvento = em.find(UsuarioEvento.class, usuarioEvento.getIdEvento());
            List<UsuarioHistorial> usrHistorialListOld = persistentUsuarioEvento.getUsrHistorialList();
            List<UsuarioHistorial> usrHistorialListNew = usuarioEvento.getUsrHistorialList();
            List<UsuarioHistorial> attachedUsrHistorialListNew = new ArrayList<UsuarioHistorial>();
            for (UsuarioHistorial usrHistorialListNewUsuarioHistorialToAttach : usrHistorialListNew) {
                usrHistorialListNewUsuarioHistorialToAttach = em.getReference(usrHistorialListNewUsuarioHistorialToAttach.getClass(), usrHistorialListNewUsuarioHistorialToAttach.getIdHistorial());
                attachedUsrHistorialListNew.add(usrHistorialListNewUsuarioHistorialToAttach);
            }
            usrHistorialListNew = attachedUsrHistorialListNew;
            usuarioEvento.setUsrHistorialList(usrHistorialListNew);
            usuarioEvento = em.merge(usuarioEvento);
            // Entries dropped from the list: orphan them (set owning side null).
            for (UsuarioHistorial usrHistorialListOldUsuarioHistorial : usrHistorialListOld) {
                if (!usrHistorialListNew.contains(usrHistorialListOldUsuarioHistorial)) {
                    usrHistorialListOldUsuarioHistorial.setEvento(null);
                    usrHistorialListOldUsuarioHistorial = em.merge(usrHistorialListOldUsuarioHistorial);
                }
            }
            // Entries added to the list: claim them from their previous event.
            for (UsuarioHistorial usrHistorialListNewUsuarioHistorial : usrHistorialListNew) {
                if (!usrHistorialListOld.contains(usrHistorialListNewUsuarioHistorial)) {
                    UsuarioEvento oldEventoOfUsrHistorialListNewUsuarioHistorial = usrHistorialListNewUsuarioHistorial.getEvento();
                    usrHistorialListNewUsuarioHistorial.setEvento(usuarioEvento);
                    usrHistorialListNewUsuarioHistorial = em.merge(usrHistorialListNewUsuarioHistorial);
                    if (oldEventoOfUsrHistorialListNewUsuarioHistorial != null && !oldEventoOfUsrHistorialListNewUsuarioHistorial.equals(usuarioEvento)) {
                        oldEventoOfUsrHistorialListNewUsuarioHistorial.getUsrHistorialList().remove(usrHistorialListNewUsuarioHistorial);
                        oldEventoOfUsrHistorialListNewUsuarioHistorial = em.merge(oldEventoOfUsrHistorialListNewUsuarioHistorial);
                    }
                }
            }
            em.getTransaction().commit();
        } catch (Exception ex) {
            // Generated pattern: a blank message is taken as a hint that the
            // row vanished; re-check before rethrowing.
            String msg = ex.getLocalizedMessage();
            if (msg == null || msg.length() == 0) {
                Long id = usuarioEvento.getIdEvento();
                if (findUsuarioEvento(id) == null) {
                    throw new NonexistentEntityException("The usuarioEvento with id " + id + " no longer exists.");
                }
            }
            throw ex;
        } finally {
            if (em != null) {
                em.close();
            }
        }
    }
    /**
     * Deletes an event after detaching all of its historial entries.
     *
     * @throws NonexistentEntityException when no event with the id exists
     */
    public void destroy(Long id) throws NonexistentEntityException {
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            UsuarioEvento usuarioEvento;
            try {
                usuarioEvento = em.getReference(UsuarioEvento.class, id);
                // Touch the id to force the lazy reference to resolve.
                usuarioEvento.getIdEvento();
            } catch (EntityNotFoundException enfe) {
                throw new NonexistentEntityException("The usuarioEvento with id " + id + " no longer exists.", enfe);
            }
            List<UsuarioHistorial> usrHistorialList = usuarioEvento.getUsrHistorialList();
            for (UsuarioHistorial usrHistorialListUsuarioHistorial : usrHistorialList) {
                usrHistorialListUsuarioHistorial.setEvento(null);
                usrHistorialListUsuarioHistorial = em.merge(usrHistorialListUsuarioHistorial);
            }
            em.remove(usuarioEvento);
            em.getTransaction().commit();
        } finally {
            if (em != null) {
                em.close();
            }
        }
    }
    /** Returns all events. */
    public List<UsuarioEvento> findUsuarioEventoEntities() {
        return findUsuarioEventoEntities(true, -1, -1);
    }
    /** Returns a page of events. */
    public List<UsuarioEvento> findUsuarioEventoEntities(int maxResults, int firstResult) {
        return findUsuarioEventoEntities(false, maxResults, firstResult);
    }
    private List<UsuarioEvento> findUsuarioEventoEntities(boolean all, int maxResults, int firstResult) {
        EntityManager em = getEntityManager();
        try {
            CriteriaQuery cq = em.getCriteriaBuilder().createQuery();
            cq.select(cq.from(UsuarioEvento.class));
            Query q = em.createQuery(cq);
            if (!all) {
                q.setMaxResults(maxResults);
                q.setFirstResult(firstResult);
            }
            return q.getResultList();
        } finally {
            em.close();
        }
    }
    /** Finds an event by primary key, or null. */
    public UsuarioEvento findUsuarioEvento(Long id) {
        EntityManager em = getEntityManager();
        try {
            return em.find(UsuarioEvento.class, id);
        } finally {
            em.close();
        }
    }
    /** Counts all events via a criteria count query. */
    public int getUsuarioEventoCount() {
        EntityManager em = getEntityManager();
        try {
            CriteriaQuery cq = em.getCriteriaBuilder().createQuery();
            Root<UsuarioEvento> rt = cq.from(UsuarioEvento.class);
            cq.select(em.getCriteriaBuilder().count(rt));
            Query q = em.createQuery(cq);
            return ((Long) q.getSingleResult()).intValue();
        } finally {
            em.close();
        }
    }
}
|
bertptrs/adventofcode
|
2019/tests/test_day04.py
|
<gh_stars>10-100
import pytest
from aoc2019.day04 import valid
# Table of (password candidate, strict mode?, expected validity). From the
# cases, non-strict accepts any repeated digit while strict appears to require
# a run of exactly two equal digits (123444 fails, 111122 passes) — AoC 2019
# day 4 rules, presumably; confirm against aoc2019.day04.valid.
@pytest.mark.parametrize('number,strict,expected', [
    (122345, False, True),
    (111123, False, True),
    (111111, False, True),
    (223450, False, False),
    (123789, False, False),
    (112233, True, True),
    (123444, True, False),
    (111122, True, True)
])
def test_valid(number: int, strict: bool, expected: bool) -> None:
    """valid() classifies each candidate exactly as the table predicts."""
    assert valid(number, strict) == expected
|
zealoussnow/chromium
|
chromeos/services/secure_channel/public/cpp/client/fake_client_channel.cc
|
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromeos/services/secure_channel/public/cpp/client/fake_client_channel.h"
#include <vector>
#include "base/callback.h"
#include "base/memory/ptr_util.h"
#include "chromeos/services/secure_channel/public/mojom/secure_channel_types.mojom.h"
namespace chromeos {
namespace secure_channel {
FakeClientChannel::FakeClientChannel() = default;

// Runs the optional destructor callback (if a test set one) so tests can
// observe when the fake channel is destroyed.
FakeClientChannel::~FakeClientChannel() {
  if (destructor_callback_)
    std::move(destructor_callback_).Run();
}
// Test helper: runs the oldest queued GetConnectionMetadata callback with
// |connection_metadata| and removes it from the queue.
// Precondition: at least one callback is pending (front()/pop() on an empty
// queue is undefined behavior).
void FakeClientChannel::InvokePendingGetConnectionMetadataCallback(
    mojom::ConnectionMetadataPtr connection_metadata) {
  std::move(get_connection_metadata_callback_queue_.front())
      .Run(std::move(connection_metadata));
  get_connection_metadata_callback_queue_.pop();
}
// Queues |callback| instead of answering it; tests release queued callbacks
// via InvokePendingGetConnectionMetadataCallback().
void FakeClientChannel::PerformGetConnectionMetadata(
    base::OnceCallback<void(mojom::ConnectionMetadataPtr)> callback) {
  get_connection_metadata_callback_queue_.push(std::move(callback));
}
// Records the outgoing message instead of sending it; tests inspect
// |sent_messages_| and may run the stored closure to simulate delivery.
void FakeClientChannel::PerformSendMessage(const std::string& payload,
                                           base::OnceClosure on_sent_callback) {
  // Construct the (payload, callback) pair in place.
  sent_messages_.emplace_back(payload, std::move(on_sent_callback));
}
// Records the payload id and immediately reports a successful registration.
// |payload_files| and |file_transfer_update_callback| are intentionally
// ignored by this fake.
void FakeClientChannel::PerformRegisterPayloadFile(
    int64_t payload_id,
    mojom::PayloadFilesPtr payload_files,
    base::RepeatingCallback<void(mojom::FileTransferUpdatePtr)>
        file_transfer_update_callback,
    base::OnceCallback<void(bool)> registration_result_callback) {
  registered_file_payloads_.push_back(payload_id);
  std::move(registration_result_callback).Run(/*success=*/true);
}
} // namespace secure_channel
} // namespace chromeos
|
fisherxu/external-dns
|
vendor/github.com/exoscale/egoscale/cmd/exo/cmd/serviceoffering.go
|
package cmd
import (
"fmt"
"os"
"github.com/exoscale/egoscale"
"github.com/exoscale/egoscale/cmd/exo/table"
"github.com/spf13/cobra"
)
// serviceofferingCmd represents the "serviceoffering" command.
// It takes no flags or arguments; RunE just renders the offering table.
var serviceofferingCmd = &cobra.Command{
	Use:   "serviceoffering",
	Short: "List available services offerings with details",
	RunE: func(cmd *cobra.Command, args []string) error {
		return listServiceOffering()
	},
}
// listServiceOffering fetches every available service offering and prints a
// table of name, CPU configuration and RAM to stdout.
func listServiceOffering() error {
	serviceOffering, err := cs.ListWithContext(gContext, &egoscale.ServiceOffering{})
	if err != nil {
		return err
	}

	table := table.NewTable(os.Stdout)
	table.SetHeader([]string{"Name", "cpu", "ram"})

	for _, soff := range serviceOffering {
		f := soff.(*egoscale.ServiceOffering)

		// Memory is reported in MB; offerings of at least 1 GiB are shown
		// in GB (>>10, i.e. /1024). The previous thresholds (>1000 / <1000)
		// left exactly 1000 MB blank and rendered 1001-1023 MB as "0 GB".
		var ram string
		if f.Memory >= 1024 {
			ram = fmt.Sprintf("%d GB", f.Memory>>10)
		} else {
			ram = fmt.Sprintf("%d MB", f.Memory)
		}

		table.Append([]string{f.Name, fmt.Sprintf("%d× %d MHz", f.CPUNumber, f.CPUSpeed), ram})
	}
	table.Render()
	return nil
}
// getServiceOfferingByName resolves a service offering by UUID when the
// argument parses as one, otherwise by its display name.
func getServiceOfferingByName(name string) (*egoscale.ServiceOffering, error) {
	offering := &egoscale.ServiceOffering{}

	if id, err := egoscale.ParseUUID(name); err == nil {
		offering.ID = id
	} else {
		// Not a UUID: fall back to a lookup by name.
		offering.Name = name
	}

	if err := cs.GetWithContext(gContext, offering); err != nil {
		return nil, err
	}
	return offering, nil
}
// init registers "serviceoffering" as a subcommand of the "vm" command.
func init() {
	vmCmd.AddCommand(serviceofferingCmd)
}
|
mcopik/perf-taint
|
benchmarks/milc/milc_qcd-7.8.1/clover_dynamical/update.c
|
/********** update.c *** for clover fermions *************************/
/* MIMD version 7 */
/*
Update lattice.
Improved method for 1-2 flavors:
update U by (epsilon/2)*(1-Nf/2)
compute PHI
update U to epsilon/2
compute X
update H, full step
update U to next time needed
This routine does not refresh the antihermitian momenta.
This routine begins at "integral" time, with H and U evaluated
at same time.
*/
#include "cl_dyn_includes.h"
#ifdef HAVE_IEEEFP_H
#include <ieeefp.h> /* For "finite" */
#endif
void predict_next_psi(Real *oldtime,Real *newtime,Real *nexttime);
/* Perform "steps" microcanonical MD steps with clover fermions and, under
 * HMC_ALGORITHM, a final Metropolis accept/reject test.
 *
 * Returns the average CG iteration count per step, or -99 when steps == 0.
 *
 * Begins at "integral" time with H and U evaluated at the same time;
 * momenta are refreshed here (ranmom), but antihermitian momenta are not.
 */
int update() {
    int step, iters=0;
    Real final_rsq;
    Real cg_time; /* simulation time for last two CG's */
#ifdef PHI_ALGORITHM
    Real old_cg_time,next_cg_time;
    double starttrlogA, endtrlogA;
#endif
    /* tadpole-improved clover coefficient: kappa * c_sw / u0^3 */
    Real CKU0 = kappa*clov_c/(u0*u0*u0);
#ifdef HMC_ALGORITHM
    double startaction = 0, endaction, change;
    Real xrandom;
#endif
    /* printf("in update.c CKU0 = %f\n", CKU0); */

    /* refresh the momenta */
    ranmom();

    boundary_flip(MINUS); /* turn on antiperiodic b.c. */

    /* do "steps" microcanonical steps" */
    for(step=1; step <= steps; step++){

#ifdef PHI_ALGORITHM
        /* generate a pseudofermion configuration only at start*/
        if(step==1){
            compute_clov(gen_clov, CKU0);
#ifdef LU
            compute_clovinv(gen_clov, ODD);
            starttrlogA = gen_clov->trlogA;
#else
            starttrlogA = (double)0.0;
#endif /*LU*/
            grsource_w();
            /* sentinel times force the first psi prediction to start fresh */
            old_cg_time = cg_time = -1.0e6;
        }

#ifdef HMC_ALGORITHM
        /* find action */
        if(step==1){
            /* do conjugate gradient to get (Madj M)inverse * chi */
            iters += congrad_cl(niter,rsqmin,&final_rsq);
            cg_time = 0.0;
            /**checkmul();**/
            startaction=d_action();
            /* subtract the fermionic 2*Tr log A contribution (LU only) */
            startaction -= (double)2.0 * starttrlogA;
            /* printf("startaction= %g\n",startaction); */
            /* save links so a rejected trajectory can be rolled back */
            gauge_field_copy(F_OFFSET(link[0]), F_OFFSET(old_link[0]));
        }
#endif /*hmc*/
        if(step==1){
            free_this_clov(gen_clov);
        }

        /* update U's to middle of interval */
        update_u(0.5*epsilon);

        /* save conjugate gradient solution, predict next one */
        next_cg_time = ((Real)step-0.5)*epsilon;
        predict_next_psi(&old_cg_time,&cg_time,&next_cg_time);

#else /* "R" algorithm */
        /* first update the U's to special time interval */
        update_u(epsilon*(0.5-nflavors/4.0));

        /* generate a pseudofermion configuration */
        compute_clov(gen_clov, CKU0);
#ifdef LU
        compute_clovinv(gen_clov, ODD);
#endif /*LU*/
        grsource_w();
        free_this_clov(gen_clov);

        /* update U's to middle of interval */
        update_u(epsilon*nflavors/4.0);
#endif /* phi & R */

        /* do conjugate gradient to get (Madj M)inverse * chi */
        compute_clov(gen_clov, CKU0);
#ifdef LU
        compute_clovinv(gen_clov, ODD);
#endif /*LU*/
        iters += congrad_cl(niter,rsqmin,&final_rsq);
        cg_time = ((Real)step - 0.5)*epsilon;
        /**checkmul();**/

        /* now update H by full time interval */
        update_h(epsilon);
        free_this_clov(gen_clov);

        /* update U's by half time step to get to even time */
        update_u(epsilon*0.5);

        /* reunitarize the gauge field */
        boundary_flip(PLUS);
        reunitarize();
        boundary_flip(MINUS);
    } /* end loop over microcanonical steps */

#ifdef HMC_ALGORITHM
    /* find action */
    /* do conjugate gradient to get (Madj M)inverse * chi */
    next_cg_time = steps*epsilon;
    predict_next_psi(&old_cg_time,&cg_time,&next_cg_time);
    compute_clov(gen_clov, CKU0);
#ifdef LU
    compute_clovinv(gen_clov, ODD);
    endtrlogA = gen_clov->trlogA;
#else
    endtrlogA = (double)0.0;
#endif /*LU*/
    iters += congrad_cl(niter,rsqmin,&final_rsq);
    free_this_clov(gen_clov);
    cg_time = steps*epsilon;
    endaction=d_action();
    endaction -= (double)2.0 * endtrlogA;
    /* printf("endaction= %g\n",endaction); */
    change = endaction-startaction;

    /* Reject configurations giving overflow */
#ifndef HAVE_IEEEFP_H
    if(fabs((double)change)>1e20){
#else
    if(!finite((double)change)){
#endif
        if(this_node==0)printf(
            "WARNING: Correcting Apparent Overflow: Delta S = %e\n", change);
        change = 1.0e20;
    }

    /* decide whether to accept, if not, copy old link field back */
    /* careful - must generate only one random number for whole lattice */
    if(this_node==0)xrandom = myrand(&node_prn);
    broadcast_float(&xrandom);
    if( exp( -change ) < (double)xrandom ){
        if(steps > 0)
            gauge_field_copy( F_OFFSET(old_link[0]), F_OFFSET(link[0]) );
        if(this_node==0)printf("REJECT: delta S = %e\n", change);
    }
    else {
        if(this_node==0)printf("ACCEPT: delta S = %e\n", change);
    }
#endif /*HMC*/

    boundary_flip(PLUS);

    if(steps > 0)return (iters/steps);
    else return(-99);
}
#ifdef PHI_ALGORITHM
#ifdef LU
#define FORMYSITES FOREVENSITES
#else
#define FORMYSITES FORALLSITES
#endif
/* use linear extrapolation to predict next conjugate gradient solution */
/* only need even sites */
/* Linearly extrapolate the CG solution psi to the next simulation time:
 *   psi_next = psi + (psi - old_psi) * (t_next - t_new) / (t_new - t_old)
 * Only the sites the solver uses are touched (even sites under LU,
 * all sites otherwise -- see FORMYSITES above).
 * On exit, (*oldtime, *newtime) are advanced to (*newtime, *nexttime).
 */
void predict_next_psi(Real *oldtime,Real *newtime,Real *nexttime)
{
    register int i;
    register site *s;
    register Real x;
    wilson_vector tvec;

    /* extrapolation slope; zero when there is no time separation */
    if( *newtime != *oldtime ) x = (*nexttime-*newtime)/(*newtime-*oldtime);
    else x = 0.0;

    if( *oldtime < 0.0 ){
        /* first call (times start at -1e6): no history, just record psi */
        FORMYSITES(i,s){
            s->old_psi = s->psi;
        }
    }
    else {
        FORMYSITES(i,s){
            /* tvec = psi - old_psi; then psi += x * tvec */
            sub_wilson_vector( &(s->psi), &(s->old_psi), &tvec);
            s->old_psi = s->psi;
            scalar_mult_add_wvec( &(s->psi), &tvec,x, &(s->psi) );
        }
    }
    *oldtime = *newtime;
    *newtime = *nexttime;
}
#endif
|
consoles/dsa4js
|
app/chapter3/3.2/30.js
|
<filename>app/chapter3/3.2/30.js
const BST = require('../BST');

class BST30 extends BST {
  /**
   * Checks the BST ordering invariant for the subtree rooted at `node`:
   * every key must respect the (min, max) bounds inherited from ancestors.
   *
   * Bug fix: bounds were previously tested with truthiness (`min && ...`),
   * which silently skipped the comparison whenever a bound was a falsy key
   * such as 0 or ''. Compare against null/undefined instead, so falsy keys
   * are still validated.
   *
   * @param node subtree root (falsy for an empty subtree)
   * @param min  exclusive-ish lower bound, or null/undefined for none
   * @param max  exclusive-ish upper bound, or null/undefined for none
   * @returns {boolean} true iff the subtree is correctly ordered
   */
  isOrdered(node, min = null, max = null) {
    if (!node) return true;
    // `!= null` matches both null and undefined, but not falsy keys like 0.
    if (min != null && node.key < min) return false;
    if (max != null && node.key > max) return false;
    return this.isOrdered(node.left, min, node.key) &&
      this.isOrdered(node.right, node.key, max);
  }
}
|
HU-ACTS/sensor-module
|
src/MQTTController.cpp
|
<gh_stars>0
#include "MQTTController.hpp"
extern const uint8_t aws_root_ca_pem_start[] asm("_binary_aws_root_ca_pem_start");
extern const uint8_t aws_root_ca_pem_end[] asm("_binary_aws_root_ca_pem_end");
extern const uint8_t certificate_pem_crt_start[] asm("_binary_certificate_pem_crt_start");
extern const uint8_t certificate_pem_crt_end[] asm("_binary_certificate_pem_crt_end");
extern const uint8_t private_pem_key_start[] asm("_binary_private_pem_key_start");
extern const uint8_t private_pem_key_end[] asm("_binary_private_pem_key_end");
// Default constructor; connection setup happens later in connectMQTT().
MQTTController::MQTTController() {
}
// Publishes |value| as a JSON payload ({"key": ..., "value": ...}) on TOPIC
// with QoS1. A QoS1 ack timeout is only logged as a warning, not treated as
// a failure (the message may still have been delivered).
void MQTTController::publish(double value) {
    IoT_Error_t rc = FAILURE;
    char cPayload[200];

    paramsQOS1.qos = QOS1;
    paramsQOS1.payload = (void *) cPayload;
    paramsQOS1.isRetained = 0;

    // snprintf instead of sprintf: guards against overflowing cPayload if
    // MQTT_USER_KEY_CODE is longer than expected. (Also dropped an unused
    // loop counter variable.)
    snprintf(cPayload, sizeof(cPayload),
             "{\"key\" :\"%s\",\"value\" : \"%lf\"}", MQTT_USER_KEY_CODE, value);
    paramsQOS1.payloadLen = strlen(cPayload);

    rc = aws_iot_mqtt_publish(&client, TOPIC, TOPIC_LEN, &paramsQOS1);
    if (rc == MQTT_REQUEST_TIMEOUT_ERROR) {
        ESP_LOGW(TAG, "QOS1 publish ack not received.");
        rc = SUCCESS;
    }
}
// Disables auto-reconnect, then cleanly disconnects from the broker.
void MQTTController::disconnect(){
    IoT_Error_t rc = aws_iot_mqtt_autoreconnect_set_status(&client, false);
    if(SUCCESS != rc) {
        // Fixed: the message previously claimed "to true" although this
        // call disables auto-reconnect.
        ESP_LOGE(TAG, "Unable to set Auto Reconnect to false - %d", rc);
    }
    aws_iot_mqtt_disconnect(&client);
}
// Logs incoming publishes on subscribed topics (debug aid only).
// Fixed: binding a string literal to `char *` is ill-formed in C++11+;
// the tag must be `const char *`.
static void iot_subscribe_callback_handler(
    AWS_IoT_Client *pClient, char *topicName, uint16_t topicNameLen,IoT_Publish_Message_Params *params, void *pData) {
    const char *TAG = "TEST";
    ESP_LOGI(TAG, "Subscribe callback");
    ESP_LOGI(TAG, "%.*s\t%.*s", topicNameLen, topicName, (int) params->payloadLen, (char *)params->payload);
}
// SDK disconnect callback: if auto-reconnect is enabled the SDK retries by
// itself; otherwise attempt a single manual reconnect and log the outcome.
// Fixed: tag declared `const char *` (string literal to `char *` is
// ill-formed in C++11+).
void disconnectCallbackHandler(AWS_IoT_Client *pClient, void *data) {
    const char *TAG = "disconnect";
    ESP_LOGW(TAG, "MQTT Disconnect");
    IoT_Error_t rc = FAILURE;

    if(NULL == pClient) {
        return;
    }

    if(aws_iot_is_autoreconnect_enabled(pClient)) {
        ESP_LOGI(TAG, "Auto Reconnect is enabled, Reconnecting attempt will start now");
    } else {
        ESP_LOGW(TAG, "Auto Reconnect not enabled. Starting manual reconnect...");
        rc = aws_iot_mqtt_attempt_reconnect(pClient);
        if(NETWORK_RECONNECTED == rc) {
            ESP_LOGW(TAG, "Manual Reconnect Successful");
        } else {
            ESP_LOGW(TAG, "Manual Reconnect Failed - %d", rc);
        }
    }
}
// Initializes the AWS IoT MQTT client with the embedded TLS credentials and
// connects to the broker, retrying up to 10 times (1 s apart), then enables
// auto-reconnect.
// NOTE(review): if all 10 attempts fail, execution still falls through and
// enables auto-reconnect on an unconnected client -- confirm this is the
// intended recovery path.
void MQTTController::connectMQTT() {
    IoT_Error_t rc = FAILURE;

    IoT_Client_Init_Params mqttInitParams = iotClientInitParamsDefault;
    IoT_Client_Connect_Params connectParams = iotClientConnectParamsDefault;

    ESP_LOGI(TAG, "AWS IoT SDK Version %d.%d.%d-%s", VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, VERSION_TAG);

    mqttInitParams.enableAutoReconnect = false; // We enable this later below
    mqttInitParams.pHostURL = host;
    mqttInitParams.port = port;
    // Certificates are linked into the binary (see the asm symbols above).
    mqttInitParams.pRootCALocation = (const char *)aws_root_ca_pem_start;
    mqttInitParams.pDeviceCertLocation = (const char *)certificate_pem_crt_start;
    mqttInitParams.pDevicePrivateKeyLocation = (const char *)private_pem_key_start;
    mqttInitParams.mqttCommandTimeout_ms = 20000;
    mqttInitParams.tlsHandshakeTimeout_ms = 5000;
    mqttInitParams.isSSLHostnameVerify = true;
    mqttInitParams.disconnectHandler = disconnectCallbackHandler;
    mqttInitParams.disconnectHandlerData = NULL;

    rc = aws_iot_mqtt_init(&client, &mqttInitParams);
    if(SUCCESS != rc) {
        ESP_LOGE(TAG, "aws_iot_mqtt_init returned error : %d ", rc);
    }

    connectParams.keepAliveIntervalInSec = 10;
    connectParams.isCleanSession = true;
    connectParams.MQTTVersion = MQTT_3_1_1;
    connectParams.pClientID = client_id;
    connectParams.clientIDLen = (uint16_t) strlen(client_id);
    connectParams.isWillMsgPresent = false;

    // Retry the connect up to 10 times with a 1 s pause between attempts.
    for(int j = 0; j < 10; j++) {
        rc = aws_iot_mqtt_connect(&client, &connectParams);
        if(SUCCESS != rc) {
            ESP_LOGE(TAG, "Error(%d) connecting to %s:%d", rc, mqttInitParams.pHostURL, mqttInitParams.port);
            vTaskDelay(1000 / portTICK_RATE_MS);
        }else{
            break;
        }
    }

    rc = aws_iot_mqtt_autoreconnect_set_status(&client, true);
    if(SUCCESS != rc) {
        ESP_LOGE(TAG, "Unable to set Auto Reconnect to true - %d", rc);
    }
}
// Ensures the broker connection is torn down when the controller dies.
MQTTController::~MQTTController(){
    disconnect();
}
|
stealify/stealify
|
examples/bundle/repl/config.js
|
// REPL bundle configuration. All collections are intentionally empty here;
// consumers populate them before bundling.
const files = []
const modules = []
const capabilities = []

module.exports = { files, modules, capabilities }
|
skobow/IDS-ConfigurationManager
|
src/main/java/de/fraunhofer/isst/configmanager/configmanagement/entities/config/CustomBroker.java
|
package de.fraunhofer.isst.configmanager.configmanagement.entities.config;
import lombok.AccessLevel;
import lombok.Data;
import lombok.experimental.FieldDefaults;
import javax.persistence.*;
import java.net.URI;
import java.util.List;
/**
 * A custom broker entity, used to persist the broker in the internal database.
 *
 * <p>Getters/setters/equals/hashCode come from Lombok's {@code @Data};
 * {@code @FieldDefaults} makes all fields private.
 */
@Entity
@Data
@FieldDefaults(level = AccessLevel.PRIVATE)
public class CustomBroker {

    // Surrogate primary key generated by the persistence provider.
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    Long id;

    // Endpoint URI of the broker.
    URI brokerUri;

    // Human-readable broker name.
    String title;

    // Registration state of this connector at the broker.
    BrokerStatus brokerStatus;

    // IDs of the resources registered at this broker.
    @ElementCollection
    List<String> registeredResources;

    /** No-args constructor required by JPA. */
    public CustomBroker() {
    }

    /** Convenience constructor for a broker known only by its URI. */
    public CustomBroker(final URI brokerUri) {
        this.brokerUri = brokerUri;
    }
}
|
hrist-dina/legenda
|
src/vue/store/products/getters.js
|
export default {
    // Lookup table: item id -> index within `all`.
    itemsMap(state, getters) {
        return getters.all.reduce((map, item, index) => {
            map[item.id] = index
            return map
        }, {})
    },
    // Resolve a single product by id via the index map.
    one: (state, getters) => id => getters.all[getters.itemsMap[id]],
    all: state => state.items,

    // Lookup table: order-item id -> index within `allOrder`.
    itemsOrderMap(state, getters) {
        return getters.allOrder.reduce((map, item, index) => {
            map[item.id] = index
            return map
        }, {})
    },
    // Resolve a single order item by id via the index map.
    oneOrder: (state, getters) => id => getters.allOrder[getters.itemsOrderMap[id]],
    allOrder: state => state.itemsOrder
}
|
mclaughlin6464/pearce
|
bin/mcmc/pearce_mcmc_xigg_jk.py
|
# Python 2 driver script: MCMC over cosmology parameters using a Gaussian
# process emulator of xi_gg, with jackknife covariance.
from pearce.emulator import OriginalRecipe, ExtraCrispy, SpicyBuffalo
from pearce.mocks import cat_dict
from pearce.inference import run_mcmc_iterator
from scipy.optimize import minimize_scalar
import numpy as np
from os import path

#training_file = '/u/ki/swmclau2/des/xi_cosmo_trainer/PearceRedMagicXiCosmoFixedNd.hdf5'
training_file = '/scratch/users/swmclau2/xi_zheng07_cosmo_lowmsat/PearceRedMagicXiCosmoFixedNd.hdf5'

em_method = 'gp'
split_method = 'random'

# Emulator is trained at fixed redshift z = 0.
load_fixed_params = {'z':0.0}#, 'HOD': 0}

# Seed for reproducibility of the emulator downsampling below.
np.random.seed(0)
emu = SpicyBuffalo(training_file, method = em_method, fixed_params=load_fixed_params, custom_mean_function = 'linear', downsample_factor = 0.1)
print 'Metric:', emu._emulators[0].get_parameter_vector()
#Remember if training data is an LHC can't load a fixed set, do that after
fixed_params = {}#'f_c':1.0}#,'logM1': 13.8 }# 'z':0.0}

# Mock catalog used to generate the "observed" data vector.
cosmo_params = {'simname':'testbox', 'boxno': 0, 'realization':0, 'scale_factors':[1.0], 'system': 'sherlock'}
cat = cat_dict[cosmo_params['simname']](**cosmo_params)#construct the specified catalog!
cat.load(1.0, HOD='zheng07')

# Fiducial HOD point at which the mock data vector is generated.
emulation_point = [('logM0', 14.0), ('sigma_logM', 0.2),
                   ('alpha', 1.083),('logM1', 13.7)]#, ('logMmin', 12.233)]

em_params = dict(emulation_point)
em_params.update(fixed_params)
def add_logMmin(hod_params, cat):
    """Augment ``hod_params`` in place with the ``logMmin`` matching a fixed number density.

    Solves for the ``logMmin`` at which the catalog's analytic number density
    equals the target 1e-4, by bounded scalar minimization of the squared
    residual over logMmin in [12, 16].

    :param hod_params: dict of the HOD parameters other than ``logMmin``.
    :param cat: catalog object exposing ``calc_analytic_nd``.
    :return: None; ``hod_params['logMmin']`` is set to the solution.
    """
    target_nd = 1e-4

    def _residual_sq(log_m_min, params):
        # The dict is updated in place, mirroring how the solver probes it.
        params.update({'logMmin': log_m_min})
        return (cat.calc_analytic_nd(params) - target_nd) ** 2

    hod_params['logMmin'] = 13.0  # starting value; overwritten by the fit below
    result = minimize_scalar(_residual_sq, bounds=(12, 16), args=(hod_params,),
                             options={'maxiter': 100}, method='Bounded')
    # Assuming the bounded minimization succeeded.
    hod_params['logMmin'] = result.x
# Solve for logMmin at the fiducial point, then build the data vector.
add_logMmin(em_params, cat)

r_bins = np.logspace(-1.1, 1.6, 19)
rpoints = emu.scale_bin_centers

# 50 HOD realizations to estimate the shot-noise (population) covariance.
xi_vals = []
for i in xrange(50):
    cat.populate(em_params)
    xi_vals.append(cat.calc_xi(r_bins))

# TODO need a way to get a measurement cov for the shams
xi_vals = np.log10(np.array(xi_vals))

# One more population for the jackknife estimate of xi and its covariance.
cat.populate(em_params)
y10, cov10 = cat.calc_xi(r_bins, do_jackknife=True, jk_args = {'n_rands':10, 'n_sub':5})
#y10 = np.loadtxt('xi_gg_true_jk.npy')
#cov10 = np.loadtxt('xi_gg_cov_true_jk.npy')
y = np.log10(y10)
#y = np.mean(xi_vals, axis = 0)
shot_cov = np.cov(xi_vals, rowvar = False)
# Propagate the jackknife covariance into log10-space.
cov = np.log10(1+cov10/(np.outer(y10, y10))) # TODO check this is right?
np.savetxt('xi_gg_true_jk.npy', y)
#np.savetxt('xi_gg_cov_true_jk.npy', cov)
scov = np.loadtxt('xigg_scov_log.npy')
# TODOremoved jackknife
# Total covariance = jackknife + emulator + shot noise.
cov+=scov # add the emu covariance
cov+=shot_cov #
np.savetxt('xi_gg_cov_true_all.npy', cov)
#cov = np.diag(np.diag(cov))
#xi_vals = np.log10(np.array(xi_vals))
#y = np.loadtxt('xi_gg_true.npy') #xi_vals.mean(axis = 0) #take one example as our xi. could also use the mean, but lets not cheat.
#cov = np.loadtxt('xi_gg_cov_true.npy')#/np.sqrt(50)

# get cosmo params
# HOD parameters are fixed; the chain samples only cosmology parameters.
del em_params['logMmin']
cpv = cat._get_cosmo_param_names_vals()
cosmo_param_dict = {key: val for key, val in zip(cpv[0], cpv[1])}
#em_params.update( cosmo_param_dict)
fixed_params.update(em_params)
#fixed_params.update(cosmo_param_dict)
em_params = cosmo_param_dict

param_names = [k for k in em_params.iterkeys() if k not in fixed_params]

nwalkers = 10
nsteps = 100
nburn = 0

savedir = '/scratch/users/swmclau2/PearceMCMC/'
#chain_fname = path.join(savedir, '%d_walkers_%d_steps_chain_cosmo_zheng_xi_lowmsat.npy'%(nwalkers, nsteps ))
chain_fname = path.join(savedir, '%d_walkers_%d_steps_chain_cosmo_zheng_xi_jk_meanv2.npy'%(nwalkers, nsteps))

# Header row with the sampled parameter names; positions appended per step.
with open(chain_fname, 'w') as f:
    f.write('#' + '\t'.join(param_names)+'\n')

print 'starting mcmc'
np.random.seed(0)
for pos in run_mcmc_iterator([emu], param_names, [y], [cov], rpoints, fixed_params = fixed_params,nwalkers = nwalkers,\
        nsteps = nsteps, nburn = nburn, ncores = 1):#, resume_from_previous = chain_fname):
    with open(chain_fname, 'a') as f:
        np.savetxt(f, pos)
|
stefb965/dapp
|
lib/dapp/dimg/dapp/command/stages/cleanup_local.rb
|
<gh_stars>0
module Dapp
  module Dimg
    module Dapp
      module Command
        module Stages
          # Removes stale dimg stage images from the local Docker cache using
          # three policies: cache-version mismatch, absence from the remote
          # repo, and git commits that no longer exist.
          module CleanupLocal
            def stages_cleanup_local
              lock_repo(option_repo, readonly: true) do
                raise ::Dapp::Error::Command, code: :stages_cleanup_required_option unless stages_cleanup_option?
                proper_cache if proper_cache_version?
                stages_cleanup_by_repo if proper_repo_cache?
                proper_git_commit if proper_git_commit?
              end
            end

            protected

            # Removes local stage images whose cache version differs from the
            # current BUILD_CACHE_VERSION.
            def proper_cache
              log_proper_cache do
                lock("#{name}.images") do
                  remove_project_images begin
                    dapp_project_dimgstages.select do |image|
                      !actual_cache_project_dimgstages.map { |dimgstage| dimgstage[:id] }.include?(image[:id])
                    end
                  end
                end
              end
            end

            # Local stage images labeled with the current cache version
            # (memoized per command invocation).
            def actual_cache_project_dimgstages
              @actual_cache_project_images_ids ||= prepare_docker_images("-f \"label=dapp-cache-version=#{::Dapp::BUILD_CACHE_VERSION}\" #{stage_cache}")
            end

            # Removes stage images that are not ancestors of any dimg
            # currently present in the remote registry.
            def stages_cleanup_by_repo
              log_proper_repo_cache do
                lock("#{name}.images") do
                  registry = dimg_registry(option_repo)
                  repo_dimgs = repo_detailed_dimgs_images(registry)
                  dimgstages = clone_dapp_project_dimgstages
                  # Keep (remove from the deletion set) every stage reachable
                  # from a repo dimg through the parent chain.
                  repo_dimgs.each { |repo_dimg| except_image_id_with_parents(repo_dimg[:parent], dimgstages) }
                  # Only remove images older than 2 hours
                  dimgstages.delete_if do |dimgstage|
                    Time.now - dimgstage[:created_at] < 2 * 60 * 60
                  end unless ENV['DAPP_STAGES_CLEANUP_LOCAL_DISABLED_DATE_POLICY']
                  remove_project_images(dimgstages)
                end
              end
            end

            # Deep copy so the deletion set can be mutated without touching
            # the memoized list.
            def clone_dapp_project_dimgstages
              Marshal.load(Marshal.dump(dapp_project_dimgstages))
            end

            def except_image_id_with_parents(image_id, dimgstages)
              return unless (project_image = dapp_project_image_by_id(image_id))
              except_dapp_project_image_with_parents(project_image, dimgstages)
            end

            # Excludes an image, its artifact images and its whole parent
            # chain from the deletion set.
            def except_dapp_project_image_with_parents(image, dimgstages)
              dapp_project_image_artifacts_ids_in_labels(image).each { |aiid| except_image_id_with_parents(aiid, dimgstages) }
              i = image
              loop do
                array_hash_delete_if_by_id(dimgstages, i)
                break if (i = dapp_project_image_parent(i)).nil?
              end
            end

            def dapp_project_image_artifacts_ids_in_labels(image)
              select_dapp_artifacts_ids(dapp_project_image_labels(image))
            end

            # Removes stage images (and their descendants) whose recorded git
            # commits no longer exist in the corresponding repositories.
            def proper_git_commit
              log_proper_git_commit do
                lock("#{name}.images") do
                  unproper_images = []
                  dapp_project_dimgstages.each do |dimgstage|
                    dapp_project_image_labels(dimgstage).each do |repo_name, commit|
                      next if (repo = dapp_git_repositories[repo_name]).nil?
                      unproper_images.concat(dapp_project_image_with_children(dimgstage)) unless repo.commit_exists?(commit)
                    end
                  end
                  remove_project_images(unproper_images)
                end
              end
            end

            # Breadth-first collection of an image and all images that have
            # it anywhere in their parent chain.
            def dapp_project_image_with_children(image)
              children = []
              images = [image]
              loop do
                children.concat(dapp_project_images.select { |project_image| images.include?(project_image) })
                images.map! do |parent_image|
                  dapp_project_images
                    .select { |project_image| dapp_project_image_parent(project_image) == parent_image }
                end
                images.flatten!
                break if images.empty?
              end
              children
            end

            def dapp_project_image_parent(image)
              dapp_project_image_by_id(dapp_project_image_inspect(image)['Parent'])
            end
          end
        end
      end
    end
  end # Dimg
end # Dapp
|
DVSR1966/par4all
|
packages/PIPS/validation/Scilab/COLD-1.0.5.sub/stubs/src/scilab_rt_min_d0i2_d2.c
|
/* Scilab runtime stub: min(double scalar, int matrix) -> double matrix.
 *
 * NOTE(review): despite the "min" name this stub accumulates the SUM of
 * in1's elements and broadcasts it into out0 -- presumably a placeholder
 * used only for dataflow validation; confirm against the real runtime.
 *
 * Fixes: out0 was left uninitialized whenever in0 was zero, letting callers
 * read indeterminate memory; the output is now written on every path (zeros
 * when in0 is false, matching val0's initial value). Also removed a stray
 * empty statement (";;").
 */
void scilab_rt_min_d0i2_d2(double in0,
    int sin10, int sin11, int in1[sin10][sin11],
    int sout00, int sout01, double out0[sout00][sout01])
{
    int i, j;
    double val0 = 0;

    if (in0) {
        for (i = 0; i < sin10; ++i) {
            for (j = 0; j < sin11; ++j) {
                val0 += in1[i][j];
            }
        }
    }

    /* Broadcast the accumulated scalar into every cell of the output. */
    for (i = 0; i < sout00; ++i) {
        for (j = 0; j < sout01; ++j) {
            out0[i][j] = val0;
        }
    }
}
|
pmundt/openPDS
|
openpds/tastypie_mongodb/resources.py
|
<filename>openpds/tastypie_mongodb/resources.py
from bson import ObjectId
from pymongo import MongoClient, ASCENDING, DESCENDING
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.conf import settings
from tastypie.bundle import Bundle
from tastypie.resources import Resource
import pdb
from openpds.core.models import Profile
# Single module-level Mongo client shared by all resources; host/port fall
# back to pymongo defaults when absent from Django settings.
db = MongoClient(
    host=getattr(settings, "MONGODB_HOST", None),
    port=getattr(settings, "MONGODB_PORT", None)
)
class Document(dict):
    """Dict wrapper for MongoDB documents that exposes keys as attributes.

    Missing attributes resolve to ``None`` (mirroring ``dict.get``) instead
    of raising :class:`AttributeError`.
    """

    def __getattr__(self, name):
        # Equivalent to the original ``__getattr__ = dict.get`` alias:
        # only invoked when normal attribute lookup fails.
        return self.get(name)
class MongoDBResource(Resource):
    """
    A base resource that allows to make CRUD operations for mongodb.

    Subclasses must define ``collection`` on their ``Meta``. The collection
    is resolved per request from the datastore owner's profile, so each
    owner gets an isolated database.

    NOTE(review): uses legacy pymongo collection methods (``insert``,
    ``remove``, ``update``) -- these were removed in pymongo 4; confirm the
    pinned pymongo version before upgrading.
    """
    def get_collection(self, request):
        """
        Encapsulates collection name.

        Returns the owner-specific collection, or None when no
        ``datastore_owner__uuid`` is present in the request.
        """
        try:
            # If no owner is specified in the request, we use the default from settings for now
            # moving forward, we'll want to remove this fallback and require that the owner is specified
            # from the owner uuid, we're looking up the internal identifier from the corresponding profile
            #pdb.set_trace()
            database = None
            if (request and "datastore_owner__uuid" in request.GET):
                profile, created = Profile.objects.get_or_create(uuid = request.GET["datastore_owner__uuid"])
                database = profile.getDBName()
            return db[database][self._meta.collection] if database is not None else None
        except AttributeError:
            raise ImproperlyConfigured("Define a collection in your resource.")

    def get_filter_object_value(self, parts, value):
        '''
        Gets object that describes the operation to apply in a filter, as a mongodb filter object.
        A simple string value means equality. Compound objects are of the form { operation : value }
        For example, "endsin" becomes { "$regex" : "value$" }
        Note: this currently only handles filtering on top-level fields, not sub-fields, etc.
        '''
        # No operator implies equality
        if (len(parts) == 1):
            return value
        op = parts[1]
        if (op == "endsin"):
            return { "$regex" : value + "$" }
        # Unknown operators silently degrade to an equality match.
        return value

    def get_filter_object(self, request):
        # Builds a mongo filter dict from querystring params of the form
        # ``field`` or ``field__op``.
        filter_object = {}
        if (request == None):
            return filter_object

        for var in request.GET:
            if (var not in ["datastore_owner__uuid", "format", "bearer_token", "order_by"]):
                # Ignoring known required querystring parameters, build the filters
                value = request.GET[var]
                parts = var.split("__")
                name = parts[0]
                filter_object[name] = self.get_filter_object_value(parts, value)
        return filter_object

    def get_order_field_and_direction(self, request):
        # Parses tastypie-style ``order_by`` (leading '-' means descending).
        if (request is None or "order_by" not in request.GET):
            return None, None

        field_name = request.GET["order_by"]
        direction = ASCENDING

        if field_name[0] == "-":
            field_name = field_name[1:]
            direction = DESCENDING

        return field_name, direction

    def obj_get_list(self, request=None, **kwargs):
        """
        Maps mongodb documents to Document class.
        """
        filter_object = self.get_filter_object(request)
        list = self.get_collection(request).find(filter_object)
        order_field, direction = self.get_order_field_and_direction(request)
        if (order_field is not None):
            list.sort(order_field, direction)
        return map(Document, list)

    def obj_get(self, request=None, **kwargs):
        """
        Returns mongodb document from provided id.
        """
        return Document(self.get_collection(request).find_one({
            "_id": ObjectId(kwargs.get("pk"))
        }))

    def obj_create(self, bundle, request = None, **kwargs):
        """
        Creates mongodb document from POST data.
        """
        #pdb.set_trace()
        object_id = self.get_collection(request).insert(bundle.data)
        bundle.obj = self.obj_get(request, pk = object_id)
        return bundle

    def obj_update(self, bundle, request=None, **kwargs):
        """
        Updates mongodb document.
        """
        # $set merges the provided fields instead of replacing the document.
        self.get_collection(request).update({
            "_id": ObjectId(kwargs.get("pk"))
        }, {
            "$set": bundle.data
        })
        return bundle

    def obj_delete(self, request=None, **kwargs):
        """
        Removes single document from collection
        """
        self.get_collection(request).remove({ "_id": ObjectId(kwargs.get("pk")) })

    def obj_delete_list(self, request=None, **kwargs):
        """
        Removes all documents from collection
        """
        self.get_collection(request).remove()

    def get_resource_uri(self, item):
        """
        Returns resource URI for bundle or object.
        """
        if isinstance(item, Bundle):
            pk = item.obj._id
        else:
            pk = item._id
        return reverse("api_dispatch_detail", kwargs={
            "resource_name": self._meta.resource_name,
            "api_name": self._meta.api_name,
            "pk": pk
        })
|
jelmd/snakeyaml
|
src/test/java/org/yaml/snakeyaml/issues/issue137/SupplementaryCharactersTest.java
|
<gh_stars>1-10
/**
* Copyright (c) 2008, SnakeYAML
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml.issues.issue137;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
import org.yaml.snakeyaml.Yaml;
/**
* http://java.sun.com/developer/technicalArticles/Intl/Supplementary/
*/
public class SupplementaryCharactersTest extends TestCase {

    /** Target shape for the emoji.yaml fixture used in testLoadingEmoji. */
    public static class EmojiContainer {

        public Map<String, Map<String, Integer>> sizes;
        public Map<String, Map<String, List<String>>> values;
    }

    /** \U escape beyond the BMP loads as a surrogate pair. */
    public void testSupplementaryCharacter() {
        Yaml yaml = new Yaml();
        String parsed = (String) yaml.load("\"\\U0001f648\"");
        assertEquals("\ud83d\ude48", parsed);
        // System.out.println(data);
    }

    /** \U escape inside the BMP loads as a single char. */
    public void testBasicMultilingualPlane() {
        Yaml yaml = new Yaml();
        String parsed = (String) yaml.load("\"\\U00000041\"");
        assertEquals("A", parsed);
    }

    /**
     * Supplementary code points are dumped normally
     */
    public void testDumpSupplementaryCodePoint() throws UnsupportedEncodingException {
        String supplementary = "\ud83d\ude48";
        Yaml yaml = new Yaml();
        String output = yaml.dump(supplementary);
        assertEquals("\ud83d\ude48\n", output);
        // Round-trip: loading the dump recovers the original string.
        String binString = (String) yaml.load(output);
        assertEquals(supplementary, binString);
    }

    /**
     * Non-printable characters are escaped
     */
    public void testDumpNonPrintableCharacter() throws UnsupportedEncodingException {
        String supplementary = "\u0001";
        Yaml yaml = new Yaml();
        // Non-printable content is emitted as a !!binary scalar.
        String output = yaml.dump(supplementary);
        assertEquals("!!binary |-\n  AQ==\n", output);
        byte[] binary = (byte[]) yaml.load(output);
        String binString = new String(binary, "UTF-8");
        assertEquals(supplementary, binString);
    }

    /** A lone high surrogate is not a valid string and must be rejected. */
    public void testDumpSurrogateCharacter() throws UnsupportedEncodingException {
        String supplementary = "\ud83d";
        Yaml yaml = new Yaml();
        try {
            yaml.dump(supplementary);
            fail("dumping half code point without other half should fail");
        } catch (Exception e) {
            assertEquals("invalid string value has occurred", e.getMessage());
        }
    }

    public void testLoadSupplementaryCodePoint() {
        new Yaml().load("\"\ud83d\ude48\"\n");
    }

    public void testLoadSurrogateCharacter() {
        try {
            new Yaml().load("\"\ud83d\"\n");
            fail("separate surrogate characters are not printable");
        } catch (Exception e) {
            assertEquals("special characters are not allowed", e.getMessage());
        }
    }

    /*
     * This method tests loading of the document with a lot of
     * SupplementaryCharacters. Main purpose is to check that StreamReader
     * actually reads document fully, but not in one read (since file is bigger
     * than StreamReader buffer).
     */
    public void testLoadingEmoji() {
        InputStream input = this.getClass().getClassLoader()
                .getResourceAsStream("issues/emoji.yaml");
        EmojiContainer emoji = new Yaml().loadAs(input, EmojiContainer.class);
        // The sizes and values maps must describe exactly the same topics,
        // and each declared count must match the number of listed values.
        assertEquals(emoji.sizes.keySet(), emoji.values.keySet());
        for (Map.Entry<String, Map<String, Integer>> mainTopic : emoji.sizes.entrySet()) {
            String mainName = mainTopic.getKey();
            Map<String, Integer> subtopic2size = mainTopic.getValue();
            Map<String, List<String>> subtopic2values = emoji.values.get(mainName);
            assertEquals(subtopic2size.keySet(), subtopic2values.keySet());
            for (Map.Entry<String, Integer> subTopic : subtopic2size.entrySet()) {
                String subName = subTopic.getKey();
                assertEquals(subTopic.getValue().intValue(), subtopic2values.get(subName).size());
            }
        }
    }
}
|
chizidegit/douban-movie
|
dao/src/main/java/me/aaron/dao/api/retrofit/EmptyJsonConverterFactory.java
|
package me.aaron.dao.api.retrofit;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
 * Converter factory that delegates to Gson but maps zero-length response
 * bodies to {@code null} instead of letting Gson fail on empty input.
 */
public class EmptyJsonConverterFactory extends Converter.Factory {

    private final GsonConverterFactory delegate;

    public EmptyJsonConverterFactory(GsonConverterFactory gsonConverterFactory) {
        delegate = gsonConverterFactory;
    }

    @Override
    public Converter<?, RequestBody> requestBodyConverter(Type type, Annotation[] parameterAnnotations, Annotation[] methodAnnotations, Retrofit retrofit) {
        // Requests are passed straight through to Gson unchanged.
        return delegate.requestBodyConverter(type, parameterAnnotations, methodAnnotations, retrofit);
    }

    @Override
    public Converter<ResponseBody, ?> responseBodyConverter(Type type, Annotation[] annotations, Retrofit retrofit) {
        final Converter<ResponseBody, ?> gsonConverter =
                delegate.responseBodyConverter(type, annotations, retrofit);
        return new Converter<ResponseBody, Object>() {
            @Override
            public Object convert(ResponseBody body) throws java.io.IOException {
                // An empty body cannot be parsed as JSON; surface it as null.
                if (body.contentLength() == 0) {
                    return null;
                }
                return gsonConverter.convert(body);
            }
        };
    }
}
|
ujoychou/eco
|
net/protocol/ProtocolFamily.h
|
<reponame>ujoychou/eco
#ifndef ECO_NET_PROTOCOL_FAMILY_H
#define ECO_NET_PROTOCOL_FAMILY_H
/*******************************************************************************
@ name
@ function
@ exception
@ remark
--------------------------------------------------------------------------------
@ history ver 1.0 @
@ records: ujoy modifyed on 2016-11-12.
1.create and init this class.
--------------------------------------------------------------------------------
* copyright(c) 2016 - 2019, ujoy, reserved all right.
*******************************************************************************/
#include <eco/net/protocol/Protocol.h>
namespace eco{;
namespace net{;
////////////////////////////////////////////////////////////////////////////////
// Registry of wire protocols keyed by version, plus the shared head-decode
// routine that validates an incoming message's category, version and size.
class ECO_API ProtocolFamily
{
	ECO_OBJECT_API(ProtocolFamily);
public:
	/*@ register protocol.*/
	void add_protocol(IN Protocol*);

	/*@ get protocol by it's version.*/
	Protocol* protocol(int version) const;

	/*@ get protocol of lastest version.*/
	Protocol* protocol_latest() const;

public:
	// Decodes and validates the message head from a raw buffer.
	// Returns eco::ok when the head is complete and valid, eco::fail when
	// more bytes are needed, eco::error on a malformed message.
	inline eco::Result on_decode_head(
		MessageHead& head, const char* buff, uint32_t size) const
	{
		/*@ eco::fail: check the message edge.
		1.if message bytes not enough to check, need read more bytes.
		--A.version bytes not enough.
		--B.size bytes not enough.
		2.eco::error:
		if message is error message:
		--A.category invalid.
		--B.get protocol invalid by version.
		--C.message is max than max size.
		3.if message check edge success, and get message_size.
		--A.heartbeat message.
		--B.general message.
		*/
		// #.get message version & category.
		Protocol* prot = protocol_latest();
		auto res = prot->decode_version(head, buff, size);	// (1.A)
		if (res != eco::ok) return res;
		if (!is_heartbeat(head.m_category) &&
			!eco::has(head.m_category, category_message))	// (2.A)
		{
			ECO_THIS_ERROR(e_message_category)
				< "category error: " < head.m_category;
			return eco::error;
		}

		// #.get protocol by head version.
		head.m_protocol = protocol(head.m_version);
		if (head.m_protocol == nullptr)		// (2.B)
		{
			ECO_THIS_ERROR(e_protocol_invalid)
				< "protocol ver error: " < head.m_version;
			return eco::error;
		}

		// message size = head_size + size_size + data_size.
		if (!head.m_protocol->decode_size(head, buff, size))	// (1.B)
		{
			return eco::fail;
		}
		if (head.message_size() > size)		// (1.B)
		{
			return eco::fail;
		}
		if (head.message_size() > head.m_protocol->max_size())	// (2.C)
		{
			// Fixed: the message previously reported prot->max_size() (the
			// latest protocol) instead of the limit of the protocol that was
			// actually checked, head.m_protocol.
			ECO_THIS_ERROR(e_message_overszie)
				< "message size over max size: "
				< head.message_size() < '>' < head.m_protocol->max_size();
			return eco::error;
		}
		return eco::ok;		// (3.A/B)
	}
};
////////////////////////////////////////////////////////////////////////////////
}}
#endif
|
Pioneer-Robotics/FtcPioneer-RobotController
|
TeamCode/src/main/java/org/firstinspires/ftc/teamcode/Hardware/Timer.java
|
package org.firstinspires.ftc.teamcode.Hardware;
/**
 * Small helper that sleeps the current thread, hiding the checked
 * {@link InterruptedException} for cleaner call sites.
 */
public class Timer {

    /**
     * Sleeps the current thread for the given duration.
     *
     * <p>If the thread is interrupted while sleeping, this method returns
     * early and restores the thread's interrupt status so callers (and the
     * robot controller runtime) can still observe the interruption.
     *
     * @param milliseconds how long to sleep, in milliseconds
     * @throws IllegalArgumentException if {@code milliseconds} is negative
     *         (propagated from {@link Thread#sleep(long)})
     */
    public void sleep(int milliseconds){
        try {
            Thread.sleep(milliseconds);
        }
        catch (InterruptedException e){
            // Never swallow an interrupt: re-set the flag so the caller's
            // loop/framework can react to the stop request.
            Thread.currentThread().interrupt();
        }
    }
}
|
timfel/netbeans
|
java/java.hints/test/unit/data/org/netbeans/test/java/hints/CastOrMethodInvocation58494g.java
|
<reponame>timfel/netbeans
package org.netbeans.test.java.hints;
import org.netbeans.test.java.hints.pkg.CastOrMethodInvocation58494gUsee;
/**
* @author <NAME>
*/
public class CastOrMethodInvocation58494g extends CastOrMethodInvocation58494gUsee {

    public CastOrMethodInvocation58494g() {
        // NOTE(review): this is NetBeans hint-test fixture data — the locals
        // are intentionally left uninitialized so the CastOrMethodInvocation
        // hint can be exercised on `usee.doStuff(o)`. Do not "fix" this code;
        // confirm comment additions do not shift golden-test positions.
        CastOrMethodInvocation58494gUsee usee;
        Object o;
        usee.doStuff(o);
    }
}
|
badu/http
|
tport/tls_handshake_timeout_error.go
|
<gh_stars>1-10
/*
* Copyright (c) 2018 The Go Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
*/
package tport
// Timeout reports that this error represents a timeout — presumably to
// satisfy the net.Error interface; confirm against the type's declaration.
func (tlsHandshakeTimeoutError) Timeout() bool { return true }

// Temporary reports the error as temporary (retryable).
func (tlsHandshakeTimeoutError) Temporary() bool { return true }

// Error returns the fixed diagnostic message for a TLS handshake that did
// not complete within the configured deadline.
func (tlsHandshakeTimeoutError) Error() string {
	return "github.com/badu/http/tport: TLS handshake timeout"
}
|
bitcrystal/edk
|
Sample/Universal/Network/UefiPxeBc/Dxe/PxeBcMtftp.h
|
/*++
Copyright (c) 2007, Intel Corporation
All rights reserved. This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
Module Name:
PxeBcMtftp.h
Abstract:
Mtftp routines for PxeBc
--*/
#ifndef __EFI_PXEBC_MTFTP_H__
#define __EFI_PXEBC_MTFTP_H__
//
// Indices into the MTFTP option array built for PxeBc transfers. Each slot
// presumably corresponds to the TFTP option its name suggests (blksize,
// timeout, tsize, multicast) — confirm against the option strings in
// PxeBcMtftp.c. The MAXIMUM entry is the array length, not a real option.
//
enum {
  PXE_MTFTP_OPTION_BLKSIZE_INDEX,
  PXE_MTFTP_OPTION_TIMEOUT_INDEX,
  PXE_MTFTP_OPTION_TSIZE_INDEX,
  PXE_MTFTP_OPTION_MULTICAST_INDEX,
  PXE_MTFTP_OPTION_MAXIMUM_INDEX
};
EFI_STATUS
PxeBcTftpGetFileSize (
IN PXEBC_PRIVATE_DATA *Private,
IN EFI_MTFTP4_CONFIG_DATA *Config,
IN UINT8 *Filename,
IN UINTN *BlockSize,
IN OUT UINT64 *BufferSize
)
/*++
Routine Description:
This function is to get size of a file by Tftp.
Arguments:
Private - Pointer to PxeBc private data
Config - Pointer to Mtftp configuration data
Filename - Pointer to file name
BlockSize - Pointer to block size
BufferSize - Pointer to buffer size
Returns:
EFI_SUCCESS
EFI_NOT_FOUND
EFI_DEVICE_ERROR
--*/
;
EFI_STATUS
PxeBcTftpReadFile (
IN PXEBC_PRIVATE_DATA *Private,
IN EFI_MTFTP4_CONFIG_DATA *Config,
IN UINT8 *Filename,
IN UINTN *BlockSize,
IN UINT8 *BufferPtr,
IN OUT UINT64 *BufferSize,
IN BOOLEAN DontUseBuffer
)
/*++
Routine Description:
This function is to get data of a file by Tftp.
Arguments:
Private - Pointer to PxeBc private data
Config - Pointer to Mtftp configuration data
Filename - Pointer to file name
BlockSize - Pointer to block size
BufferPtr - Pointer to buffer
BufferSize - Pointer to buffer size
DontUseBuffer - Indicate whether with a receive buffer
Returns:
EFI_SUCCESS
EFI_DEVICE_ERROR
--*/
;
EFI_STATUS
PxeBcTftpWriteFile (
IN PXEBC_PRIVATE_DATA *Private,
IN EFI_MTFTP4_CONFIG_DATA *Config,
IN UINT8 *Filename,
IN BOOLEAN Overwrite,
IN UINTN *BlockSize,
IN UINT8 *BufferPtr,
IN OUT UINT64 *BufferSize
)
/*++
Routine Description:
This function is put data of a file by Tftp.
Arguments:
Private - Pointer to PxeBc private data
Config - Pointer to Mtftp configuration data
Filename - Pointer to file name
Overwrite - Indicate whether with overwrite attribute
BlockSize - Pointer to block size
BufferPtr - Pointer to buffer
BufferSize - Pointer to buffer size
Returns:
EFI_SUCCESS
EFI_DEVICE_ERROR
--*/
;
EFI_STATUS
PxeBcTftpReadDirectory (
IN PXEBC_PRIVATE_DATA *Private,
IN EFI_MTFTP4_CONFIG_DATA *Config,
IN UINT8 *Filename,
IN UINTN *BlockSize,
IN UINT8 *BufferPtr,
IN OUT UINT64 *BufferSize,
IN BOOLEAN DontUseBuffer
)
/*++
Routine Description:
This function is to get data of a directory by Tftp.
Arguments:
Private - Pointer to PxeBc private data
Config - Pointer to Mtftp configuration data
Filename - Pointer to file name
BlockSize - Pointer to block size
BufferPtr - Pointer to buffer
BufferSize - Pointer to buffer size
DontUseBuffer - Indicate whether with a receive buffer
Returns:
EFI_SUCCESS
EFI_DEVICE_ERROR
--*/
;
#endif
|
timperrett/lift-in-action
|
chapter-9/src/main/scala/sample/comet/RockPaperScissors.scala
|
package sample.comet
import scala.xml.Text
import scala.collection.mutable.Map
import net.liftweb.common.{Box,Full,Empty}
import net.liftweb.actor.LiftActor
import net.liftweb.util.Schedule
import net.liftweb.util.Helpers._
import net.liftweb.http.{CometActor,SHtml}
import net.liftweb.http.js.JsCmds.{SetHtml,Run}
// The three possible moves a player can make.
sealed trait Move
final case object Rock extends Move
final case object Paper extends Move
final case object Scissors extends Move

// The possible results of a round. Fix: Tie and Winner previously did not
// extend Outcome, leaving the sealed trait declared but unused — pattern
// matches on the objects themselves are unaffected by adding the supertype.
sealed trait Outcome
final case object Tie extends Outcome
final case class Winner(is: CometActor) extends Outcome

// Actor protocol: lobby management messages.
final case class AddPlayer(who: CometActor)
final case class RemovePlayer(who: CometActor)
final case object PairPlayersInLobby

// Actor protocol: in-game messages exchanged between Game and the players.
final case class NowPlaying(game: Game)
final case class Make(move: Move, from: CometActor)
final case object HurryUpAndMakeYourMove
final case object ResetGame
final case object LeaveGame
final case object Adjudicate
// Singleton matchmaking actor: keeps a list of waiting players and pairs
// them off into new Game instances as soon as two are available.
object Lobby extends LiftActor {
  // Games started so far. NOTE(review): nothing in this snippet ever removes
  // finished games from this list — confirm whether that is intentional.
  private var games: List[Game] = Nil
  // Players waiting for an opponent, newest first.
  private var lobby: List[CometActor] = Nil

  def messageHandler = {
    case PairPlayersInLobby => {
      // Pair off waiting players two at a time: each iteration takes the
      // first two remaining players, starts a Game, notifies them, and
      // removes them from the lobby before the next iteration.
      for(i <- 0 until (lobby.size / 2)){
        val players = lobby.take(2)
        val game = new Game(players.head, players.last)
        games ::= game
        players.foreach(_ ! NowPlaying(game))
        lobby = lobby diff players
      }
    }
    case AddPlayer(who) =>
      lobby ::= who
      // A new arrival may complete a pair, so try matchmaking immediately.
      this ! PairPlayersInLobby
    case RemovePlayer(who) =>
      // Reference inequality (`ne`): remove exactly this actor instance.
      lobby = lobby.filter(_ ne who)
  }
}
// A single rock-paper-scissors match between two comet players. Collects one
// move per player, adjudicates, broadcasts the outcome, and resets for the
// next round after a short delay.
class Game(playerOne: CometActor, playerTwo: CometActor) extends LiftActor {
  // Each player's move for the current round; Empty until they have chosen.
  private var moves: Map[CometActor, Box[Move]] = Map()
  clearMoves()

  // Broadcast a message to both players.
  private def sendToAllPlayers(msg: Any){
    moves.foreach(_._1 ! msg)
  }

  // Reset both players' pending moves to Empty for a fresh round.
  private def clearMoves() {
    moves = Map(playerOne -> Empty, playerTwo -> Empty)
  }

  def messageHandler = {
    case Adjudicate => {
      val p1move = moves(playerOne)
      val p2move = moves(playerTwo)
      // Box equality: both players made the same move => tie.
      if(p1move == p2move)
        sendToAllPlayers(Tie)
      else {
        (p1move, p2move) match {
          // The three combinations where player one wins.
          case (Full(Rock), Full(Scissors)) |
            (Full(Paper), Full(Rock)) |
            (Full(Scissors), Full(Paper)) =>
            sendToAllPlayers(Winner(playerOne))
          case _ =>
            // playerOne didn't win, and it's not a tie, so playerTwo must have won
            sendToAllPlayers(Winner(playerTwo))
        }
      }
      // Give the players a moment to see the result, then start a new round.
      Schedule.schedule(this, ResetGame, 5 seconds)
    }
    case Make(move, from) => {
      moves.update(from,Full(move))
      // Both Boxes Full => round complete, adjudicate.
      if(moves.flatMap(_._2).size == 2){
        this ! Adjudicate
      } else {
        // one of the players hasn't made their move,
        // prompt the other one to do something
        moves.filter(_._1 ne from).head._1 ! HurryUpAndMakeYourMove
      }
    }
    case ResetGame =>
      clearMoves()
      sendToAllPlayers(ResetGame)
    case LeaveGame =>
      // one player left, you can't play on your own so
      // both players are sent back to the lobby.
      // NOTE(review): this handler is currently a no-op — the behaviour the
      // comment describes is not implemented. Confirm whether intentional.
  }
}
// Per-session comet actor driving the rock-paper-scissors UI: asks for a
// nickname, registers with the Lobby, and renders either the waiting screen
// or the move buttons once a Game is in progress.
class RockPaperScissors extends CometActor {
  private var nickName = ""
  // The game this player is in, Empty while waiting in the lobby.
  private var game: Box[Game] = Empty

  // Push a status message into the page's "information" element.
  private def showInformation(msg: String) =
    partialUpdate(SetHtml("information", Text(msg)))

  override def mediumPriority = {
    case NowPlaying(g) =>
      game = Full(g)
      reRender(true)
    case HurryUpAndMakeYourMove =>
      showInformation("Hurry up! Your opponent has already made their move!")
    case Tie =>
      showInformation("Damn, it was a tie!")
    case Winner(who) =>
      // Reference equality: the Game sends the winning actor itself.
      if(who eq this)
        showInformation("You are the WINNER!!!")
      else
        showInformation("Better luck next time, loser!")
    case ResetGame =>
      reRender(true)
  }

  // In-game: one ajax button per move, disabled after clicking.
  // Otherwise: waiting-room message.
  def render =
    if(!game.isEmpty)
      "#information *" #> "Now you're playing! Make your move..." &
      ".line" #> List(Rock, Paper, Scissors).map(move =>
        SHtml.ajaxButton(Text(move.toString), () => {
          game.foreach(_ ! Make(move, this))
          Run("$('button').attr('disabled',true);")
        }))
    else
      "#game *" #> "Waiting in the lobby for an opponent..."

  override def lifespan: Box[TimeSpan] = Full(2 minutes)

  override def localSetup(){
    askUserForNickname
    super.localSetup()
  }

  override def localShutdown() {
    // Leaving the page: make sure we are no longer matchmade.
    Lobby ! RemovePlayer(this)
    super.localShutdown()
  }

  // Modal prompt for a nickname; re-asks until a name longer than two
  // characters is supplied, then joins the lobby.
  private def askUserForNickname {
    if (nickName.length == 0){
      ask(new AskName, "What's your nickname?"){
        case s: String if (s.trim.length > 2) =>
          nickName = s.trim
          Lobby ! AddPlayer(this)
          reRender(true)
        case _ =>
          askUserForNickname
          reRender(false)
      }
    }
  }
}
// Modal comet actor shown by RockPaperScissors.askUserForNickname: renders an
// ajax form whose submitted, trimmed value is passed back to the asking
// actor via answer().
class AskName extends CometActor {
  def render = SHtml.ajaxForm(
    <p>What is your player nickname? <br />{
      SHtml.text("",n => answer(n.trim))}</p> ++
    <input type="submit" value="Enter Lobby"/>)
}
|
teeraporn39/google-cloud-java
|
google-api-grpc/proto-google-cloud-os-login-v1/src/main/java/com/google/cloud/oslogin/v1/DeleteSshPublicKeyRequestOrBuilder.java
|
<reponame>teeraporn39/google-cloud-java<filename>google-api-grpc/proto-google-cloud-os-login-v1/src/main/java/com/google/cloud/oslogin/v1/DeleteSshPublicKeyRequestOrBuilder.java
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/oslogin/v1/oslogin.proto
package com.google.cloud.oslogin.v1;
// NOTE(review): protoc-generated file — to change anything here, edit
// google/cloud/oslogin/v1/oslogin.proto and regenerate; do not hand-edit.
public interface DeleteSshPublicKeyRequestOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.oslogin.v1.DeleteSshPublicKeyRequest)
    com.google.protobuf.MessageOrBuilder {
  /**
   *
   *
   * <pre>
   * The fingerprint of the public key to update. Public keys are identified by
   * their SHA-256 fingerprint. The fingerprint of the public key is in format
   * `users/{user}/sshPublicKeys/{fingerprint}`.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  java.lang.String getName();
  /**
   *
   *
   * <pre>
   * The fingerprint of the public key to update. Public keys are identified by
   * their SHA-256 fingerprint. The fingerprint of the public key is in format
   * `users/{user}/sshPublicKeys/{fingerprint}`.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  com.google.protobuf.ByteString getNameBytes();
}
|
afbeals/CBRA
|
src/modules/error/__tests__/actions.testPartial.js
|
// External
import { assert } from 'chai';
// Local
import actions from '../actions';
import actionTypes from '../actionTypes';
// Constants
const { CREATE_ERROR, CLEAR_ERROR } = actionTypes;
/**
 * Unit tests for the error module's action creators: verifies the exact
 * action shape produced by createStoreError and clearStoreError.
 */
function errorActionsTest() {
  return describe('Actions', () => {
    it('Should create an error', () => {
      const keyValue = 'pokedexDexError';
      const clientErr = 'error fetching results';
      const devErr = 'server error';
      const expected = {
        type: CREATE_ERROR,
        payload: { keyValue, clientErr, devErr },
      };
      assert.deepEqual(
        actions.createStoreError({ keyValue, clientErr, devErr }),
        expected,
      );
    });
    it('Should clear an error from the store', () => {
      const errorKey = 'pokedexDexError';
      const expected = { type: CLEAR_ERROR, payload: errorKey };
      assert.deepEqual(actions.clearStoreError(errorKey), expected);
    });
  });
}
export default errorActionsTest;
|
teddywest32/intellij-community
|
python/educational-core/course-creator/src/com/jetbrains/edu/coursecreator/projectView/CCTreeStructureProvider.java
|
<gh_stars>0
package com.jetbrains.edu.coursecreator.projectView;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.projectView.impl.nodes.PsiFileNode;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.jetbrains.edu.coursecreator.CCUtils;
import com.jetbrains.edu.learning.StudyUtils;
import com.jetbrains.edu.learning.projectView.StudyTreeStructureProvider;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
/**
 * Project-view customisation for course-creator projects: starts from the
 * study view's filtered tree and re-adds nodes that course authors should
 * see (generated .zip directories and student-invisible files).
 */
public class CCTreeStructureProvider extends StudyTreeStructureProvider {
  @NotNull
  @Override
  public Collection<AbstractTreeNode> modify(@NotNull AbstractTreeNode parent,
                                             @NotNull Collection<AbstractTreeNode> children,
                                             ViewSettings settings) {
    // Non course-creator projects keep the default tree untouched.
    if (!needModify(parent)) {
      return children;
    }
    // Base study filtering first; then walk the ORIGINAL children and add
    // back author-relevant nodes the base filter removed.
    Collection<AbstractTreeNode> modifiedChildren = super.modify(parent, children, settings);
    for (AbstractTreeNode node : children) {
      Project project = node.getProject();
      if (project == null) {
        continue;
      }
      // Directories named *.zip (course archives) stay visible to authors.
      if (node.getValue() instanceof PsiDirectory) {
        String name = ((PsiDirectory)node.getValue()).getName();
        if ("zip".equals(FileUtilRt.getExtension(name))) {
          modifiedChildren.add(node);
          continue;
        }
      }
      if (node instanceof PsiFileNode) {
        PsiFileNode fileNode = (PsiFileNode)node;
        VirtualFile virtualFile = fileNode.getVirtualFile();
        if (virtualFile == null) {
          continue;
        }
        // Task files and task descriptions are already handled by the study
        // view; skip them here.
        if (StudyUtils.getTaskFile(project, virtualFile) != null || StudyUtils.isTaskDescriptionFile(virtualFile.getName())) {
          continue;
        }
        // Any other file is invisible to students but shown to the author
        // via a dedicated node type.
        PsiFile psiFile = ((PsiFileNode)node).getValue();
        modifiedChildren.add(new CCStudentInvisibleFileNode(project, psiFile, settings));
      }
    }
    return modifiedChildren;
  }

  /**
   * Returns true when the parent node belongs to a project in course-creator
   * mode; only then is the tree modified.
   */
  protected boolean needModify(@NotNull final AbstractTreeNode parent) {
    Project project = parent.getProject();
    if (project == null) {
      return false;
    }
    return CCUtils.isCourseCreator(project);
  }
}
|
ogunes-ebi/impc-production-tracker
|
impc_prod_tracker/core/src/main/java/org/gentar/organization/person/associations/PersonRoleWorkUnitRepository.java
|
<reponame>ogunes-ebi/impc-production-tracker
package org.gentar.organization.person.associations;
import org.gentar.organization.person.associations.PersonRoleWorkUnit;
import org.springframework.data.repository.CrudRepository;
/**
 * Spring Data CRUD repository for {@link PersonRoleWorkUnit} association
 * entities, keyed by their {@code Long} id. No custom queries — all access
 * goes through the inherited {@link CrudRepository} operations.
 */
public interface PersonRoleWorkUnitRepository extends CrudRepository<PersonRoleWorkUnit, Long> {
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.