text stringlengths 1 1.05M |
|---|
import nltk
from nltk import word_tokenize, pos_tag, ne_chunk

# Tokenize the sentence and apply Part-of-Speech (POS) tagging.
sent = "John and Mary are travelling to Bangkok."
tokens = word_tokenize(sent)
pos_tags = pos_tag(tokens)

# Named Entity Recognition (NER): chunk the POS-tagged tokens into an nltk Tree.
tree = ne_chunk(pos_tags)

# Collect person names by walking every subtree labelled 'PERSON';
# multi-token names are joined back together with a space.
names = []
for subtree in tree.subtrees():
    if subtree.label() == 'PERSON':
        names.append(' '.join([ word for word, tag in subtree.leaves()]))

# Check the results
print('Identified Names: ', names)  # expected: ["John", "Mary"]
# Train the model
# ...
#!/bin/bash
# Manual smoke tests for the cloud_api endpoint (requires HTTPie's `http` CLI).
# Each request below exercises one error path, followed by several valid launches.

# request with no "command" field at all
http -f POST 127.0.0.1:5000/cloud_api hello=world
# unrecognized command value
http -f POST 127.0.0.1:5000/cloud_api command=hello
# required argument missing (no instance name)
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true
# invalid image name
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-desktop-x86 flavor=m1.small internet=true name=test
# invalid flavor
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.miao internet=true name=test
# invalid internet setting
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=miao name=test

# I haven't thought out an elegant way to test whether we can deal with invalid cloud configurations
# you need to manually modify the code in cloud_api to do this
# for example, change the network_name to an invalid one
# and see whether we catch it

# Launch up to 6 instances and verify each launch succeeds;
# if one fails, check that the failed instance is removed properly.
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true name=test
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true name=test
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true name=test
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true name=test
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true name=test
http -f POST 127.0.0.1:5000/cloud_api command=launch_instance image=xubuntu-14.04-desktop-x86 flavor=m1.small internet=true name=test
# finally: log into your dashboard and do some necessary cleaning
|
/*
* Copyright 2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package math.rsvd;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import net.jamu.matrix.Matrices;
import net.jamu.matrix.MatrixD;
import net.jamu.matrix.SvdD;
import old.math.rsvd.ApproximateBasis2;
public class ApproximateBasis2Test {

    /** Maximum element-wise deviation tolerated between A and its reconstructions. */
    private static final double TOLERANCE = 1.0e-8;

    @Test
    public void testNaturalNumbersTall() {
        // natural-numbers matrices are rank deficient; a rank-2 estimate suffices
        runPipeline(Matrices.naturalNumbersD(220, 150), 2);
    }

    @Test
    public void testNaturalNumbersWide() {
        runPipeline(Matrices.naturalNumbersD(150, 220), 2);
    }

    @Test
    public void testRandomNormalTall() {
        int m = 220;
        int n = 150;
        // random matrices are full rank (almost surely), so estimate min(m, n)
        runPipeline(Matrices.randomNormalD(m, n), Math.min(m, n));
    }

    @Test
    public void testRandomNormalWide() {
        int m = 150;
        int n = 220;
        runPipeline(Matrices.randomNormalD(m, n), Math.min(m, n));
    }

    @Test
    public void testRandomUniformTall() {
        int m = 220;
        int n = 150;
        runPipeline(Matrices.randomUniformD(m, n), Math.min(m, n));
    }

    @Test
    public void testRandomUniformWide() {
        int m = 150;
        int n = 220;
        runPipeline(Matrices.randomUniformD(m, n), Math.min(m, n));
    }

    /**
     * Shared test pipeline (previously copy-pasted into each test method):
     * compute the approximate range basis Q of A, check that projecting A
     * through Q reproduces A within TOLERANCE, derive an SVD of A from the
     * reduced matrix B and check the SVD reconstruction of A.
     *
     * @param A             the matrix under test
     * @param estimatedRank the rank estimate handed to ApproximateBasis2
     */
    private void runPipeline(MatrixD A, int estimatedRank) {
        MatrixD Q = getQ(A, estimatedRank);
        MatrixD B = checkApproximation(Q, A, TOLERANCE);
        SVD svd = createSVD(B, A, Q, estimatedRank);
        checkSVD(svd, A, TOLERANCE);
    }

    /**
     * Asserts that the projection Q * Q^T * A approximates A within
     * {@code tolerance} and returns the reduced matrix B.
     *
     * @param Q         approximate basis computed by ApproximateBasis2
     * @param A         the original matrix
     * @param tolerance maximum allowed element-wise deviation
     * @return Q^T * A when A is wide, A * Q when A is tall
     *         (NOTE(review): mirrors the two branches of createSVD() --
     *         confirm against ApproximateBasis2's contract)
     */
    private MatrixD checkApproximation(MatrixD Q, MatrixD A, double tolerance) {
        // wide matrices are handled through their transpose
        boolean transpose = A.numRows() < A.numColumns();
        MatrixD QT = Q.transpose();
        MatrixD QQT = Q.times(QT);
        if (QQT.numColumns() < A.numRows()) {
            // embed the projector into a full-size identity so it is conformable with A
            MatrixD I = Matrices.identityD(A.numRows());
            QQT = I.setSubmatrixInplace(0, 0, QQT, 0, 0, QQT.endRow(), QQT.endCol());
        }
        MatrixD A_approx = QQT.times(A);
        boolean equal = Matrices.approxEqual(A_approx, A, tolerance);
        assertTrue("A_approx and A should be approximately equal", equal);
        if (transpose) {
            return QT.times(A);
        }
        return A.times(Q);
    }

    /**
     * Builds an SVD of A from the economy SVD of the reduced matrix B,
     * mapping the factors back through Q and trimming them to the
     * estimated rank and to the dimensions of A.
     *
     * @param B             reduced matrix returned by checkApproximation()
     * @param A_expected    the original matrix (used only for its dimensions)
     * @param Q             the approximate basis
     * @param estimatedRank the rank estimate used for truncation
     * @return the reconstructed (truncated) SVD of A
     */
    private SVD createSVD(MatrixD B, MatrixD A_expected, MatrixD Q, int estimatedRank) {
        boolean transpose = A_expected.numRows() < A_expected.numColumns();
        SvdD svd = B.svdEcon();
        MatrixD U_tilde = svd.getU();
        double[] sigma = svd.getS();
        MatrixD Vt = svd.getVt();
        MatrixD U = null;
        if (transpose) {
            // wide case: lift the left factor back through Q
            U = Q.times(U_tilde);
        } else {
            // tall case: lift the right factor back through Q^T
            U = U_tilde;
            Vt = Vt.times(Q.transpose());
        }
        // truncate U to the estimated rank and to the row count of A
        if (U.numColumns() > estimatedRank) {
            U = U.selectConsecutiveColumns(0, estimatedRank - 1);
        }
        if (U.numRows() > A_expected.numRows()) {
            U = U.selectSubmatrix(0, 0, A_expected.numRows() - 1, U.endCol());
        }
        // truncate Vt to the estimated rank and to the column count of A
        if (Vt.numRows() > estimatedRank) {
            Vt = Vt.selectSubmatrix(0, 0, estimatedRank - 1, Vt.endCol());
        }
        if (Vt.numColumns() > A_expected.numColumns()) {
            Vt = Vt.selectConsecutiveColumns(0, A_expected.numColumns() - 1);
        }
        MatrixD S = Matrices.diagD(U.numColumns(), Vt.numRows(), sigma);
        return new SVD(U, S, Vt);
    }

    /**
     * Asserts that U * S * Vt reconstructs A within {@code tolerance}.
     * The stdout lines are intentional debug output for failed runs.
     */
    private void checkSVD(SVD svd, MatrixD A_expected, double tolerance) {
        MatrixD U = svd.U;
        MatrixD S = svd.S;
        MatrixD Vt = svd.Vt;
        MatrixD A_approx = U.timesTimes(S, Vt);
        System.out.println("A_approx: " + A_approx.numRows() + "x" + A_approx.numColumns());
        boolean equal = Matrices.approxEqual(A_approx, A_expected, tolerance);
        System.out.println(equal ? "EQUAL" : "NOT EQUAL");
        System.out.println("***");
        assertTrue("A and reconstruction of A should be approximately equal", equal);
    }

    /** Computes the approximate range basis of A via the class under test. */
    private MatrixD getQ(MatrixD A, int estimatedRank) {
        return new ApproximateBasis2(A, estimatedRank).computeQ();
    }
}
|
/*************************************************************************************************/
/*!
* \file hci_api.h
*
* \brief HCI subsystem API.
*
* $Date: 2016-04-05 14:14:53 -0700 (Tue, 05 Apr 2016) $
* $Revision: 6646 $
*
* Copyright (c) 2009 Wicentric, Inc., all rights reserved.
* Wicentric confidential and proprietary.
*
* IMPORTANT. Your use of this file is governed by a Software License Agreement
* ("Agreement") that must be accepted in order to download or otherwise receive a
* copy of this file. You may not use or copy this file for any purpose other than
* as described in the Agreement. If you do not agree to all of the terms of the
* Agreement do not use this file and delete all copies in your possession or control;
* if you do not have a copy of the Agreement, you must contact Wicentric, Inc. prior
* to any use, copying or further distribution of this software.
*/
/*************************************************************************************************/
#ifndef HCI_API_H
#define HCI_API_H
#include "wsf_types.h"
#include "hci_defs.h"
#include "wsf_os.h"
#include "bda.h"
#ifdef __cplusplus
extern "C" {
#endif
/**************************************************************************************************
Macros
**************************************************************************************************/
/*! Internal event values for the HCI event and sec callbacks */
#define HCI_RESET_SEQ_CMPL_CBACK_EVT 0 /*! Reset sequence complete */
#define HCI_LE_CONN_CMPL_CBACK_EVT 1 /*! LE connection complete */
#define HCI_LE_ENHANCED_CONN_CMPL_CBACK_EVT 2 /*! LE enhanced connection complete */
#define HCI_DISCONNECT_CMPL_CBACK_EVT 3 /*! LE disconnect complete */
#define HCI_LE_CONN_UPDATE_CMPL_CBACK_EVT 4 /*! LE connection update complete */
#define HCI_LE_CREATE_CONN_CANCEL_CMD_CMPL_CBACK_EVT 5 /*! LE create connection cancel command complete */
#define HCI_LE_ADV_REPORT_CBACK_EVT 6 /*! LE advertising report */
#define HCI_READ_RSSI_CMD_CMPL_CBACK_EVT 7 /*! Read RSSI command complete */
#define HCI_LE_READ_CHAN_MAP_CMD_CMPL_CBACK_EVT 8 /*! LE Read channel map command complete */
#define HCI_READ_TX_PWR_LVL_CMD_CMPL_CBACK_EVT 9 /*! Read transmit power level command complete */
#define HCI_READ_REMOTE_VER_INFO_CMPL_CBACK_EVT 10 /*! Read remote version information complete */
#define HCI_LE_READ_REMOTE_FEAT_CMPL_CBACK_EVT 11 /*! LE read remote features complete */
#define HCI_LE_LTK_REQ_REPL_CMD_CMPL_CBACK_EVT 12 /*! LE LTK request reply command complete */
#define HCI_LE_LTK_REQ_NEG_REPL_CMD_CMPL_CBACK_EVT 13 /*! LE LTK request negative reply command complete */
#define HCI_ENC_KEY_REFRESH_CMPL_CBACK_EVT 14 /*! Encryption key refresh complete */
#define HCI_ENC_CHANGE_CBACK_EVT 15 /*! Encryption change */
#define HCI_LE_LTK_REQ_CBACK_EVT 16 /*! LE LTK request */
#define HCI_VENDOR_SPEC_CMD_STATUS_CBACK_EVT 17 /*! Vendor specific command status */
#define HCI_VENDOR_SPEC_CMD_CMPL_CBACK_EVT 18 /*! Vendor specific command complete */
#define HCI_VENDOR_SPEC_CBACK_EVT 19 /*! Vendor specific */
#define HCI_HW_ERROR_CBACK_EVT 20 /*! Hardware error */
#define HCI_LE_ADD_DEV_TO_RES_LIST_CMD_CMPL_CBACK_EVT 21 /*! LE add device to resolving list command complete */
#define HCI_LE_REM_DEV_FROM_RES_LIST_CMD_CMPL_CBACK_EVT 22 /*! LE remove device from resolving command complete */
#define HCI_LE_CLEAR_RES_LIST_CMD_CMPL_CBACK_EVT 23 /*! LE clear resolving list command complete */
#define HCI_LE_READ_PEER_RES_ADDR_CMD_CMPL_CBACK_EVT 24 /*! LE read peer resolving address command complete */
#define HCI_LE_READ_LOCAL_RES_ADDR_CMD_CMPL_CBACK_EVT 25 /*! LE read local resolving address command complete */
#define HCI_LE_SET_ADDR_RES_ENABLE_CMD_CMPL_CBACK_EVT 26 /*! LE set address resolving enable command complete */
#define HCI_LE_ENCRYPT_CMD_CMPL_CBACK_EVT 27 /*! LE encrypt command complete */
#define HCI_LE_RAND_CMD_CMPL_CBACK_EVT 28 /*! LE rand command complete */
#define HCI_LE_REM_CONN_PARAM_REP_CMD_CMPL_CBACK_EVT 29 /*! LE remote connection parameter request reply complete */
#define HCI_LE_REM_CONN_PARAM_NEG_REP_CMD_CMPL_CBACK_EVT 30 /*! LE remote connection parameter request negative reply complete */
#define HCI_LE_READ_DEF_DATA_LEN_CMD_CMPL_CBACK_EVT 31 /*! LE read suggested default data length command complete */
#define HCI_LE_WRITE_DEF_DATA_LEN_CMD_CMPL_CBACK_EVT 32 /*! LE write suggested default data length command complete */
#define HCI_LE_SET_DATA_LEN_CMD_CMPL_CBACK_EVT 33 /*! LE set data length command complete */
#define HCI_LE_READ_MAX_DATA_LEN_CMD_CMPL_CBACK_EVT 34 /*! LE read maximum data length command complete */
#define HCI_LE_REM_CONN_PARAM_REQ_CBACK_EVT 35 /*! LE remote connection parameter request */
#define HCI_LE_DATA_LEN_CHANGE_CBACK_EVT 36 /*! LE data length change */
#define HCI_LE_READ_LOCAL_P256_PUB_KEY_CMPL_CBACK_EVT 37 /*! LE read local P-256 public key */
#define HCI_LE_GENERATE_DHKEY_CMPL_CBACK_EVT 38 /*! LE generate DHKey complete */
#define HCI_WRITE_AUTH_PAYLOAD_TO_CMD_CMPL_CBACK_EVT 39 /*! Write authenticated payload timeout command complete */
#define HCI_AUTH_PAYLOAD_TO_EXPIRED_CBACK_EVT 40 /*! Authenticated payload timeout expired event */
/**************************************************************************************************
Data Types
**************************************************************************************************/
/*! Connection specification type */
typedef struct
{
uint16_t connIntervalMin;
uint16_t connIntervalMax;
uint16_t connLatency;
uint16_t supTimeout;
uint16_t minCeLen;
uint16_t maxCeLen;
} hciConnSpec_t;
/*! LE connection complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint8_t role;
uint8_t addrType;
bdAddr_t peerAddr;
uint16_t connInterval;
uint16_t connLatency;
uint16_t supTimeout;
uint8_t clockAccuracy;
/* enhanced fields */
bdAddr_t localRpa;
bdAddr_t peerRpa;
} hciLeConnCmplEvt_t;
/*! Disconnect complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint8_t reason;
} hciDisconnectCmplEvt_t;
/*! LE connection update complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint16_t connInterval;
uint16_t connLatency;
uint16_t supTimeout;
} hciLeConnUpdateCmplEvt_t;
/*! LE create connection cancel command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
} hciLeCreateConnCancelCmdCmplEvt_t;
/*! LE advertising report event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t *pData;
uint8_t len;
int8_t rssi;
uint8_t eventType;
uint8_t addrType;
bdAddr_t addr;
/* direct fields */
uint8_t directAddrType;
bdAddr_t directAddr;
} hciLeAdvReportEvt_t;
/*! Read RSSI command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
int8_t rssi;
} hciReadRssiCmdCmplEvt_t;
/*! LE Read channel map command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint8_t chanMap[HCI_CHAN_MAP_LEN];
} hciReadChanMapCmdCmplEvt_t;
/*! Read transmit power level command complete event */
typedef struct
{
  wsfMsgHdr_t         hdr;        /*!< Event header */
  uint8_t             status;     /*!< Command status */
  uint16_t            handle;     /*!< Connection handle; 2 octets per the HCI spec.
                                       Was uint8_t, inconsistent with every other
                                       event struct in this file. */
  int8_t              pwrLvl;     /*!< Transmit power level in dBm */
} hciReadTxPwrLvlCmdCmplEvt_t;
/*! Read remote version information complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint8_t version;
uint16_t mfrName;
uint16_t subversion;
} hciReadRemoteVerInfoCmplEvt_t;
/*! LE read remote features complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint8_t features[HCI_FEAT_LEN];
} hciLeReadRemoteFeatCmplEvt_t;
/*! LE LTK request reply command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciLeLtkReqReplCmdCmplEvt_t;
/*! LE LTK request negative reply command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciLeLtkReqNegReplCmdCmplEvt_t;
/*! Encryption key refresh complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciEncKeyRefreshCmpl_t;
/*! Encryption change event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
uint8_t enabled;
} hciEncChangeEvt_t;
/*! LE LTK request event */
typedef struct
{
wsfMsgHdr_t hdr;
uint16_t handle;
uint8_t randNum[HCI_RAND_LEN];
uint16_t encDiversifier;
} hciLeLtkReqEvt_t;
/*! Vendor specific command status event */
typedef struct
{
wsfMsgHdr_t hdr;
uint16_t opcode;
} hciVendorSpecCmdStatusEvt_t;
/*! Vendor specific command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint16_t opcode;
uint8_t param[1];
} hciVendorSpecCmdCmplEvt_t;
/*! Vendor specific event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t param[1];
} hciVendorSpecEvt_t;
/*! Hardware error event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t code;
} hciHwErrorEvt_t;
/*! LE encrypt command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint8_t data[HCI_ENCRYPT_DATA_LEN];
} hciLeEncryptCmdCmplEvt_t;
/*! LE rand command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint8_t randNum[HCI_RAND_LEN];
} hciLeRandCmdCmplEvt_t;
/*! LE remote connection parameter request reply command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciLeRemConnParamRepEvt_t;
/*! LE remote connection parameter request negative reply command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciLeRemConnParamNegRepEvt_t;
/*! LE read suggested default data len command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t suggestedMaxTxOctets;
uint16_t suggestedMaxTxTime;
} hciLeReadDefDataLenEvt_t;
/*! LE write suggested default data len command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
} hciLeWriteDefDataLenEvt_t;
/*! LE set data len command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciLeSetDataLenEvt_t;
/*! LE read maximum data len command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t supportedMaxTxOctets;
uint16_t supportedMaxTxTime;
uint16_t supportedMaxRxOctets;
uint16_t supportedMaxRxTime;
} hciLeReadMaxDataLenEvt_t;
/*! LE remote connection parameter request event */
typedef struct
{
wsfMsgHdr_t hdr;
uint16_t handle;
uint16_t intervalMin;
uint16_t intervalMax;
uint16_t latency;
uint16_t timeout;
} hciLeRemConnParamReqEvt_t;
/*! LE data length change event */
typedef struct
{
wsfMsgHdr_t hdr;
uint16_t handle;
uint16_t maxTxOctets;
uint16_t maxTxTime;
uint16_t maxRxOctets;
uint16_t maxRxTime;
} hciLeDataLenChangeEvt_t;
/*! LE local p256 ecc key command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint8_t key[HCI_P256_KEY_LEN];
} hciLeP256CmplEvt_t;
/*! LE generate DH key command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint8_t key[HCI_DH_KEY_LEN];
} hciLeGenDhKeyEvt_t;
/*! LE read peer resolving address command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint8_t peerRpa[BDA_ADDR_LEN];
} hciLeReadPeerResAddrCmdCmplEvt_t;
/*! LE read local resolving address command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint8_t localRpa[BDA_ADDR_LEN];
} hciLeReadLocalResAddrCmdCmplEvt_t;
/*! LE set address resolving enable command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
} hciLeSetAddrResEnableCmdCmplEvt_t;
/*! LE add device to resolving list command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
} hciLeAddDevToResListCmdCmplEvt_t;
/*! LE remove device from resolving list command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
} hciLeRemDevFromResListCmdCmplEvt_t;
/*! LE clear resolving list command complete event */
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
} hciLeClearResListCmdCmplEvt_t;
typedef struct
{
wsfMsgHdr_t hdr;
uint8_t status;
uint16_t handle;
} hciWriteAuthPayloadToCmdCmplEvt_t;
typedef struct
{
wsfMsgHdr_t hdr;
uint16_t handle;
} hciAuthPayloadToExpiredEvt_t;
/*! Union of all event types */
typedef union
{
wsfMsgHdr_t hdr;
wsfMsgHdr_t resetSeqCmpl;
hciLeConnCmplEvt_t leConnCmpl;
hciDisconnectCmplEvt_t disconnectCmpl;
hciLeConnUpdateCmplEvt_t leConnUpdateCmpl;
hciLeCreateConnCancelCmdCmplEvt_t leCreateConnCancelCmdCmpl;
hciLeAdvReportEvt_t leAdvReport;
hciReadRssiCmdCmplEvt_t readRssiCmdCmpl;
hciReadChanMapCmdCmplEvt_t readChanMapCmdCmpl;
hciReadTxPwrLvlCmdCmplEvt_t readTxPwrLvlCmdCmpl;
hciReadRemoteVerInfoCmplEvt_t readRemoteVerInfoCmpl;
hciLeReadRemoteFeatCmplEvt_t leReadRemoteFeatCmpl;
hciLeLtkReqReplCmdCmplEvt_t leLtkReqReplCmdCmpl;
hciLeLtkReqNegReplCmdCmplEvt_t leLtkReqNegReplCmdCmpl;
hciEncKeyRefreshCmpl_t encKeyRefreshCmpl;
hciEncChangeEvt_t encChange;
hciLeLtkReqEvt_t leLtkReq;
hciVendorSpecCmdStatusEvt_t vendorSpecCmdStatus;
hciVendorSpecCmdCmplEvt_t vendorSpecCmdCmpl;
hciVendorSpecEvt_t vendorSpec;
hciHwErrorEvt_t hwError;
hciLeEncryptCmdCmplEvt_t leEncryptCmdCmpl;
hciLeRandCmdCmplEvt_t leRandCmdCmpl;
hciLeReadPeerResAddrCmdCmplEvt_t leReadPeerResAddrCmdCmpl;
hciLeReadLocalResAddrCmdCmplEvt_t leReadLocalResAddrCmdCmpl;
hciLeSetAddrResEnableCmdCmplEvt_t leSetAddrResEnableCmdCmpl;
hciLeAddDevToResListCmdCmplEvt_t leAddDevToResListCmdCmpl;
hciLeRemDevFromResListCmdCmplEvt_t leRemDevFromResListCmdCmpl;
hciLeClearResListCmdCmplEvt_t leClearResListCmdCmpl;
hciLeRemConnParamRepEvt_t leRemConnParamRepCmdCmpl;
hciLeRemConnParamNegRepEvt_t leRemConnParamNegRepCmdCmpl;
hciLeReadDefDataLenEvt_t leReadDefDataLenCmdCmpl;
hciLeWriteDefDataLenEvt_t leWriteDefDataLenCmdCmpl;
hciLeSetDataLenEvt_t leSetDataLenCmdCmpl;
hciLeReadMaxDataLenEvt_t leReadMaxDataLenCmdCmpl;
hciLeRemConnParamReqEvt_t leRemConnParamReq;
hciLeDataLenChangeEvt_t leDataLenChange;
hciLeP256CmplEvt_t leP256;
hciLeGenDhKeyEvt_t leGenDHKey;
hciWriteAuthPayloadToCmdCmplEvt_t writeAuthPayloadToCmdCmpl;
hciAuthPayloadToExpiredEvt_t authPayloadToExpired;
} hciEvt_t;
/**************************************************************************************************
Callback Function Types
**************************************************************************************************/
typedef void (*hciEvtCback_t)(hciEvt_t *pEvent);
typedef void (*hciSecCback_t)(hciEvt_t *pEvent);
typedef void (*hciAclCback_t)(uint8_t *pData);
typedef void (*hciFlowCback_t)(uint16_t handle, bool_t flowDisabled);
/**************************************************************************************************
Function Declarations
**************************************************************************************************/
/*! Initialization, registration, and reset */
void HciEvtRegister(hciEvtCback_t evtCback);
void HciSecRegister(hciSecCback_t secCback);
void HciAclRegister(hciAclCback_t aclCback, hciFlowCback_t flowCback);
void HciResetSequence(void);
void HciVsInit(uint8_t param);
void HciCoreInit(void);
void HciCoreHandler(wsfEventMask_t event, wsfMsgHdr_t *pMsg);
void HciSetMaxRxAclLen(uint16_t len);
void HciSetAclQueueWatermarks(uint8_t queueHi, uint8_t queueLo);
void HciSetLeSupFeat(uint8_t feat, bool_t flag);
/*! Optimization interface */
uint8_t *HciGetBdAddr(void);
uint8_t HciGetWhiteListSize(void);
int8_t HciGetAdvTxPwr(void);
uint16_t HciGetBufSize(void);
uint8_t HciGetNumBufs(void);
uint8_t *HciGetSupStates(void);
uint8_t HciGetLeSupFeat(void);
uint16_t HciGetMaxRxAclLen(void);
uint8_t HciGetResolvingListSize(void);
bool_t HciLlPrivacySupported(void);
/*! ACL data interface */
void HciSendAclData(uint8_t *pAclData);
/*! Command interface */
void HciDisconnectCmd(uint16_t handle, uint8_t reason);
void HciLeAddDevWhiteListCmd(uint8_t addrType, uint8_t *pAddr);
void HciLeClearWhiteListCmd(void);
void HciLeConnUpdateCmd(uint16_t handle, hciConnSpec_t *pConnSpec);
void HciLeCreateConnCmd(uint16_t scanInterval, uint16_t scanWindow, uint8_t filterPolicy,
uint8_t peerAddrType, uint8_t *pPeerAddr, uint8_t ownAddrType,
hciConnSpec_t *pConnSpec);
void HciLeCreateConnCancelCmd(void);
void HciLeEncryptCmd(uint8_t *pKey, uint8_t *pData);
void HciLeLtkReqNegReplCmd(uint16_t handle);
void HciLeLtkReqReplCmd(uint16_t handle, uint8_t *pKey);
void HciLeRandCmd(void);
void HciLeReadAdvTXPowerCmd(void);
void HciLeReadBufSizeCmd(void);
void HciLeReadChanMapCmd(uint16_t handle);
void HciLeReadLocalSupFeatCmd(void);
void HciLeReadRemoteFeatCmd(uint16_t handle);
void HciLeReadSupStatesCmd(void);
void HciLeReadWhiteListSizeCmd(void);
void HciLeRemoveDevWhiteListCmd(uint8_t addrType, uint8_t *pAddr);
void HciLeSetAdvEnableCmd(uint8_t enable);
void HciLeSetAdvDataCmd(uint8_t len, uint8_t *pData);
void HciLeSetAdvParamCmd(uint16_t advIntervalMin, uint16_t advIntervalMax, uint8_t advType,
uint8_t ownAddrType, uint8_t peerAddrType, uint8_t *pPeerAddr,
uint8_t advChanMap, uint8_t advFiltPolicy);
void HciLeSetEventMaskCmd(uint8_t *pLeEventMask);
void HciLeSetHostChanClassCmd(uint8_t *pChanMap);
void HciLeSetRandAddrCmd(uint8_t *pAddr);
void HciLeSetScanEnableCmd(uint8_t enable, uint8_t filterDup);
void HciLeSetScanParamCmd(uint8_t scanType, uint16_t scanInterval, uint16_t scanWindow,
uint8_t ownAddrType, uint8_t scanFiltPolicy);
void HciLeSetScanRespDataCmd(uint8_t len, uint8_t *pData);
void HciLeStartEncryptionCmd(uint16_t handle, uint8_t *pRand, uint16_t diversifier, uint8_t *pKey);
void HciReadBdAddrCmd(void);
void HciReadBufSizeCmd(void);
void HciReadLocalSupFeatCmd(void);
void HciReadLocalVerInfoCmd(void);
void HciReadRemoteVerInfoCmd(uint16_t handle);
void HciReadRssiCmd(uint16_t handle);
void HciReadTxPwrLvlCmd(uint16_t handle, uint8_t type);
void HciResetCmd(void);
void HciSetEventMaskCmd(uint8_t *pEventMask);
void HciSetEventMaskPage2Cmd(uint8_t *pEventMask);
void HciReadAuthPayloadTimeout(uint16_t handle);
void HciWriteAuthPayloadTimeout(uint16_t handle, uint16_t timeout);
void HciLeAddDeviceToResolvingListCmd(uint8_t peerAddrType, const uint8_t *pPeerIdentityAddr,
const uint8_t *pPeerIrk, const uint8_t *pLocalIrk);
void HciLeRemoveDeviceFromResolvingList(uint8_t peerAddrType, const uint8_t *pPeerIdentityAddr);
void HciLeClearResolvingList(void);
void HciLeReadResolvingListSize(void);
void HciLeReadPeerResolvableAddr(uint8_t addrType, const uint8_t *pIdentityAddr);
void HciLeReadLocalResolvableAddr(uint8_t addrType, const uint8_t *pIdentityAddr);
void HciLeSetAddrResolutionEnable(uint8_t enable);
void HciLeSetResolvablePrivateAddrTimeout(uint16_t rpaTimeout);
void HciVendorSpecificCmd(uint16_t opcode, uint8_t len, uint8_t *pData);
void HciLeRemoteConnParamReqReply(uint16_t handle, uint16_t intervalMin, uint16_t intervalMax, uint16_t latency,
uint16_t timeout, uint16_t minCeLen, uint16_t maxCeLen);
void HciLeRemoteConnParamReqNegReply(uint16_t handle, uint8_t reason);
void HciLeSetDataLen(uint16_t handle, uint16_t txOctets, uint16_t txTime);
void HciLeReadDefDataLen(void);
void HciLeWriteDefDataLen(uint16_t suggestedMaxTxOctets, uint16_t suggestedMaxTxTime);
void HciLeReadLocalP256PubKey(void);
void HciLeGenerateDHKey(uint8_t *pPubKeyX, uint8_t *pPubKeyY);
void HciLeReadMaxDataLen(void);
void HciWriteAuthPayloadTimeout(uint16_t handle, uint16_t timeout);
#ifdef __cplusplus
};
#endif
#endif /* HCI_API_H */
|
<reponame>OSADP/C2C-RI<filename>C2CRIBuildDir/projects/C2C-RI/src/NTCIP2306v01_69/src/org/fhwa/c2cri/ntcip2306v109/status/GZIPStatus.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.ntcip2306v109.status;
import java.util.ArrayList;
import org.fhwa.c2cri.testmodel.verification.TestAssertion;
/**
* The Class GZIPStatus maintains the results of various NTCIP 2306 GZIP requirement inspections..
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
public class GZIPStatus {

    /** True when the payload was GZIP encoded. */
    private boolean isGZIPEncoded = false;

    /** True when the decoded payload was well-formed XML. */
    private boolean wellFormedXML = false;

    /** Accumulated GZIP inspection error messages. */
    private ArrayList<String> gzipErrors = new ArrayList<>();

    /** Test assertions recorded during the inspection. */
    private ArrayList<TestAssertion> testAssertionList = new ArrayList<>();

    /**
     * NTCIP 2306 4.1.1: the exchange is valid only when the payload is both
     * GZIP encoded and well-formed XML after decoding.
     *
     * @return true, if the payload satisfies the NTCIP 2306 GZIP requirements
     */
    public boolean isNTCIP2306ValidGZIP() {
        return (isGZIPEncoded&&wellFormedXML);
    }

    /**
     * Records one GZIP inspection error.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param xmlError the error message to record
     */
    public void addGZIPError(String xmlError){
        gzipErrors.add(xmlError);
    }

    /**
     * Formats all recorded GZIP errors as a single newline-separated report.
     *
     * @return the report, starting with a "GZIP Errors Found:" header line
     */
    public String getGZIPErrors(){
        StringBuilder results = new StringBuilder();
        results.append("GZIP Errors Found: \n");
        for (String thisError :gzipErrors){
            results.append(thisError).append("\n");
        }
        return results.toString();
    }

    /**
     * Reports whether the payload was GZIP encoded.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @return true, if the payload was GZIP encoded
     */
    public boolean isIsGZIPEncoded() {
        return isGZIPEncoded;
    }

    /**
     * Sets whether the payload was GZIP encoded.
     *
     * @param isGZIPEncoded the new GZIP-encoded flag
     */
    public void setIsGZIPEncoded(boolean isGZIPEncoded) {
        this.isGZIPEncoded = isGZIPEncoded;
    }

    /**
     * Reports whether the decoded payload was well-formed XML.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @return true, if the payload was well-formed XML
     */
    public boolean isWellFormedXML() {
        return wellFormedXML;
    }

    /**
     * Sets whether the decoded payload was well-formed XML.
     *
     * @param wellFormedXML the new well-formed-XML flag
     */
    public void setWellFormedXML(boolean wellFormedXML) {
        this.wellFormedXML = wellFormedXML;
    }

    /**
     * Gets the test assertion list.
     *
     * @return the test assertion list
     */
    public ArrayList<TestAssertion> getTestAssertionList() {
        return testAssertionList;
    }

    /**
     * Sets the test assertion list.
     *
     * @param testAssertionList the new test assertion list
     */
    public void setTestAssertionList(ArrayList<TestAssertion> testAssertionList) {
        this.testAssertionList = testAssertionList;
    }
}
|
package org.tarantool.orm.auto;
import com.google.auto.service.AutoService;
import org.tarantool.orm.annotations.Tuple;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.TypeElement;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@SupportedAnnotationTypes("org.tarantool.orm.annotations.Tuple")
@SupportedSourceVersion(SourceVersion.RELEASE_8)
@AutoService(Processor.class)
public final class TupleManagerProcessor extends BaseProcessor {
    // Annotation processor: generates one manager class per @Tuple-annotated
    // class, plus a single factory covering all generated managers.
    // NOTE(review): filer/typeUtils and the info/error helpers are presumably
    // inherited from BaseProcessor -- confirm in that class.

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        info("Start TupleManager processor");
        try {
            info("Generate managers");
            List<TupleMeta> metas = generateTupleManagers(roundEnv);
            info("Generate manager factory");
            generateTupleManagerFactory(metas);
        } catch (IllegalArgumentException | IOException e) {
            // Report the failure through the processing environment and claim
            // the round so no further processor handles @Tuple.
            error(e.getLocalizedMessage());
            return true;
        }
        // NOTE(review): success returns false (annotations left unclaimed)
        // while failure returns true -- unusual; confirm intended semantics.
        return false;
    }

    // Generates a manager class for every element annotated with @Tuple.
    // Throws IllegalArgumentException when @Tuple is placed on a non-class.
    private List<TupleMeta> generateTupleManagers(RoundEnvironment roundEnv) throws IOException {
        List<TupleMeta> metaList = new ArrayList<>();
        TupleManagerGenerator tupleManagerGenerator = new TupleManagerGenerator(filer);
        for (Element element : roundEnv.getElementsAnnotatedWith(Tuple.class)) {
            if (element.getKind() != ElementKind.CLASS) {
                throw new IllegalArgumentException(String.format("Only classes may be annotated by Tuple: %s", element.getSimpleName()));
            }
            TupleMeta meta = TupleMeta.getInstance((TypeElement) element, typeUtils);
            info("Start to generate new class: %s", meta.className);
            tupleManagerGenerator.generate(meta);
            metaList.add(meta);
        }
        return metaList;
    }

    // Generates the manager factory; skipped when no @Tuple classes were found.
    private void generateTupleManagerFactory(List<TupleMeta> metaList) throws IOException {
        ManagerFactoryGenerator managerFactoryGenerator = new ManagerFactoryGenerator();
        if (!metaList.isEmpty()) {
            managerFactoryGenerator.generate(filer, metaList);
        }
    }
}
|
using System;
using System.Collections.Generic;
namespace AE.Net.Mail
{
    /// <summary>
    /// Represents a single e-mail attachment.
    /// Placeholder: properties and methods are yet to be defined.
    /// </summary>
    public class Attachment
    {
        // Define properties and methods for the Attachment class
    }
    /// <summary>
    /// Collects e-mail attachments and exposes the raw message headers
    /// inherited from <see cref="ObjectWHeaders"/>.
    /// </summary>
    public class EmailProcessor : ObjectWHeaders
    {
        /// <summary>The attachments added so far; initialized empty, never null.</summary>
        public List<Attachment> Attachments { get; private set; }

        public EmailProcessor()
        {
            Attachments = new List<Attachment>();
        }

        /// <summary>Appends one attachment to the list.</summary>
        public void AddAttachment(Attachment attachment)
        {
            Attachments.Add(attachment);
        }

        /// <summary>Returns the live attachment list (same instance as <see cref="Attachments"/>).</summary>
        public List<Attachment> GetAttachments()
        {
            return Attachments;
        }

        // Pass-through override. NOTE(review): presumably kept so this type can
        // later intercept header assignment -- confirm against ObjectWHeaders.
        public override string RawHeaders
        {
            get => base.RawHeaders;
            set => base.RawHeaders = value;
        }
    }
} |
<reponame>c6401/Snippets
javascript:(() => { })()
|
function getEnvironment(): string {
const nodeEnv = process.env.NODE_ENV;
if (nodeEnv === "development" || nodeEnv === "production" || nodeEnv === "test") {
return nodeEnv;
} else {
return "unknown";
}
} |
import React from 'react'
import { useGlobalContext } from './context'
// Placeholder component: renders a static heading until real story data
// (presumably supplied via useGlobalContext) is wired in — TODO confirm.
const Stories = () => {
  return <h2>stories component</h2>
}
export default Stories
|
#!/usr/bin/env bash
# Require the three mandatory arguments: database name, user and password.
if [ $# -lt 3 ]; then
    echo "usage: $0 <db-name> <db-user> <db-pass> [db-host] [wp-version] [skip-database-creation]"
    exit 1
fi

DB_NAME=$1
DB_USER=$2
DB_PASS=$3
# Optional arguments with sensible defaults.
DB_HOST=${4-localhost}
WP_VERSION=${5-latest}
SKIP_DB_CREATE=${6-false}

# Normalise TMPDIR (default /tmp, strip a trailing slash) and derive the
# WordPress checkout locations from it, unless already set by the caller.
TMPDIR=${TMPDIR-/tmp}
TMPDIR=$(echo $TMPDIR | sed -e "s/\/$//")
WP_TESTS_DIR=${WP_TESTS_DIR-$TMPDIR/wordpress-tests-lib}
WP_CORE_DIR=${WP_CORE_DIR-$TMPDIR/wordpress/}
# download <url> <dest> — fetch a URL with curl or wget, whichever exists.
# BUG FIX: the original fell through silently when neither tool was
# installed, letting later steps operate on files that were never fetched.
# Now it reports the problem and returns non-zero.
download() {
    if [ `which curl` ]; then
        curl -s "$1" > "$2";
    elif [ `which wget` ]; then
        wget -nv -O "$2" "$1"
    else
        echo "Error: neither curl nor wget is installed; cannot download $1" >&2
        return 1
    fi
}
# Map the requested WP_VERSION onto the matching tag/branch of the
# wordpress-develop SVN repository.
if [[ $WP_VERSION =~ ^[0-9]+\.[0-9]+\-(beta|RC)[0-9]+$ ]]; then
    # Pre-releases (x.y-betaN / x.y-RCN) live on the x.y branch.
    WP_BRANCH=${WP_VERSION%\-*}
    WP_TESTS_TAG="branches/$WP_BRANCH"
elif [[ $WP_VERSION =~ ^[0-9]+\.[0-9]+$ ]]; then
    WP_TESTS_TAG="branches/$WP_VERSION"
elif [[ $WP_VERSION =~ [0-9]+\.[0-9]+\.[0-9]+ ]]; then
    if [[ $WP_VERSION =~ [0-9]+\.[0-9]+\.[0] ]]; then
        # version x.x.0 means the first release of the major version, so strip off the .0 and download version x.x
        WP_TESTS_TAG="tags/${WP_VERSION%??}"
    else
        WP_TESTS_TAG="tags/$WP_VERSION"
    fi
elif [[ $WP_VERSION == 'nightly' || $WP_VERSION == 'trunk' ]]; then
    WP_TESTS_TAG="trunk"
else
    # http serves a single offer, whereas https serves multiple. we only want one
    download http://api.wordpress.org/core/version-check/1.7/ /tmp/wp-latest.json
    # NOTE(review): this grep uses ERE tokens (+, (...), ?) without -E, so in
    # BRE mode it matches nothing — looks like leftover debugging output; the
    # real extraction happens on the next line. Confirm and consider removing.
    grep '[0-9]+\.[0-9]+(\.[0-9]+)?' /tmp/wp-latest.json
    LATEST_VERSION=$(grep -o '"version":"[^"]*' /tmp/wp-latest.json | sed 's/"version":"//')
    if [[ -z "$LATEST_VERSION" ]]; then
        echo "Latest WordPress version could not be found"
        exit 1
    fi
    WP_TESTS_TAG="tags/$LATEST_VERSION"
fi
# From here on: echo every command and stop on the first error.
set -ex
# Download the requested WordPress core into $WP_CORE_DIR (no-op when that
# directory already exists) and drop in the wp-mysqli db.php shim.
install_wp() {
    if [ -d $WP_CORE_DIR ]; then
        return;
    fi

    mkdir -p $WP_CORE_DIR

    if [[ $WP_VERSION == 'nightly' || $WP_VERSION == 'trunk' ]]; then
        # Nightly builds are only published as a zip archive.
        mkdir -p $TMPDIR/wordpress-nightly
        download https://wordpress.org/nightly-builds/wordpress-latest.zip $TMPDIR/wordpress-nightly/wordpress-nightly.zip
        unzip -q $TMPDIR/wordpress-nightly/wordpress-nightly.zip -d $TMPDIR/wordpress-nightly/
        mv $TMPDIR/wordpress-nightly/wordpress/* $WP_CORE_DIR
    else
        if [ $WP_VERSION == 'latest' ]; then
            local ARCHIVE_NAME='latest'
        elif [[ $WP_VERSION =~ [0-9]+\.[0-9]+ ]]; then
            # https serves multiple offers, whereas http serves single.
            download https://api.wordpress.org/core/version-check/1.7/ $TMPDIR/wp-latest.json
            if [[ $WP_VERSION =~ [0-9]+\.[0-9]+\.[0] ]]; then
                # version x.x.0 means the first release of the major version, so strip off the .0 and download version x.x
                LATEST_VERSION=${WP_VERSION%??}
            else
                # otherwise, scan the releases and get the most up to date minor version of the major release
                local VERSION_ESCAPED=`echo $WP_VERSION | sed 's/\./\\\\./g'`
                LATEST_VERSION=$(grep -o '"version":"'$VERSION_ESCAPED'[^"]*' $TMPDIR/wp-latest.json | sed 's/"version":"//' | head -1)
            fi
            if [[ -z "$LATEST_VERSION" ]]; then
                local ARCHIVE_NAME="wordpress-$WP_VERSION"
            else
                local ARCHIVE_NAME="wordpress-$LATEST_VERSION"
            fi
        else
            local ARCHIVE_NAME="wordpress-$WP_VERSION"
        fi
        download https://wordpress.org/${ARCHIVE_NAME}.tar.gz $TMPDIR/wordpress.tar.gz
        tar --strip-components=1 -zxmf $TMPDIR/wordpress.tar.gz -C $WP_CORE_DIR
    fi

    download https://raw.github.com/markoheijnen/wp-mysqli/master/db.php $WP_CORE_DIR/wp-content/db.php
}
# Check out the PHPUnit test harness matching $WP_TESTS_TAG and generate a
# wp-tests-config.php pointed at this environment's core dir and database.
install_test_suite() {
    # portable in-place argument for both GNU sed and Mac OSX sed
    if [[ $(uname -s) == 'Darwin' ]]; then
        local ioption='-i.bak'
    else
        local ioption='-i'
    fi

    # set up testing suite if it doesn't yet exist
    if [ ! -d $WP_TESTS_DIR ]; then
        # set up testing suite
        mkdir -p $WP_TESTS_DIR
        svn co --quiet https://develop.svn.wordpress.org/${WP_TESTS_TAG}/tests/phpunit/includes/ $WP_TESTS_DIR/includes
        #svn co --quiet https://develop.svn.wordpress.org/${WP_TESTS_TAG}/tests/phpunit/data/ $WP_TESTS_DIR/data
    fi

    if [ ! -f wp-tests-config.php ]; then
        download https://develop.svn.wordpress.org/${WP_TESTS_TAG}/wp-tests-config-sample.php "$WP_TESTS_DIR"/wp-tests-config.php
        # remove all forward slashes in the end
        WP_CORE_DIR=$(echo $WP_CORE_DIR | sed "s:/\+$::")
        # Point the sample config at our core checkout and database settings.
        sed $ioption "s:dirname( __FILE__ ) . '/src/':'$WP_CORE_DIR/':" "$WP_TESTS_DIR"/wp-tests-config.php
        sed $ioption "s/youremptytestdbnamehere/$DB_NAME/" "$WP_TESTS_DIR"/wp-tests-config.php
        sed $ioption "s/yourusernamehere/$DB_USER/" "$WP_TESTS_DIR"/wp-tests-config.php
        sed $ioption "s/yourpasswordhere/$DB_PASS/" "$WP_TESTS_DIR"/wp-tests-config.php
        sed $ioption "s|localhost|${DB_HOST}|" "$WP_TESTS_DIR"/wp-tests-config.php
    fi
}
# Create the test database unless SKIP_DB_CREATE=true. Understands the
# host, host:port and host:socket forms of DB_HOST.
install_db() {
    if [ ${SKIP_DB_CREATE} = "true" ]; then
        return 0
    fi

    # parse DB_HOST for port or socket references
    local PARTS=(${DB_HOST//\:/ })
    local DB_HOSTNAME=${PARTS[0]};
    local DB_SOCK_OR_PORT=${PARTS[1]};
    local EXTRA=""

    if ! [ -z $DB_HOSTNAME ] ; then
        # A purely numeric second part is a TCP port; otherwise a socket path.
        if [ $(echo $DB_SOCK_OR_PORT | grep -e '^[0-9]\{1,\}$') ]; then
            EXTRA=" --host=$DB_HOSTNAME --port=$DB_SOCK_OR_PORT --protocol=tcp"
        elif ! [ -z $DB_SOCK_OR_PORT ] ; then
            EXTRA=" --socket=$DB_SOCK_OR_PORT"
        elif ! [ -z $DB_HOSTNAME ] ; then
            EXTRA=" --host=$DB_HOSTNAME --protocol=tcp"
        fi
    fi

    # create database
    mysqladmin create $DB_NAME --user="$DB_USER" --password="$DB_PASS"$EXTRA
}
# Run the three installation phases in order.
install_wp
install_test_suite
install_db
|
package simulation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* ๋ฐฑ์ค 1057๋ฒ: ํ ๋๋จผํธ
*
* @see https://www.acmicpc.net/problem/1057/
*
*/
/**
 * BOJ 1057 (tournament): given N participants and the bracket numbers of two
 * players, print the round in which they can first meet in a
 * single-elimination tournament.
 *
 * BUG FIX: the original Korean comments were mojibake and one of them was
 * split across source lines (a bare non-comment line), which broke
 * compilation. Comments rewritten in English; executable code is unchanged.
 */
public class Boj1057 {
    public static void main(String[] args) throws Exception {
        // Read N, r1, r2 from standard input (N is part of the input format
        // but not needed by the algorithm).
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int N = Integer.parseInt(st.nextToken());
        int r1 = Integer.parseInt(st.nextToken());
        int r2 = Integer.parseInt(st.nextToken());

        // Round each seed up to an even number so r/2 yields the seed in the
        // next round.
        if (r1 % 2 == 1) r1++;
        if (r2 % 2 == 1) r2++;

        int rCnt = 1;
        // Halve both seeds each round; the players meet in the round where
        // the halves first coincide.
        while ((r1 /= 2) != (r2 /= 2)) {
            if (r1 % 2 == 1) r1++; // keep seeds even between rounds
            if (r2 % 2 == 1) r2++;
            rCnt++; // advance one round
        }
        System.out.println(rCnt); // round in which the two players meet
    }
}
|
#!/bin/bash
# Stage everything, commit with a fixed message, and push to the current
# branch's upstream. NOTE(review): `git commit` fails (and stops the push)
# when there is nothing to commit — confirm that is the intended behaviour.
git add -A
git commit -m "Alg adding"
git push
class CustomQuantizationModule:
    """Bundle of convolution hyper-parameters plus activation/weight quantizers.

    q_type == 0 selects symmetric quantization; any other value selects
    asymmetric. The quantizer and range-tracker classes are provided by the
    surrounding module.
    """

    def __init__(self, kernel_size, stride, padding, dilation, groups, bias,
                 q_type, a_bits, w_bits, out_channels, first_layer):
        # Plain convolution configuration, stored verbatim.
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = padding
        self.dilation = dilation
        self.groups = groups
        self.bias = bias
        self.first_layer = first_layer

        # Choose the quantizer family once, then build both quantizers:
        # activations track a layer-level ('L') averaged range, weights track
        # a per-channel ('C') global range.
        quantizer_cls = SymmetricQuantizer if q_type == 0 else AsymmetricQuantizer
        self.activation_quantizer = quantizer_cls(
            bits=a_bits, range_tracker=AveragedRangeTracker(q_level='L'))
        self.weight_quantizer = quantizer_cls(
            bits=w_bits,
            range_tracker=GlobalRangeTracker(q_level='C', out_channels=out_channels))
import random  # retained: later parts of the file may rely on it
import secrets
import string

length = 10
# Generate a 10-character password from letters, digits, and punctuation
# minus its first and last characters ('!' and '~') — the slice is preserved
# from the original; NOTE(review): confirm that exclusion is intentional.
# BUG FIX: use the cryptographically secure `secrets` module instead of
# `random`, which is a PRNG unsuitable for generating passwords.
password = "".join(secrets.choice(string.ascii_letters + string.digits + string.punctuation[1:-1]) for _ in range(length))
print(password)
const express = require('express')
const Promise = require('bluebird')
const TwitterStrategy = require('passport-twitter').Strategy
const Twitter = require('twitter')
const Boom = require('boom')
const logger = require('hw-logger')
const log = logger.log
const helper = require('../../../../helper')
const router = express.Router()
module.exports = (config, passport) => {
router.get('/', (req, res, next) => passport.authenticate(
'twitter', {
callbackURL: `${helper.getPublicBaseUrl(req)}${req.baseUrl}/callback`,
})(req, res, next)
)
router.get('/callback',
(req, res, next) => passport.authenticate('twitter', {
failureRedirect: `${req.baseUrl}/fail`,
callbackURL: `${helper.getPublicBaseUrl(req)}${req.baseUrl}/callback`,
})(req, res, next),
(req, res) => {
const token = helper.createJwt(config, req.user)
res.cookie('quizzy-user', JSON.stringify(req.user), {maxAge: config.cookiesMaxAge})
res.cookie('quizzy-token', token, {maxAge: config.cookiesMaxAge})
res.redirect('/')
}
)
router.get('/fail', (/*req, res*/) => {
Boom.forbidden('Login failed')
})
passport.use(new TwitterStrategy(
{
consumerKey: config.get('auth.twitter.consumerKey'),
consumerSecret: config.get('auth.twitter.consumerSecret'),
},
(accessToken, accessTokenSecret, profile, done) => {
logger.enabledLevels.debug && log.debug('twitter profile :', profile)
return new Promise(
(resolve, reject) => {
const client = new Twitter({
consumer_key: config.get('auth.twitter.consumerKey'),
consumer_secret: config.get('auth.twitter.consumerSecret'),
access_token_key: accessToken,
access_token_secret: accessTokenSecret,
})
client.get('account/verify_credentials', {include_email: true, skip_status: true}, (err, data) => {
if (err) {
return reject(err)
}
const user = {
provider: 'twitter',
name: data.name,
email: data.email,
}
user.admin = helper.isAdmin(config, user)
resolve(user)
})
})
.asCallback(done)
}
))
return router
}
|
import os
import time
import random
import requests
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
bot = ChatBot('Weather Bot')

# Train the bot with all the corpus related to conversations
trainer = ChatterBotCorpusTrainer(bot)
trainer.train('chatterbot.corpus.english.conversations')

# Train the chatbot with the weather-related conversations
trainer.train('data/weather_data.yml')

# Main loop: a simple REPL — read a line, print the bot's answer;
# typing "exit" terminates the program.
while True:
    query = input()
    if query == 'exit':
        break
    response = bot.get_response(query)
    print('Bot:', response)
<reponame>sigmametals/sigmametals.github.io<filename>bower_components/foundation/node_modules/grunt-rsync/node_modules/rsyncwrapper/tests/package.js
"use strict";
// Vows test suite asserting the public surface of the rsyncwrapper package.
var vows = require("vows");
var assert = require("assert");
var rsyncwrapper = require("../lib/rsyncwrapper");

exports.suite = vows.describe("Package tests").addBatch({
    "The RSyncWrapper package": {
        topic: rsyncwrapper,
        "is not null": function (topic) {
            assert.isNotNull(topic);
        },
        "has a 'rsync()' function": function (topic) {
            assert.isFunction(topic.rsync);
        },
        // Calling rsync() without an options object must throw.
        "errors when started without options": function (topic) {
            assert.throws(topic.rsync,Error);
        }
    }
});
<filename>src/main/java/fr/clementgre/pdf4teachers/utils/dialogs/alerts/ButtonPosition.java
package fr.clementgre.pdf4teachers.utils.dialogs.alerts;
/**
 * Logical placement/role of a button in an alert dialog.
 * NOTE(review): presumably mapped to JavaFX ButtonBar positions by the
 * alert helpers in this package — confirm the mapping site.
 */
public enum ButtonPosition{
    DEFAULT,
    CLOSE,
    OTHER_RIGHT,
    OTHER_LEFT
}
|
#!/bin/sh
#
# Copyright IBM Corp All Rights Reserved
#
# SPDX-License-Identifier: Apache-2.0
#
# NOTE(review): the original note said "Exit on first error, print all
# commands", but `set +ev` *disables* errexit and verbose mode — confirm
# whether -ev was intended or failures are deliberately tolerated here.
set +ev
# Count running containers labelled with this fabric environment's name.
NUM_CONTAINERS=$(docker ps -f label=fabric-environment-name="<%= name %>" -q | wc -l | tr -d ' ')
# Exit status 1 = environment not running, 0 = at least one container up.
if [ "${NUM_CONTAINERS}" -eq 0 ]; then
    exit 1
fi
exit 0
/**
 * Build a plain HAL property record from a destructurable source object.
 * @param {{key: *, value: *}} props - object carrying the key/value pair
 * @returns {{key: *, value: *}} a new object exposing exactly `key` and `value`
 */
function HALProperty(props) {
  const { key, value } = props;
  const property = { key: key, value: value };
  return property;
}
export default HALProperty;
|
<filename>doc/html/search/files_2.js
// Auto-generated search index (Doxygen HTML search data) — do not edit by hand.
var searchData=
[
  ['readme_2emd_0',['README.md',['../doc_2css_2dark_2_r_e_a_d_m_e_8md.html',1,'']]]
];
|
#!/bin/bash
# Install build prerequisites, fetch the third-party sources, then build and
# install this project (with CUDA enabled) under the /usr prefix.
sudo apt-get install -y git
./clone_3rdparty.sh
sudo apt-get -y install automake autoconf libtool python-dev libblas-dev
./autogen.sh
./configure --prefix=/usr --enable-cuda
make
sudo make install
|
#!/bin/bash
#ps -ef | grep 172.24.83.25:7002 | grep -v grep | awk '{print $2}' |xargs kill -9
#ssh -Tq zhaole@xuserver002 << remotessh
#ps -ef | grep 172.24.83.27:7004 | grep -v grep | awk '{print \$2}' |xargs kill -9
#exit
#remotessh
#ssh -Tq zhaole@xuserver003 << remotessh
#ps -ef | grep 172.24.83.26:7009 | grep -v grep | awk '{print \$2}' |xargs kill
#-9
#exit
#remotessh
#echo -e "\033[32m master nodes are stopped \033[0m"
#sleep 20
# Start the master node on this host.
/data/zhaole/redis-cluster/redis-server /data/zhaole/redis-cluster/7002/redis.conf
# Start the remaining master nodes on the other hosts via ssh heredocs.
ssh -Tq zhaole@xuserver002 << remotessh
/data/zhaole/redis-cluster/redis-server /data/zhaole/redis-cluster/7004/redis.conf
exit
remotessh
ssh -Tq zhaole@xuserver003 << remotessh
/data/zhaole/redis-cluster/redis-server /data/zhaole/redis-cluster/7009/redis.conf
exit
remotessh
# Print a green "started" banner.
echo -e "\033[32m master nodes are started \033[0m"
|
#!/bin/bash
#
###############################################################################
#
# Build a binary package on Windows with MinGW and MSYS
#
# Set the paths where MinGW, Mingw-w32, or MinGW-w64 are installed. If both
# MinGW and MinGW-w32 are specified, MinGW-w32 will be used. If there is no
# 32-bit or 64-bit compiler at all, it is simply skipped.
#
# Optionally, 7-Zip is used to create the final .zip and .7z packages.
# If you have installed it in the default directory, this script should
# find it automatically. Otherwise adjust the path manually.
#
# If you want to use a cross-compiler e.g. on GNU/Linux, this script won't
# work out of the box. You need to omit "make check" commands and replace
# u2d with some other tool to convert newlines from LF to CR+LF. You will
# also need to pass the --host option to configure.
#
###############################################################################
#
# Author: Lasse Collin
#
# This file has been put into the public domain.
# You can do whatever you want with this file.
#
###############################################################################
# Toolchain locations; a missing directory simply skips that build.
MINGW_DIR=/c/devel/tools/mingw
MINGW_W32_DIR=/c/devel/tools/mingw-w32
MINGW_W64_DIR=/c/devel/tools/mingw-w64

# Locate 7z.exe in the usual install directories; the first match wins.
for SEVENZ_EXE in "$PROGRAMW6432/7-Zip/7z.exe" "$PROGRAMFILES/7-Zip/7z.exe" \
        "/c/Program Files/7-Zip/7z.exe"
do
    [ -x "$SEVENZ_EXE" ] && break
done

# Abort immediately if something goes wrong.
set -e

# White spaces in directory names may break things so catch them immediately.
case $(pwd) in
    ' ' | ' ' | '
    ') echo "Error: White space in the directory name" >&2; exit 1 ;;
esac

# This script can be run either at the top-level directory of the package
# or in the same directory containing this script.
if [ ! -f windows/build.bash ]; then
    cd ..
    if [ ! -f windows/build.bash ]; then
        echo "You are in a wrong directory." >&2
        exit 1
    fi
fi
# Run configure and copy the binaries to the given directory.
#
# The first argument is the directory where to copy the binaries.
# The rest of the arguments are passed to configure.
#
# buildit <destdir> <build-triplet> <cflags>
# Performs two complete builds: a size-optimized pass producing
# xzdec/lzmadec/lzmainfo, then a speed-optimized pass producing xz and
# liblzma, running the test suite of each.
buildit()
{
    DESTDIR=$1
    BUILD=$2
    CFLAGS=$3

    # Clean up if it was already configured.
    [ -f Makefile ] && make distclean

    # Build the size-optimized binaries. Providing size-optimized liblzma
    # could be considered but I don't know if it should only use -Os or
    # should it also use --enable-small and if it should support
    # threading. So I don't include a size-optimized liblzma for now.
    ./configure \
        --prefix= \
        --enable-silent-rules \
        --disable-dependency-tracking \
        --disable-nls \
        --disable-scripts \
        --disable-threads \
        --disable-shared \
        --enable-small \
        --build="$BUILD" \
        CFLAGS="$CFLAGS -Os"
    make check

    mkdir -pv "$DESTDIR"
    cp -v src/xzdec/{xz,lzma}dec.exe src/lzmainfo/lzmainfo.exe "$DESTDIR"

    make distclean

    # Build the normal speed-optimized binaries. The type of threading
    # (win95 vs. vista) will be autodetect from the target architecture.
    ./configure \
        --prefix= \
        --enable-silent-rules \
        --disable-dependency-tracking \
        --disable-nls \
        --disable-scripts \
        --build="$BUILD" \
        CFLAGS="$CFLAGS -O2"
    make -C src/liblzma
    make -C src/xz LDFLAGS=-static
    make -C tests check

    cp -v src/xz/xz.exe src/liblzma/.libs/liblzma.a "$DESTDIR"
    cp -v src/liblzma/.libs/liblzma-*.dll "$DESTDIR/liblzma.dll"
    strip -v "$DESTDIR/"*.{exe,dll}
    strip -vg "$DESTDIR/"*.a
}
# Copy files and convert newlines from LF to CR+LF. Optionally add a suffix
# to the destination filename.
#
# The first argument is the destination directory. The second argument is
# the suffix to append to the filenames; use empty string if no extra suffix
# is wanted. The rest of the arguments are the actual filenames.
txtcp()
{
    DESTDIR=$1
    SUFFIX=$2
    shift 2
    for SRCFILE; do
        DESTFILE="$DESTDIR/${SRCFILE##*/}$SUFFIX"
        echo "Converting \`$SRCFILE' -> \`$DESTFILE'"
        # u2d rewrites LF line endings as CR+LF while copying.
        u2d < "$SRCFILE" > "$DESTFILE"
    done
}
# Build for each toolchain that is actually installed; MinGW-w32 takes
# precedence over plain MinGW for the 32-bit builds.
if [ -d "$MINGW_W32_DIR" ]; then
    # 32-bit x86, Win95 or later, using MinGW-w32
    PATH=$MINGW_W32_DIR/bin:$MINGW_W32_DIR/i686-w64-mingw32/bin:$PATH \
            buildit \
            pkg/bin_i686 \
            i686-w64-mingw32 \
            '-march=i686 -mtune=generic'
    # 32-bit x86 with SSE2, Win98 or later, using MinGW-w32
    PATH=$MINGW_W32_DIR/bin:$MINGW_W32_DIR/i686-w64-mingw32/bin:$PATH \
            buildit \
            pkg/bin_i686-sse2 \
            i686-w64-mingw32 \
            '-march=i686 -msse2 -mfpmath=sse -mtune=generic'
elif [ -d "$MINGW_DIR" ]; then
    # 32-bit x86, Win95 or later, using MinGW
    PATH=$MINGW_DIR/bin:$PATH \
            buildit \
            pkg/bin_i486 \
            i486-pc-mingw32 \
            '-march=i486 -mtune=generic'
fi
if [ -d "$MINGW_W64_DIR" ]; then
    # x86-64, Windows Vista or later, using MinGW-w64
    PATH=$MINGW_W64_DIR/bin:$MINGW_W64_DIR/x86_64-w64-mingw32/bin:$PATH \
            buildit \
            pkg/bin_x86-64 \
            x86_64-w64-mingw32 \
            '-march=x86-64 -mtune=generic'
fi

# Copy the headers, the .def file, and the docs.
# They are the same for all architectures and builds.
mkdir -pv pkg/{include/lzma,doc/{manuals,examples}}
txtcp pkg/include "" src/liblzma/api/lzma.h
txtcp pkg/include/lzma "" src/liblzma/api/lzma/*.h
txtcp pkg/doc "" src/liblzma/liblzma.def
txtcp pkg/doc .txt AUTHORS COPYING NEWS README THANKS TODO
txtcp pkg/doc "" doc/*.txt windows/README-Windows.txt
txtcp pkg/doc/manuals "" doc/man/txt/{xz,xzdec,lzmainfo}.txt
cp -v doc/man/pdf-*/{xz,xzdec,lzmainfo}-*.pdf pkg/doc/manuals
txtcp pkg/doc/examples "" doc/examples/*
if [ -f windows/COPYING-Windows.txt ]; then
    txtcp pkg/doc "" windows/COPYING-Windows.txt
fi

# Create the package. This requires 7z.exe from 7-Zip. If it wasn't found,
# this step is skipped and you have to zip it yourself.
VER=$(sh build-aux/version.sh)
cd pkg
if [ -x "$SEVENZ_EXE" ]; then
    "$SEVENZ_EXE" a -tzip ../xz-$VER-windows.zip *
    "$SEVENZ_EXE" a ../xz-$VER-windows.7z *
else
    echo
    echo "NOTE: 7z.exe was not found. xz-$VER-windows.zip"
    echo " and xz-$VER-windows.7z were not created."
    echo " You can create them yourself from the pkg directory."
fi
if [ ! -f ../windows/COPYING-Windows.txt ]; then
    echo
    echo "NOTE: windows/COPYING-Windows.txt doesn't exists."
    echo " MinGW(-w64) runtime copyright information"
    echo " is not included in the package."
fi
echo
echo "Build completed successfully."
echo
|
<filename>src/java/com/itstore/services/LoginService.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.itstore.services;
/**
*
* @author user
*/
/**
 * Thin wrapper around SessionService that answers whether a user is
 * currently logged in (i.e. a "user" attribute exists in the session).
 */
public class LoginService {

    // Injected session abstraction; must be set via setService() before use.
    SessionService service;

    public LoginService() {
    }

    /**
     * @return true when a "user" attribute is present in the session.
     *         BUG FIX: also returns false (instead of throwing a
     *         NullPointerException) when no SessionService has been injected;
     *         the boolean is now returned directly rather than via an
     *         if/return-true/return-false chain.
     */
    public boolean isLoggedIn() {
        return service != null && service.getAttribute("user") != null;
    }

    public SessionService getService() {
        return service;
    }

    public void setService(SessionService service) {
        this.service = service;
    }
}
|
package pokeapigo
// EvolutionChain is the root node of a species' evolution tree.
// Field names keep the API's snake_case spelling; presumably these structs
// are decoded from the API's JSON (encoding/json matches field names
// case-insensitively) — confirm at the decoding site. Renaming the fields
// would break existing callers.
type EvolutionChain struct {
	Id                int
	Baby_Trigger_Item NamedAPIResource
	Chain             ChainLink
}

// ChainLink is one node in the evolution tree; Evolves_To makes it recursive.
type ChainLink struct {
	Is_Baby           bool
	Species           NamedAPIResource
	Evolution_Details []EvolutionDetail
	Evolves_To        []ChainLink
}

// EvolutionDetail lists the conditions that can gate one evolution step.
type EvolutionDetail struct {
	Item                    NamedAPIResource
	Trigger                 NamedAPIResource
	Gender                  int
	Held_Item               NamedAPIResource
	Known_Move              NamedAPIResource
	Known_Move_Type         NamedAPIResource
	Location                NamedAPIResource
	Min_Level               int
	Min_Happiness           int
	Min_Beauty              int
	Min_Affection           int
	Needs_Overworld_Rain    bool
	Party_Species           NamedAPIResource
	Party_Type              NamedAPIResource
	Relative_Physical_Stats int
	Time_Of_Day             string
	Trade_Species           NamedAPIResource
	Turn_Upside_Down        bool
}

// EvolutionTrigger is a named evolution trigger and the species that use it.
type EvolutionTrigger struct {
	Id              int
	Name            string
	Names           []Name
	Pokemon_Species []NamedAPIResource
}
|
package module
import (
"context"
"os"
"path/filepath"
"github.com/google/go-containerregistry/pkg/name"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/log"
"github.com/aquasecurity/trivy/pkg/oci"
)
const mediaType = "application/vnd.module.wasm.content.layer.v1+wasm"
// Install installs a module.
// It parses the OCI reference, downloads the WASM artifact published under
// the module media type, and stores it at dir()/<registry>/<repository>.
func Install(ctx context.Context, repo string, quiet, insecure bool) error {
	ref, err := name.ParseReference(repo)
	if err != nil {
		return xerrors.Errorf("repository parse error: %w", err)
	}

	log.Logger.Infof("Installing the module from %s...", repo)
	artifact, err := oci.NewArtifact(repo, mediaType, quiet, insecure)
	if err != nil {
		return xerrors.Errorf("module initialize error: %w", err)
	}

	// Destination is derived from the fully-qualified reference so modules
	// from different registries cannot collide.
	dst := filepath.Join(dir(), ref.Context().Name())
	log.Logger.Debugf("Installing the module to %s...", dst)

	if err = artifact.Download(ctx, dst); err != nil {
		return xerrors.Errorf("module download error: %w", err)
	}

	return nil
}
// Uninstall uninstalls a module by removing its directory under dir().
// The context parameter is accepted for interface symmetry but unused.
func Uninstall(_ context.Context, repo string) error {
	parsedRef, parseErr := name.ParseReference(repo)
	if parseErr != nil {
		return xerrors.Errorf("repository parse error: %w", parseErr)
	}

	log.Logger.Infof("Uninstalling %s ...", repo)

	// Mirror of the install destination: dir()/<registry>/<repository>.
	moduleDir := filepath.Join(dir(), parsedRef.Context().Name())
	if removeErr := os.RemoveAll(moduleDir); removeErr != nil {
		return xerrors.Errorf("remove error: %w", removeErr)
	}

	return nil
}
|
<reponame>pierrekeda/orion
import { TextureLoader } from 'three';
import winlo from 'winlo';
import { Floor } from './scene/Floor';
import { Orb } from './scene/Orb.js';
import { Ticker } from './misc/Ticker';
import { controls } from './controls';
import { gui } from './gui';
import { render } from './render';
import { stage } from './stage';
import { settings } from './settings';
// Load settings
winlo.init();
winlo.digits = 2;
settings.load();
// Build scene
const { scene } = stage;
const floor = new Floor();
floor.position.y = - 1.35;
scene.add( floor );
const orb = new Orb();
scene.add( orb );
// Load texture
const loader = new TextureLoader();
loader.load(
'textures/polished_concrete_basecolor.jpg', // URL
( texture ) => init( texture ), // onLoad
undefined, // onProgress
() => init() // onError
);
// Callbacks

// Bootstrap the app once the floor texture has loaded. init() is also used
// as the loader's onError callback (called with no argument), so the scene
// still starts with an untextured floor if the texture fails to load.
function init( texture ) {
	settings.init( orb );
	floor.init( texture );
	render.init();
	controls.init();
	setTimeout( gui.init, 0 ); // hack for weird dat.gui + winlo bug
	window.addEventListener( 'resize', resize );
	resize();
	const ticker = new Ticker( animate, 60 );
	ticker.start();
}
// Propagate the new viewport size to every resizable module, and push the
// orb back (z = 2) on portrait viewports so it stays fully visible.
function resize() {
	const { innerWidth, innerHeight, devicePixelRatio } = window;
	const width = innerWidth;
	const height = innerHeight;
	const toResize = [ stage, render, floor, controls ];
	toResize.forEach( item => item.resize( width, height, devicePixelRatio ) );
	orb.position.z = ( width < height ) ? 2 : 0;
}
// Per-frame tick: advance the controls, the orb, and the renderer.
function animate( time ) {
	for ( const updatable of [ controls, orb, render ] ) {
		updatable.update( time );
	}
}
|
#!/bin/bash
# Apply every patch in ./patches with `git am`; on any failure, roll the
# repository back to origin/master and abort with instructions.
cleanup() {
    git am --abort
    git reset --hard origin/master
    >&2 echo "Warning: git am of $file failed"
    >&2 echo "git am --abort has already been executed"
    >&2 echo "as well as 'git reset --hard origin/master'"
    >&2 echo "to reset manually, remove the patches directory'"
    exit 1
}
# Any failing command (e.g. a patch that does not apply) triggers cleanup.
trap 'cleanup' ERR
for file in patches/* ; do
    git am $file
done
|
<filename>ANattr/src/ZombieAttr.hpp
#ifndef ZOMBIE_ATTR_HPP_
#define ZOMBIE_ATTR_HPP_
//============================================================================
// Name :
// Author : Avi
// Revision : $Revision: #9 $
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description :
//============================================================================
#include "Child.hpp"
#include "User.hpp"
namespace cereal { class access; }
// Class ZombieAttr:
// Use compiler , generated destructor, assignment, copy constructor
// ZombieAttr does *not* have any changeable state
class ZombieAttr {
public:
   /// Describes how zombies of the given type are handled: the child
   /// commands it applies to, the automatic user action, and how long the
   /// zombie lives in the server (0 selects the per-type default).
   ZombieAttr(ecf::Child::ZombieType t, const std::vector<ecf::Child::CmdType>& c, ecf::User::Action a, int zombie_lifetime = 0);
   ZombieAttr()= default;

   bool operator==(const ZombieAttr& rhs) const;
   void print(std::string&) const;
   // Attribute is "empty" until a zombie type has been set.
   bool empty() const { return zombie_type_ == ecf::Child::NOT_SET; }

   // Accessors for the immutable state.
   ecf::Child::ZombieType zombie_type() const { return zombie_type_;}
   ecf::User::Action action() const { return action_; }
   int zombie_lifetime() const { return zombie_lifetime_; }
   const std::vector<ecf::Child::CmdType>& child_cmds() const { return child_cmds_; }
   std::vector<ecf::Child::CmdType>::const_iterator child_begin() const { return child_cmds_.begin();} // for python
   std::vector<ecf::Child::CmdType>::const_iterator child_end() const { return child_cmds_.end(); } // for python

   std::string toString() const;

   // Predicates: does this attribute apply the named action to the command?
   bool fob( ecf::Child::CmdType ) const;
   bool fail( ecf::Child::CmdType ) const;
   bool adopt( ecf::Child::CmdType ) const;
   bool block( ecf::Child::CmdType ) const;
   bool remove( ecf::Child::CmdType ) const;
   bool kill( ecf::Child::CmdType ) const;

   /// Create from a string. Will throw std::runtime_error of parse errors
   /// expects <zombie_type>:<user_action>:child_cmds:zombie_lifetime
   static ZombieAttr create(const std::string& str);

   // Added to support return by reference
   static const ZombieAttr& EMPTY();

   // Provide the default behaviour
   static ZombieAttr get_default_attr(ecf::Child::ZombieType);
   // Default/minimum lifetimes in seconds, per zombie type.
   static int default_ecf_zombie_life_time() { return 3600; }
   static int default_user_zombie_life_time() { return 300; }
   static int default_path_zombie_life_time() { return 900; }
   static int minimum_zombie_life_time() { return 60; }

private:
   void write(std::string&) const;

private:
   std::vector<ecf::Child::CmdType> child_cmds_; // init, event, meter,label, complete
   ecf::Child::ZombieType zombie_type_{ecf::Child::NOT_SET}; // User,path or ecf
   ecf::User::Action action_{ecf::User::BLOCK}; // fob, fail,remove, adopt, block, kill
   int zombie_lifetime_{0}; // How long zombie lives in server
   friend class cereal::access;
   template<class Archive>
   void serialize(Archive & ar, std::uint32_t const version );
};
#endif
|
#!/bin/bash
set -e
set -x

# CI bootstrap (Linux only): install a cross-compile-capable GCC, pin CMake,
# install conan + conan_package_tools, and create a default conan profile
# that uses the libstdc++11 ABI.
if [[ "$(uname -s)" == 'Linux' ]]; then
    # Also install -multilib to cross-compile from x86-64 to x86
    # https://stackoverflow.com/questions/4643197/missing-include-bits-cconfig-h-when-cross-compiling-64-bit-program-on-32-bit
    sudo apt-get install gcc-${GCC_VERSION} g++-${GCC_VERSION} gcc-${GCC_VERSION}-multilib g++-${GCC_VERSION}-multilib libudev-dev
    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-${GCC_VERSION} 60 --slave /usr/bin/g++ g++ /usr/bin/g++-${GCC_VERSION}
    sudo update-alternatives --config gcc
    python3 --version
    # NOTE(review): mixes `sudo pip3` (system install) with `pip ... --user`;
    # confirm both pips point at the same interpreter on the CI image.
    sudo pip3 install cmake==3.13.3
    pip3 install --upgrade pip --user
    pip --version
    pip install conan --upgrade --user
    pip install conan_package_tools --user
    cmake --version
    conan --version
    conan profile new default --detect --force
    conan profile update settings.compiler.libcxx=libstdc++11 default
fi
|
<reponame>serginij/project-manager
import { createSymbiote } from 'redux-symbiote'
// Column-store shape: fetch-in-progress flag, columns keyed by id, last error.
const initialState = {
  loading: false,
  columns: {},
  error: null
}
/**
 * Symbiote action/reducer pairs for the columns store.
 * Every handler is pure: it never mutates its input and always returns a
 * freshly-built state object.
 */
const symbiotes = {
  // Async lifecycle for fetching the column list.
  getColumns: {
    start: state => ({ ...state, loading: true }),
    fail: (state, error) => ({ ...state, loading: false, error }),
    done: (state, columns) => ({ ...state, loading: false, columns })
  },
  // Register a new, empty column under its id.
  addColumn: (state, column) => {
    const entry = { name: column.name, id: column.id, cards: [] }
    return { ...state, columns: { ...state.columns, [column.id]: entry } }
  },
  // Append a card id to the given column's card list.
  addCard: (state, columnId, cardId) => {
    const target = state.columns[columnId]
    const updated = { ...target, cards: target.cards.concat(cardId) }
    return { ...state, columns: { ...state.columns, [columnId]: updated } }
  },
  // Remove a card id from the given column's card list.
  deleteCard: (state, columnId, cardId) => {
    const target = state.columns[columnId]
    const remaining = target.cards.filter(id => id !== cardId)
    return {
      ...state,
      columns: { ...state.columns, [columnId]: { ...target, cards: remaining } }
    }
  },
  // Rename an existing column, keeping its other fields intact.
  updateColumn: (state, id, name) => {
    const renamed = { ...state.columns[id], name }
    return { ...state, columns: { ...state.columns, [id]: renamed } }
  }
}
export const {
actions: columnsActions,
reducer: columnsReducer
} = createSymbiote(initialState, symbiotes, '@@columns')
|
<filename>src/main/java/com/alipay/api/domain/ServiceModelContext.java
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Service model context (original mojibake javadoc translated from Chinese).
 *
 * @author auto create
 * @since 1.0, 2021-04-07 21:34:54
 */
public class ServiceModelContext extends AlipayObject {

    private static final long serialVersionUID = 6573712215431268969L;

    /**
     * xpId: the unique device identifier for this service call.
     */
    @ApiField("xp_id")
    private String xpId;

    public String getXpId() {
        return this.xpId;
    }

    public void setXpId(String xpId) {
        this.xpId = xpId;
    }
}
|
import { withIcon } from "../withIcon";
import { ReactComponent as Icon } from "./wallet-connect.svg";
// WalletConnect logo SVG wrapped with the shared withIcon HOC.
export const IconWalletConnect = withIcon(Icon);
|
#!/usr/bin/env bash
# Sync the generic + template-specific files into the per-template
# bincrafters repository and push the result with the bot account.
set -e

# `template` selects which template-<name> repository to update.
if [ -z "$template" ]; then
    echo "Please set var template first";
    # BUG FIX: `return` is only valid inside a function or a sourced script;
    # when this script is executed directly it errors out (and -1 is not a
    # valid exit status anyway). Use exit 1.
    exit 1;
fi

tmpdir="travis-tmp"

# Create or new create a temporary directory
if [ -d "${tmpdir}" ]; then
    # BUG FIX: was hard-coded "travis-tmp"; use ${tmpdir} so renaming the
    # variable cannot silently desynchronize the two.
    rm -rfv "${tmpdir}";
fi

# Get specific template repository ready
git clone "https://github.com/bincrafters/template-${template}.git" "${tmpdir}"
cd "${tmpdir}"
# NOTE(review): the !(...) pattern requires `shopt -s extglob`; confirm the
# CI shell enables it, otherwise this line silently does nothing useful.
rm -rv !(".git"|"."|"..") || true
cd ..

# Copy generic files to tmp dir
cp -v "conandata.yml" "${tmpdir}"
cp -rv ".ci" "${tmpdir}"
cp -rv ".github" "${tmpdir}"

# Move specific files to tmp dir
cp -rv "${template}/." "${tmpdir}"
cd "${tmpdir}"

# Credentials for the bot account that commits and pushes.
if [ -z "$GITHUB_TOKEN" ] || [ -z "$GITHUB_BOT_NAME" ] || [ -z "$GITHUB_BOT_EMAIL" ] ; then
    echo "Please set vars GITHUB_BOT_NAME, GITHUB_BOT_EMAIL, GITHUB_TOKEN first";
    # BUG FIX: same return-at-top-level problem as above.
    exit 1;
fi
git config user.name ${GITHUB_BOT_NAME}
git config user.email ${GITHUB_BOT_EMAIL}

# Check if repository is new
git checkout master || git checkout -b master

TARGET_REPOSITORY="https://github.com/bincrafters/template-${template}"
# Embed bot credentials into the push URL.
TOKEN_REPO=${TARGET_REPOSITORY/github.com/$GITHUB_BOT_NAME:$GITHUB_TOKEN@github.com}

git add -A .
# true for the case of no change, we don't want to let CI fail due to this
git commit -am "Automatic update from central templates repository for ${template}" || true
# Push changes
git push ${TOKEN_REPO} || git push -u origin master ${TOKEN_REPO}
|
# Build the bundled clearcut tool and install the binary under the package
# prefix's bin directory (PREFIX is provided by the packaging environment).
mkdir -p ${PREFIX}/bin
cd clearcut
make
cp clearcut ${PREFIX}/bin
|
#!/usr/bin/env sh
#
# Description : PostgreSQL
# Author : Jose Cerrejon Gonzalez (ulysess@gmail_dot._com)
# Version : 1.0 (Apr/20)
# Compatible : Raspberry Pi 4
#
clear
# Source the shared helper; if it is missing locally, download it, make it
# executable, and source it again.
. ./helper.sh || wget -q 'https://raw.githubusercontent.com/jmcerrejon/alpinOS/master/scripts/helper.sh'
chmod +x helper.sh
. ./helper.sh

# Install PostgreSQL on Alpine, initialise the cluster, start the service,
# and enable it in the default runlevel.
install() {
    apk add postgresql postgresql-client
    /etc/init.d/postgresql setup
    /etc/init.d/postgresql start && rc-update add postgresql default
}
install
<gh_stars>0
const jwt = require('jsonwebtoken')
const config = require('../../config/app.conf.js')
module.exports = function(req, res, next)
{
const authorization = req.get('Authorization');
if (!authorization) {
const err = new Error()
err.name = "Unauthorized"
err.status = "401"
err.message = 'Authorization header is missing'
return next(err)
}
// Check that the header has the correct format.
const match = authorization.match(/^Bearer (.+)$/);
if (!match) {
const err = new Error()
err.name = "Unauthorized"
err.status = "401"
err.message = 'Authorization header is not a bearer token'
return next(err)
}
// Extract and verify the JWT.
const token = match[1]
jwt.verify(token, config.secretKey, function (err, payload) {
if (err) {
const err = new Error()
err.name = "Unauthorized"
err.status = "401"
err.message = "Your token is invalid or has expired"
} else {
req.currentUserId = payload.sub;
next(); // Pass the ID of the authenticated user to the next middleware.
}
});
} |
#!/bin/bash
# Script to generate the python contract from XSD
# Requires pyxb module to be installed and available in path
dt=`date '+%m/%d/%Y %H:%M:%S'`
AnetURL=https://apitest.authorize.net/xml/v1/schema/AnetApiSchema.xsd
# Same URL with '/' escaped for use inside the perl substitution below.
AnetURLPERL='https:\/\/apitest.authorize.net\/xml\/v1\/schema\/AnetApiSchema.xsd'
LOCALXSDWITHANY=./script/AnetOut.xsd
CONTRACTSDIR=authorizenet
CONTRACTSFILE=apicontractsv1
PYXBGENPATH=`which pyxbgen`
TEMPFILE=binding
TEMPDIRECTORY=./script/temp
echo Starting pyxbgen on ${dt}
# --- Verify required tools (python, pyxbgen, perl) ---------------------------
which python > /dev/null
if [ $? -eq 0 ]
then
echo Found python
else
echo Unable to find python. Make sure python is installed.
exit 1
fi
which pyxbgen > /tmp/pyxbgenpath.txt
if [ $? -eq 0 ]
then
echo Found pyxbgen
else
echo Unable to find pyxbgen. Make sure pyxb package is installed.
exit 1
fi
which perl > /dev/null
if [ $? -eq 0 ]
then
echo Found perl
else
echo Unable to find perl. Make sure perl is installed.
exit 1
fi
# --- Download the schema with wget, falling back to curl ----------------------
which wget > /dev/null
if [ $? -eq 0 ]
then
echo Found wget. Downloading AnetAPISchema file under Script directory.
wget -O ./script/AnetApiSchema.xsd ${AnetURL}
if [ $? -eq 0 ]
then
echo AnetAPISchema.xsd downloaded.
else
echo Unable to download AnetAPISchema.
exit 1
fi
else
echo Wget not found. Looking for Curl
which curl > /dev/null
if [ $? -eq 0 ]
then
echo Found curl. Downloading AnetAPISchema file under Script directory.
# Try bypassing any configured proxy first; retry with the proxy on failure.
curl --noproxy '*' ${AnetURL} > ./script/AnetApiSchema.xsd
if [ $? -eq 0 ]
then
echo AnetAPISchema.xsd downloaded.
else
curl ${AnetURL} > ./script/AnetApiSchema.xsd
if [ $? -eq 0 ]
then
echo AnetAPISchema.xsd downloaded.
else
echo Unable to download AnetAPISchema.
exit 1
fi
fi
else
echo Unable to find wget and curl. Make sure either one is installed
exit 1
fi
fi
echo Modifying XSD using perl to support backward compatibility
echo Creating temporary directory
mkdir -p "$TEMPDIRECTORY"
perl script/addany.pl script/AnetApiSchema.xsd ${TEMPDIRECTORY}/IntermediateAnetOut.xsd ${LOCALXSDWITHANY}
if [ $? -eq 0 ]
then
: #echo AnetOut.xsd generated #Uncomment for debugging
else
echo Unable to generate AnetOut.xsd
exit 1
fi
echo Deleting temporary directory
rm -rf "$TEMPDIRECTORY"
echo Using pyxb from "${PYXBGENPATH}"
if [ -e "${TEMPFILE}.py" ]; then
rm ${TEMPFILE}.py
fi
# Generate the bindings into ${TEMPFILE}.py.
python "${PYXBGENPATH}" -u ${LOCALXSDWITHANY} -m ${TEMPFILE}
if [ $? -eq 0 ]
then
if [ -e "${CONTRACTSDIR}/${CONTRACTSFILE}.old" ]
then
rm "${CONTRACTSDIR}/${CONTRACTSFILE}.old"
fi
# BUG FIX: keep a backup of the previous contract as .old instead of
# deleting it outright -- the success message below always claimed the old
# contract was "moved to .old", but the original code simply rm'd it.
if [ -e "${CONTRACTSDIR}/${CONTRACTSFILE}.py" ]
then
mv "${CONTRACTSDIR}/${CONTRACTSFILE}.py" "${CONTRACTSDIR}/${CONTRACTSFILE}.old"
fi
mv "${TEMPFILE}.py" "${CONTRACTSDIR}/${CONTRACTSFILE}.py"
echo Bindings have been successfully generated from XSD in the file "${CONTRACTSDIR}/${CONTRACTSFILE}.py"
echo Old contracts have been moved to .old
else
echo Error generating bindings from XSD. Review the errors and rerun the script.
exit 1
fi
# Rewrite the schema Location() embedded in the generated bindings so it points
# at the canonical public URL rather than the local/temporary file path.
perl -i -pe "s/.Location\(\'.*xsd\'/.Location\(\'$AnetURLPERL\'/g" ${CONTRACTSDIR}/${CONTRACTSFILE}.py
exit 0
#!/bin/bash -e

# Uninstall the ActorApplication application from a Service Fabric cluster:
# delete the application instance, unregister its type, then remove the
# uploaded package from the image store.
# Usage: uninstall.sh [application-type-version]   (defaults to 1.0.0)
version="1.0.0"
if [[ "$#" != "0" ]];then
version="$1"
fi

# Nothing to do when the application is not currently deployed.
application_count=$(sfctl application list | grep fabric:/ActorApplicationApplication | wc -l)
if [[ "$application_count" -eq "0" ]];then
echo "Nothing to uninstall"
exit 0
fi

sfctl application delete --application-id ActorApplicationApplication
if [ $? -ne 0 ]; then
echo "Application removal failed."
exit 1
fi

sfctl application unprovision --application-type-name ActorApplicationApplicationType --application-type-version $version
if [ $? -ne 0 ]; then
echo "Unregistering application type failed."
exit 1
fi

sfctl store delete --content-path ActorApplicationApplication
if [ $? -ne 0 ]; then
echo "Unable to delete image store content."
exit 1
fi

echo "Uninstall script executed successfully."
|
package com.telenav.osv.http;
import com.android.volley.AuthFailureError;
import com.android.volley.VolleyLog;
import com.telenav.osv.item.KVFile;
import com.telenav.osv.listener.network.GenericResponseListener;
import com.telenav.osv.utils.Log;
import com.telenav.osv.utils.Utils;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Map;
/**
* Created by Kalman on 10/6/2015.
*/
/**
 * Volley request that uploads an issue attachment file as a multipart/form-data
 * POST, alongside the access token, issue id, file index and file type fields.
 * The multipart body is built eagerly in the constructor and wrapped in a
 * {@link ProgressiveEntity}.
 */
public class IssueUploadRequest<T> extends KvVolleyStringRequestJarvisAuthorization {
private static final String FILE_PART_NAME = "file";
private static final String TAG = "IssueUploadRequest";
private static final String PARAM_TOKEN = "access_token";
private static final String PARAM_ISSUE_ID = "issueId";
private static final String PARAM_ISSUE_INDEX = "fileIndex";
private static final String PARAM_FILE_TYPE = "fileType";
private final int mId;
private final int mIndex;
private final GenericResponseListener mListener;
private final KVFile mFile;
private final String mToken;
protected Map<String, String> headers;
private MultipartEntityBuilder mBuilder = MultipartEntityBuilder.create();
private ProgressiveEntity mProgressiveEntity;
/**
 * @param url upload endpoint
 * @param listener receives both success and error callbacks
 * @param token access token sent as a form field
 * @param file attachment to upload (uploaded as text/plain)
 * @param issueId server-side issue identifier
 * @param index index of this file within the issue
 * @param isJarvisAuthorization whether to authorize via Jarvis
 * @param jarvisAccessToken Jarvis access token (used when the flag is set)
 */
public IssueUploadRequest(String url, GenericResponseListener listener, String token, KVFile file, int issueId, int index, boolean isJarvisAuthorization, String jarvisAccessToken) {
super(Method.POST, url, listener, listener, isJarvisAuthorization, jarvisAccessToken);
mListener = listener;
mFile = file;
mId = issueId;
mIndex = index;
mToken = token;
buildMultipartEntity();
}
@Override
public String getBodyContentType() {
// Delegates to the entity so the boundary set below is included.
return mProgressiveEntity.getContentType().getValue();
}
@Override
public byte[] getBody() throws AuthFailureError {
// Serialize the whole multipart entity into memory for Volley.
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
mProgressiveEntity.writeTo(bos);
} catch (IOException e) {
VolleyLog.e("IOException writing to ByteArrayOutputStream bos, building the multipart request.");
}
return bos.toByteArray();
}
@Override
public HttpEntity getMultipartEntity() {
return mProgressiveEntity;
}
@Override
protected void deliverResponse(String response) {
mListener.onResponse(response);
}
/**
 * Assembles the multipart body: text fields first, then the file part.
 * Uses browser-compatible mode with a fixed "xx" boundary and UTF-8 charset.
 */
private void buildMultipartEntity() {
mBuilder.addTextBody(PARAM_TOKEN, mToken);
mBuilder.addTextBody(PARAM_ISSUE_ID, "" + mId);
mBuilder.addTextBody(PARAM_ISSUE_INDEX, "" + mIndex);
mBuilder.addTextBody(PARAM_FILE_TYPE, "text");
if (!mFile.exists()) {
// Only logged; the builder still adds the part — presumably upstream
// guarantees existence. TODO confirm callers never pass a missing file.
Log.d(TAG, "buildMultipartEntity: file doesn't exist");
}
mBuilder.addBinaryBody(FILE_PART_NAME, mFile, ContentType.create("text/plain"), mFile.getName());
mBuilder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
mBuilder.setLaxMode().setBoundary("xx").setCharset(Charset.forName("UTF-8"));
HttpEntity mEntity = mBuilder.build();
mProgressiveEntity = new ProgressiveEntity(mEntity, null, Utils.fileSize(mFile), null);
}
}
// sbt build definition for the arbitrage-trader project.
name := "arbitrage-trader"
version := "0.9.4-SNAPSHOT"
scalaVersion := "2.13.4"
// '+' version ranges pick up the latest matching patch release at resolution time.
val AkkaVersion = "2.6.+"
val AkkaHttpVersion = "10.2.+"
libraryDependencies += "com.typesafe.akka" %% "akka-slf4j" % AkkaVersion
libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.+"
libraryDependencies += "com.typesafe.akka" %% "akka-actor-typed" % AkkaVersion
libraryDependencies += "com.typesafe.akka" %% "akka-http-core" % AkkaHttpVersion
libraryDependencies += "io.spray" %% "spray-json" % "1.3.+"
libraryDependencies += "com.typesafe.akka" %% "akka-stream" % AkkaVersion
// Test-only dependencies.
libraryDependencies += "org.scalatest" %% "scalatest" % "3.2.+" % "test"
libraryDependencies += "com.typesafe.akka" %% "akka-actor-testkit-typed" % AkkaVersion % Test
//libraryDependencies += "org.scalamock" %% "scalamock" % "4.4.0" % Test
|
def isPrime(n):
    """Return True if n is a prime number, False otherwise.

    Uses trial division up to sqrt(n): any composite number has a divisor
    no larger than its square root, so scanning all of range(2, n) as the
    original did is wasted work. Even numbers > 2 are rejected up front so
    only odd candidates are tested.
    """
    if n <= 1:
        return False
    if n <= 3:
        return True  # 2 and 3 are prime
    if n % 2 == 0:
        return False
    i = 3
    while i * i <= n:
        if n % i == 0:
            return False
        i += 2
    return True
#ifndef ERL_COMMUNICATION_H_
#define ERL_COMMUNICATION_H_
#include <stdint.h>
#include <stdbool.h>
typedef uint8_t byte;
int read_cmd(byte *buf);
int write_cmd(byte *buf, int len);
int read_exact(byte *buf, int len);
int write_exact(byte *buf, int len);
#endif |
#!/usr/bin/env sh
# CI build script for the wex libraries. Skips tests to stay under the CI
# job time limit; builds shared libraries with the configured compiler.
../ci/codespell.sh
# A normal build, including tests, triggers the max time limit:
# The job exceeded the maximum time limit for jobs, and has been terminated.
# no tests, to reduce time
cmake -DCMAKE_CXX_COMPILER="${CXX}" -DwexBUILD_SHARED=ON ..
# only make wex targets, to reduce time
make -j 4 wex-core wex-factory wex-data wex-common wex-ui wex-vi wex-stc wex-del
|
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#ifndef GBXML_FORWARDTRANSLATOR_HPP
#define GBXML_FORWARDTRANSLATOR_HPP
#include "gbXMLAPI.hpp"
#include "../utilities/core/Path.hpp"
#include "../utilities/core/Optional.hpp"
#include "../utilities/core/Logger.hpp"
#include "../utilities/core/StringStreamLogSink.hpp"
// This is needed to declare the set<Material, IdfObjectImplLess>, so everything is a complete type
#include "../utilities/idf/IdfObject.hpp"
#include "../model/Material.hpp"
#include "../model/ModelObject.hpp"
#include <map>
// Forward declarations to keep this header light.
namespace pugi {
class xml_node;
class xml_document;
} // namespace pugi
namespace openstudio {
class ProgressBar;
class Transformation;
namespace model {
class Model;
class ConstructionBase;
class Facility;
class Building;
class BuildingStory;
class ThermalZone;
class Space;
class ShadingSurfaceGroup;
class BuildingStory;
class Surface;
class SubSurface;
class ShadingSurface;
} // namespace model
namespace gbxml {
/** Translates an OpenStudio Model into a gbXML document (file or string).
 *  Translation warnings/errors are collected in a log sink and exposed via
 *  warnings() / errors() after each run. */
class GBXML_API ForwardTranslator
{
public:
ForwardTranslator();
virtual ~ForwardTranslator();
// Save the GbXML to a file
bool modelToGbXML(const openstudio::model::Model& model, const openstudio::path& path, ProgressBar* progressBar = nullptr);
// Return a string representation of the GbXML document
std::string modelToGbXMLString(const openstudio::model::Model& model, ProgressBar* progressBar = nullptr);
/** Get warning messages generated by the last translation. */
std::vector<LogMessage> warnings() const;
/** Get error messages generated by the last translation. */
std::vector<LogMessage> errors() const;
private:
// Sanitize an object name for use as a gbXML id.
std::string escapeName(const std::string& name);
// listed in translation order
bool translateModel(const openstudio::model::Model& model, pugi::xml_document& document);
// Facility and Building could not be explicitly instantiated in the model, but the functions still need to be called so that Spaces and surfaces
// are translated. Facility and Building both are UniqueModelObjects, so passing model here as an argument is harmless
boost::optional<pugi::xml_node> translateFacility(const openstudio::model::Model& model, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateBuilding(const openstudio::model::Model& model, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateSpace(const openstudio::model::Space& space, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateShadingSurfaceGroup(const openstudio::model::ShadingSurfaceGroup& shadingSurfaceGroup,
pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateBuildingStory(const openstudio::model::BuildingStory& story, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateSurface(const openstudio::model::Surface& surface, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateSubSurface(const openstudio::model::SubSurface& subSurface,
const openstudio::Transformation& transformation, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateShadingSurface(const openstudio::model::ShadingSurface& shadingSurface, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateThermalZone(const openstudio::model::ThermalZone& thermalZone, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateLayer(const openstudio::model::Material& material, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateMaterial(const openstudio::model::Material& material, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateConstructionBase(const openstudio::model::ConstructionBase& constructionBase, pugi::xml_node& parent);
boost::optional<pugi::xml_node> translateCADObjectId(const openstudio::model::ModelObject& modelObject, pugi::xml_node& parentElement);
// Maps already-translated model objects to their XML nodes (avoids duplicates).
std::map<openstudio::Handle, pugi::xml_node> m_translatedObjects;
// Materials seen so far, deduplicated by IdfObject identity.
std::set<openstudio::model::Material, openstudio::IdfObjectImplLess> m_materials;
StringStreamLogSink m_logSink;
ProgressBar* m_progressBar;
REGISTER_LOGGER("openstudio.gbxml.ForwardTranslator");
};
} // namespace gbxml
} // namespace openstudio
#endif // GBXML_FORWARDTRANSLATOR_HPP
|
#!/bin/bash
# Re-vendor dependencies from the upstream cluster-api repository, flattening
# its vendor tree into ours.
# Fail fast on any error, unset variable or pipeline failure — the original
# silently carried on (e.g. a failed clone left an empty, broken vendor dir).
set -euo pipefail
rm -rf vendor
mkdir vendor
cd vendor
mkdir new-sigs.k8s.io
(cd new-sigs.k8s.io; git clone --depth=1 https://github.com/kubernetes-sigs/cluster-api)
# Promote cluster-api's vendored deps, then the repo itself, into our tree.
mv new-sigs.k8s.io/cluster-api/vendor/* .
mv new-sigs.k8s.io/* sigs.k8s.io/
rmdir sigs.k8s.io/cluster-api/vendor
# clean up duplicates which result in panics at runtime
rm -rf github.com/golang/glog
rm -rf golang.org/x/net/trace
|
import axios from '@/libs/api.request'
import siteApi from '@/api/site'
/**
 * API client for CMS site-settings endpoints. All mutating calls inject the
 * current site id obtained from siteApi.getCurrentSiteID().
 * Every method returns the axios request promise.
 */
let siteSettingsApi = {};
/** Fetch the full settings map for the current site. */
siteSettingsApi.getAll = function(){
return this.getAllWithSiteId(siteApi.getCurrentSiteID())
}
/** Fetch the full settings map for an explicit site id. */
siteSettingsApi.getAllWithSiteId = function(siteId){
return axios.request({
url: 'cms/siteSettings/getMap',
method: 'get',
params: {
siteId: siteId
}
});
}
/** POST an arbitrary condition object (siteId is injected) to the generic endpoint. */
siteSettingsApi.commonRequest = function(condition){
condition.siteId = siteApi.getCurrentSiteID()
return axios.request({
url: 'cms/siteSettings/commonRequest',
method: 'post',
data: condition
});
}
/** Fetch a single settings record by id. */
siteSettingsApi.getById = function(id){
return axios.request({
url: 'cms/siteSettings/get',
method: 'get',
params: {
id: id
}
});
}
/** Create or update one settings record (siteId is injected into the payload). */
siteSettingsApi.addOrUpdate = function(settings){
settings.siteId = siteApi.getCurrentSiteID()
return axios.request({
url: 'cms/siteSettings/addOrUpdate',
method: 'post',
data: settings
});
}
/** Create or update many settings records for the current site. */
siteSettingsApi.batchAddOrUpdate = function(settingsArray){
let settingsData = {siteId:siteApi.getCurrentSiteID(), settingsList: settingsArray}
return axios.request({
url: 'cms/siteSettings/batchAddOrUpdate',
method: 'post',
data: settingsData
});
}
/** Create or update many settings records for an explicit site id. */
siteSettingsApi.batchAddOrUpdateWithSiteId = function(siteId,settingsArray){
let settingsData = {siteId:siteId, settingsList: settingsArray}
return axios.request({
url: 'cms/siteSettings/batchAddOrUpdate',
method: 'post',
data: settingsData
});
}
/** Delete a single settings record by id. */
siteSettingsApi.delete = function(id){
return axios.request({
url: 'cms/siteSettings/delete',
method: 'delete',
params: {
id: id
}
});
}
/** Delete multiple settings records; ids format is defined by the backend — TODO confirm (array vs CSV). */
siteSettingsApi.batchDelete = function (ids) {
return axios.request({
url: 'cms/siteSettings/batchDelete',
method: 'delete',
params: {
ids: ids
}
});
}
export default siteSettingsApi;
|
import { configs } from '../config/configs';
import { CoreLintScriptBase } from '../coreLintScriptBase';
/**
 * Lint script that reformats the whole project in place by running the
 * shared Prettier "fix" entry point from @villedemontreal/lint-config.
 * The dependency is require()d lazily from the project's own node_modules
 * so the CLI works regardless of where this package is installed.
 */
export class PrettierFixScript extends CoreLintScriptBase {
get name(): string {
return 'prettier-fix';
}
get description(): string {
return `Fix the project formating using the Prettier rules.`;
}
protected async main() {
const { prettierFix } = require(`${configs.projectRoot}/node_modules/@villedemontreal/lint-config`);
await prettierFix(configs.projectRoot);
}
}
|
import { APP_INITIALIZER, FactoryProvider } from '@angular/core';
import { TestBed } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { IncludeStylesModule } from './include-styles.module';
import { INJECT_STYLES_PROVIDER, injectIncludeStyles } from './inject-styles';
describe('styles', () => {
describe('modules', () => {
describe('IncludeStylesModule', () => {
// Verifies that importing the module wires injectIncludeStyles into
// APP_INITIALIZER and that Angular invokes the factory at bootstrap.
it('should call injectIncludeStyles on startup', () => {
expect((<FactoryProvider>INJECT_STYLES_PROVIDER).useFactory).toBe(
injectIncludeStyles
);
// Spy on the provider factory so the APP_INITIALIZER lookup below
// can be observed while still executing the real implementation.
spyOn(
<FactoryProvider>INJECT_STYLES_PROVIDER,
'useFactory'
).and.callThrough();
TestBed.configureTestingModule({
imports: [IncludeStylesModule, RouterTestingModule]
});
// Resolving APP_INITIALIZER forces the initializer factories to run.
TestBed.get(APP_INITIALIZER);
expect(
(<FactoryProvider>INJECT_STYLES_PROVIDER).useFactory
).toHaveBeenCalled();
});
});
});
});
|
<gh_stars>0
import * as express from 'express';
import {DnsBLConfig, FirewallCommons, FirewallConfig} from './firewall.commons';
import * as isIP from 'validator/lib/isIP';
import {CacheEntry, DataCacheModule} from './datacache.module';
import {ServerLogUtils} from './serverlog.utils';
/** Verdict for an IP after a DNSBL lookup (NORESULT = lookup errored). */
export enum DnsBLCacheEntryState {
OK, BLOCKED, NORESULT
}
/** Cached DNSBL verdict for one IP; ttl is an absolute epoch-ms expiry. */
export interface DnsBLCacheEntry extends CacheEntry {
ip: string;
ttl: number;
state: DnsBLCacheEntryState;
}
/** One in-flight DNSBL lookup tied to the express request awaiting it. */
export interface DnsBLQuery {
ip: string;
req: any;
res: any;
_next: any;
// Guards against answering the request twice (e.g. timeout + late result).
alreadyServed: boolean;
timeoutTimer: any;
}
/**
 * Express middleware base class that blocks requests from IPs listed on a
 * DNS blacklist. Results are cached in-memory and (optionally) in a shared
 * DataCacheModule; concurrent lookups for the same IP are coalesced via a
 * promise cache. Subclasses supply the concrete DNSBL client.
 */
export abstract class GenericDnsBLModule {
// ip -> DnsBLCacheEntry (local first-level cache in front of `cache`).
private dnsBLResultCache = {};
// ip -> Promise<DnsBLCacheEntry> for lookups currently in flight.
private queryCache = {};
private redisPrefix = 'dnsblv1_';
constructor(protected app: express.Application, protected firewallConfig: FirewallConfig, protected config: DnsBLConfig,
protected filePathErrorDocs: string, protected cache: DataCacheModule) {
this.configureDnsBLClient();
this.configureMiddleware();
}
/** Set up the concrete DNSBL client (called once from the constructor). */
protected abstract configureDnsBLClient();
/** Perform the actual DNSBL lookup for one query. */
protected abstract callDnsBLClient(query: DnsBLQuery): Promise<DnsBLCacheEntry>;
/**
 * Translate a raw client result (err/blocked/details) into a cache entry,
 * store it, and answer the waiting request. ENOTFOUND is treated as "not
 * listed" (OK); other errors keep a previous BLOCKED verdict if present and
 * otherwise record NORESULT with the (shorter) error TTL.
 */
protected checkResultOfDnsBLClient(query: DnsBLQuery, err, blocked: boolean, details: any): Promise<DnsBLCacheEntry> {
return new Promise<DnsBLCacheEntry>((resolve, reject) => {
this.getCachedResult(query.ip).then(value => {
let potCacheEntry: DnsBLCacheEntry = value;
if (!potCacheEntry) {
potCacheEntry = {
created: Date.now(),
updated: undefined,
details: undefined,
state: undefined,
ttl: undefined,
ip: query.ip,
};
}
potCacheEntry.updated = Date.now();
potCacheEntry.details = details;
if (err) {
// NORESULT
if (err.code === 'ENOTFOUND') {
// not known: OK
potCacheEntry.ttl = (Date.now() + this.config.dnsttl);
potCacheEntry.state = DnsBLCacheEntryState.OK;
} else {
// ERROR
console.error('DnsBLModule: error while reading for query:'
+ ServerLogUtils.sanitizeLogMsg([query.ip, query.req.url].join(' ')), err);
if (potCacheEntry.state !== DnsBLCacheEntryState.BLOCKED) {
potCacheEntry.state = DnsBLCacheEntryState.NORESULT;
}
potCacheEntry.ttl = (Date.now() + this.config.errttl);
}
} else if (!blocked) {
// OK
potCacheEntry.ttl = (Date.now() + this.config.dnsttl);
potCacheEntry.state = DnsBLCacheEntryState.OK;
} else {
// BLOCKED
potCacheEntry.ttl = (Date.now() + this.config.dnsttl);
potCacheEntry.state = DnsBLCacheEntryState.BLOCKED;
}
this.putCachedResult(query.ip, potCacheEntry);
this.resolveResult(potCacheEntry, query, this.firewallConfig, this.filePathErrorDocs);
return resolve(potCacheEntry);
});
});
}
/**
 * Install the express middleware. Flow per request:
 * IPv6 -> pass through (no DNSBL check); invalid IPv4 -> block;
 * cached verdict -> serve it; whitelisted -> pass; otherwise join an
 * in-flight lookup or start a new one.
 */
protected configureMiddleware() {
const me = this;
me.app.use(function(req, res, _next) {
const ip = req['clientIp'];
// check for valid ip4
if (isIP(ip, '6')) {
return _next();
}
if (!isIP(ip, '4')) {
console.warn('DnsBLModule: BLOCKED invalid IP:' + ServerLogUtils.sanitizeLogMsg(ip) +
' URL:' + ServerLogUtils.sanitizeLogMsg(req.url));
return FirewallCommons.resolveBlocked(req, res, me.firewallConfig, me.filePathErrorDocs);
}
// check for dnsbl
me.getCachedResult(ip).then(value => {
const cacheEntry: DnsBLCacheEntry = value;
const query = me.createQuery(ip, req, res, _next);
// already cached
if (me.isCacheEntryValid(cacheEntry)) {
return me.resolveResult(cacheEntry, query, me.firewallConfig, me.filePathErrorDocs);
}
// whitelisted
if (me.isWhitelisted(ip)) {
return _next();
}
// same query running
let promise = me.getCachedQuery(ip);
if (promise) {
promise.then(function(parentCacheEntry: DnsBLCacheEntry) {
return me.resolveResult(parentCacheEntry, query, me.firewallConfig, me.filePathErrorDocs);
});
return;
}
// do new query
promise = me.callDnsBLClient(query);
me.putCachedQuery(ip, promise);
});
});
}
/**
 * Answer the request that waited on a verdict: clear the pending-query
 * bookkeeping, guard against double answers, then either continue the
 * middleware chain or serve the firewall block page.
 */
protected resolveResult(cacheEntry: DnsBLCacheEntry, query: DnsBLQuery, firewallConfig: FirewallConfig, filePathErrorDocs: string) {
// remove from queryCache
this.removeCachedQuery(query.ip);
// delete timer
if (query.timeoutTimer) {
clearTimeout(query.timeoutTimer);
}
// ignore if already served
if (query.alreadyServed) {
return;
}
query.alreadyServed = true;
if (cacheEntry.state !== DnsBLCacheEntryState.BLOCKED) {
return query._next();
}
console.warn('DnsBLModule: BLOCKED blacklisted IP:' + ServerLogUtils.sanitizeLogMsg(query.req['clientIp']) +
' URL:' + ServerLogUtils.sanitizeLogMsg(query.req.url));
return FirewallCommons.resolveBlocked(query.req, query.res, firewallConfig, filePathErrorDocs);
}
/** Build a fresh query record for one request. */
protected createQuery(ip: string, req, res, _next): DnsBLQuery {
return {
ip: ip,
req: req,
res: res,
_next: _next,
alreadyServed: false,
timeoutTimer: undefined
};
}
/** A cache entry is valid while its absolute expiry lies in the future. */
protected isCacheEntryValid(cacheEntry: DnsBLCacheEntry): boolean {
return cacheEntry && cacheEntry.ttl >= Date.now();
}
protected isWhitelisted(ip: string): boolean {
return this.config.whitelistIps.indexOf(ip) >= 0;
}
/**
 * Look up a verdict: local map first, then the shared cache.
 * NOTE(review): resolves with undefined on miss or cache read error — the
 * shared-cache error is logged but deliberately not propagated.
 */
protected getCachedResult(ip: string): Promise<DnsBLCacheEntry> {
return new Promise<DnsBLCacheEntry>((resolve, reject) => {
if (this.dnsBLResultCache[ip]) {
return resolve(this.dnsBLResultCache[ip]);
} else if (this.cache) {
this.cache.get(this.redisPrefix + ip).then(value => {
return resolve(<DnsBLCacheEntry>value);
}).catch(reason => {
console.error('DnsBLModule: cant read cache:', reason);
return resolve();
});
} else {
return resolve();
}
});
}
/** Store a verdict in both the local map and (if present) the shared cache. */
protected putCachedResult(ip: string, cacheEntry: DnsBLCacheEntry) {
this.dnsBLResultCache[ip] = cacheEntry;
if (this.cache) {
this.cache.set(this.redisPrefix + ip, cacheEntry);
}
}
protected getCachedQuery(ip: string): Promise<DnsBLCacheEntry> {
return this.queryCache[ip];
}
protected putCachedQuery(ip: string, query: Promise<DnsBLCacheEntry>) {
this.queryCache[ip] = query;
}
protected removeCachedQuery(ip: string) {
delete this.queryCache[ip];
}
}
|
#!/usr/bin/env bash
#
# returns Ansible Version found in ${DOCKERFILE}
#
DOCKERFILE="$1"
if [ "${DOCKERFILE}" == "" ]
then
echo "usage: $0 /path/to/dockerfile"
else
# Extract the value of an ANSIBLE_VERSION="x.y.z" assignment; prints
# nothing when the Dockerfile has no such line.
sed -ne 's/^.*ANSIBLE_VERSION="\([^"]\+\)"$/\1/p' "${DOCKERFILE}"
fi
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
def load_data(rootdir='./'):
    """Load the training matrix and label vector from text files.

    Args:
        rootdir: Directory containing ``x_train.txt`` and ``y_train.txt``.
            Paths are built with ``os.path.join``, so a trailing slash is
            optional (the original concatenated strings, which silently
            produced wrong paths like ``datax_train.txt`` without one).

    Returns:
        Tuple ``(x_train, y_train)``: a float ndarray of shape
        (n_samples, n_features) and an int ndarray of shape (n_samples,).
    """
    import os  # local import keeps this patch self-contained
    print('load data \n')
    x_train = np.loadtxt(os.path.join(rootdir, 'x_train.txt'), dtype=str).astype(float)
    y_train = np.loadtxt(os.path.join(rootdir, 'y_train.txt'), dtype=str).astype(int)
    print('x_train: [%d, %d], y_train:[%d,]' % (
        x_train.shape[0], x_train.shape[1], y_train.shape[0]))
    return x_train, y_train
|
#!/bin/bash
# Launch the autoregression job in the background, appending output to log.txt.
python3 autoreg_main.py >> log.txt &
# On macOS, keep the machine awake until the job's process exits.
if [ $(uname) == "Darwin" ]
then
caffeinate -w $(ps -ef | grep autoreg_main.py | grep -v grep | awk '{print $2}') &
echo "caffeinated"
fi
|
import afterEvent from '../index';
/** Listener signature; the test double below invokes it without arguments. */
interface EventCallback {
(e?: Event): void;
}
/** One (eventName, callback) registration held by the EventTarget double. */
interface EventInfo {
eventName: string;
callback: EventCallback;
}
/**
 * Minimal EventTarget test double exposing its listener list so tests can
 * assert on registrations directly.
 */
class EventTarget {
  eventList: EventInfo[];

  constructor() {
    this.eventList = [];
  }

  /** Drop every registered listener. */
  resetEventList() {
    this.eventList = [];
  }

  /** Register a listener under the given event name. */
  addEventListener(eventName: string, callback: EventCallback) {
    this.eventList.push({ eventName, callback });
  }

  /** Remove the first registration matching both the name and the callback. */
  removeEventListener(eventName: string, callback: EventCallback) {
    const index = this.eventList.findIndex(
      entry => entry.eventName === eventName && entry.callback === callback
    );
    if (index !== -1) {
      this.eventList.splice(index, 1);
    }
  }

  /** Invoke every listener whose name matches e.type; always returns true. */
  dispatchEvent(e: any) {
    this.eventList.forEach(entry => {
      if (entry.eventName === e.type) {
        entry.callback();
      }
    });
    return true;
  }
}
/**
 * Minimal Event stand-in. Returning an object literal from the constructor
 * makes `new Event(t)` yield a plain `{ type }` object rather than an Event
 * instance — sufficient for the dispatch tests above.
 */
class Event {
constructor(type: string) {
return {
type
};
}
}
// Shared fake window; listeners are cleared before each spec.
const window = new EventTarget();
beforeEach(() => {
window.resetEventList();
});
it('should accept the second parameter', () => {
// Passing the target explicitly should register exactly one listener on it.
afterEvent('SOME_EVENT2', window);
expect(window.eventList.length).toBe(1);
});
it('should cleanup if event has triggered', () => {
// The listener must unregister itself once the event fires.
afterEvent('SOME_EVENT3', window);
expect(window.eventList.length).toBe(1);
window.dispatchEvent(new Event('SOME_EVENT3'));
expect(window.eventList.length).toBe(0);
});
it('should work fine when async usage', async () => {
// The returned promise should resolve with the dispatched event object.
const e = new Event('SOME_EVENT4');
expect(afterEvent('SOME_EVENT4', window)).resolves.toEqual(e);
window.dispatchEvent(e);
});
|
#!/bin/bash
# Check AsciiDoc links in the current directory tree.
python ../adoc_utils/adoc-link-check.py .
|
<reponame>GiantLuigi4/origins-architectury
package io.github.apace100.origins.access;
import dev.architectury.injectables.annotations.ExpectPlatform;
import net.minecraft.block.EntityShapeContext;
import net.minecraft.entity.Entity;
/**
 * Architectury platform stub for reading the entity out of an
 * {@link EntityShapeContext}. The body is replaced per-platform at build time
 * by {@link ExpectPlatform}; the AssertionError is only thrown if that
 * replacement did not happen.
 */
public class EntityShapeContextAccessor {
@ExpectPlatform
public static Entity getEntity(EntityShapeContext context) {
throw new AssertionError();
}
}
|
<reponame>feeedback/hexlet_professions_backend
/* eslint-disable import/prefer-default-export */
// sc:
// https://ru.hexlet.io/courses/js-asynchronous-programming/lessons/parallel-execution/exercise_unit
// info.js
// ะ ะตะฐะปะธะทัะนัะต ะธ ัะบัะฟะพััะธััะนัะต ะฐัะธะฝั
ัะพะฝะฝัั ััะฝะบัะธั getDirectorySize, ะบะพัะพัะฐั ััะธัะฐะตั ัะฐะทะผะตั
// ะฟะตัะตะดะฐะฝะฝะพะน ะดะธัะตะบัะพัะธะธ (ะฝะต ะฒะบะปััะฐั ะฟะพะดะดะธัะตะบัะพัะธะธ). ะะฝะฐะปะธะท ัะฐะทะผะตัะฐ ัะฐะนะปะฐ ะดะพะปะถะตะฝ
// ะฟัะพะธัั
ะพะดะธัั ะฟะฐัะฐะปะปะตะปัะฝะพ, ะดะปั ััะพะณะพ ะฒะพัะฟะพะปัะทัะนัะตัั ะฑะธะฑะปะธะพัะตะบะพะน async
// import { getDirectorySize } from './info.js';
// getDirectorySize('/usr/local/bin', (err, size) => {
// console.log(size);
// });
// ะะพะดัะบะฐะทะบะฐ
// fs.readdir - ััะตะฝะธะต ัะพะดะตัะถะธะผะพะณะพ ะดะธัะตะบัะพัะธะธ
// path.join - ะบะพะฝััััะธััะตั ะฟััะธ
// async.map
// fs.stat - ะธะฝัะพัะผะฐัะธั ะพ ัะฐะนะปะต
// _.sumBy - ะฝะฐั
ะพะถะดะตะฝะธะต ััะผะผั ะฒ ะผะฐััะธะฒะต
// BEGIN (write your solution here)
/**
 * Asynchronously compute the total size (bytes) of the regular files directly
 * inside `dirpath` (subdirectories are not descended into) and pass it to
 * `callback(err, size)`. File stats are gathered in parallel via async.map.
 *
 * NOTE(review): `fs`, `path`, `async` and `_` (lodash) are used here but
 * never imported in this file — presumably provided by the exercise
 * environment; confirm before reusing this module elsewhere.
 */
export const getDirectorySize = (dirpath, callback) => {
fs.readdir(dirpath, (error1, names) => {
if (error1) {
callback(error1);
return;
}
const files = names.map((name) => path.join(dirpath, name));
async.map(files, fs.stat, (error2, stats) => {
if (error2) {
callback(error2);
return;
}
// Directories returned by readdir are excluded from the sum.
const onlyFilesStats = stats.filter((stat) => stat.isFile());
callback(null, _.sumBy(onlyFilesStats, 'size'));
});
});
};
// END
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/viewbookmark/config/v1/JAXBVector3d.java
package io.opensphere.core.viewbookmark.config.v1;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import io.opensphere.core.math.Vector3d;
import io.opensphere.core.util.Utilities;
/**
* The Class JAXBVector3d.
*/
@XmlRootElement(name = "Vector3D")
@XmlAccessorType(XmlAccessType.FIELD)
public class JAXBVector3d
{
    /** The X component. */
    @XmlAttribute(name = "x")
    private double myX;

    /** The Y component. */
    @XmlAttribute(name = "y")
    private double myY;

    /** The Z component. */
    @XmlAttribute(name = "z")
    private double myZ;

    /**
     * No-arg constructor required by JAXB.
     */
    public JAXBVector3d()
    {
    }

    /**
     * Creates a vector from explicit components.
     *
     * @param x the x component
     * @param y the y component
     * @param z the z component
     */
    public JAXBVector3d(double x, double y, double z)
    {
        myX = x;
        myY = y;
        myZ = z;
    }

    /**
     * Creates a vector mirroring the given {@link Vector3d}.
     *
     * @param vec the source vector
     */
    public JAXBVector3d(Vector3d vec)
    {
        this(vec.getX(), vec.getY(), vec.getZ());
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }
        if (obj == null || obj.getClass() != getClass())
        {
            return false;
        }
        // Component-wise comparison; NaN components compare equal to NaN.
        JAXBVector3d other = (JAXBVector3d)obj;
        return Utilities.equalsOrBothNaN(myX, other.myX) && Utilities.equalsOrBothNaN(myY, other.myY)
                && Utilities.equalsOrBothNaN(myZ, other.myZ);
    }

    /**
     * Converts this JAXB adapter back to a core {@link Vector3d}.
     *
     * @return the vector3d
     */
    public Vector3d getVector3d()
    {
        return new Vector3d(myX, myY, myZ);
    }

    /**
     * Gets the x component.
     *
     * @return the x
     */
    public final double getX()
    {
        return myX;
    }

    /**
     * Gets the y component.
     *
     * @return the y
     */
    public final double getY()
    {
        return myY;
    }

    /**
     * Gets the z component.
     *
     * @return the z
     */
    public final double getZ()
    {
        return myZ;
    }

    @Override
    public int hashCode()
    {
        // Same fold as the classic Double.doubleToLongBits scheme, factored
        // through a helper; produces identical values to the hand-rolled loop.
        int result = 1;
        result = mixDouble(result, myX);
        result = mixDouble(result, myY);
        result = mixDouble(result, myZ);
        return result;
    }

    /**
     * Folds one double component into a running hash.
     *
     * @param result the accumulator so far
     * @param value the component to fold in
     * @return the updated accumulator
     */
    private static int mixDouble(int result, double value)
    {
        final long bits = Double.doubleToLongBits(value);
        return 31 * result + (int)(bits ^ bits >>> 32);
    }

    /**
     * Sets the x component.
     *
     * @param x the new x
     */
    public final void setX(double x)
    {
        myX = x;
    }

    /**
     * Sets the y component.
     *
     * @param y the new y
     */
    public final void setY(double y)
    {
        myY = y;
    }

    /**
     * Sets the z component.
     *
     * @param z the new z
     */
    public final void setZ(double z)
    {
        myZ = z;
    }
}
|
package com.utn.ejercicio3;
import java.util.UUID;
/**
 * Account holder: immutable-by-convention name/gender pair with a randomly
 * generated UUID assigned at construction time.
 */
public class Titular {
    private UUID uuid;
    private String nombre;
    private char genero;

    public Titular(String nombre, char genero) {
        this.uuid = UUID.randomUUID();
        this.nombre = nombre;
        this.genero = genero;
    }

    public UUID getUuid() {
        return uuid;
    }

    public String getNombre() {
        return nombre;
    }

    public char getGenero() {
        return genero;
    }

    @Override
    public String toString() {
        // Produces the same format as the original concatenation:
        // Titular{uuid=<uuid>, nombre='<nombre>', genero=<genero>}
        StringBuilder sb = new StringBuilder("Titular{");
        sb.append("uuid=").append(uuid);
        sb.append(", nombre='").append(nombre).append('\'');
        sb.append(", genero=").append(genero);
        sb.append('}');
        return sb.toString();
    }
}
|
<gh_stars>10-100
# Endpoint roots for the MYOB AccountRight API and the OAuth2 partner portal.
MYOB_BASE_URL = 'https://api.myob.com/accountright/'
MYOB_PARTNER_BASE_URL = 'https://secure.myob.com/oauth2/'
# OAuth2 paths, relative to MYOB_PARTNER_BASE_URL.
AUTHORIZE_URL = 'account/authorize/'
ACCESS_TOKEN_URL = 'v1/authorize/'
# Default number of records requested per page of list results.
DEFAULT_PAGE_SIZE = 400
# Format in which MYOB returns datetimes
# (pymyob won't parse these, but offers the constant for convenience).
DATETIME_FORMATS = ['YYYY-MM-DDTHH:mm:ss', 'YYYY-MM-DDTHH:mm:ss.SSS']
|
#import relevant libraries
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
#define customer data
# NOTE(review): 'Gender' is a string here while the other fields are numeric;
# this only works if data.csv encodes Gender the same way — TODO confirm.
customer_data = {
'Age': [34],
'Gender': ['Male'],
'Homeowner': [1],
'Education': [1],
'Marital Status': [1],
'Employment': [2],
'Income': [50000],
}
#convert to a dataframe
customer_data_df = pd.DataFrame(customer_data, columns=['Age', 'Gender', 'Homeowner', 'Education', 'Marital Status', 'Employment', 'Income'])
#load data
# NOTE(review): assumes data.csv exists in the working directory with a
# 'label' column plus the same feature columns (and ordering) as
# customer_data_df above — verify before running.
data = pd.read_csv('data.csv')
#create the features for our model
features = [x for x in data.columns if x not in ['label']]
#split into data and labels
X = data[features]
y = data['label']
#split into training and testing sets
# (X_test/y_test are created but not evaluated in this script.)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
#fit a random forest model
model = RandomForestClassifier(n_estimators=100)
model.fit(X_train, y_train)
#get the customer's prediction
prediction = model.predict(customer_data_df)[0]
if prediction == 0:
    print("Customer is not likely to buy the product.")
else:
    print("Customer is likely to buy the product.")
<reponame>tsmvision/spring-security-examples
package com.example.corespringsecurity.security.handler;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.web.access.AccessDeniedHandler;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
@Configuration
public class CustomAccessDeniedHandler implements AccessDeniedHandler {

    /** Target page for access-denied redirects, injected from configuration. */
    @Value("${errors.error-page-url}")
    private String errorPage;

    /**
     * Redirects a denied request to the configured error page, passing the
     * denial reason as the {@code exception} query parameter.
     */
    @Override
    public void handle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, AccessDeniedException e) throws IOException, ServletException {
        // URL-encode the message: raw exception text may contain spaces, '&'
        // or '#' which would corrupt the redirect URL or inject extra
        // query parameters. Also guard against a null message (NPE).
        String message = e.getMessage() == null ? "" : e.getMessage();
        String deniedUrl = errorPage + "?exception=" + URLEncoder.encode(message, StandardCharsets.UTF_8.name());
        httpServletResponse.sendRedirect(deniedUrl);
    }
}
|
<gh_stars>0
// Package-level JAXB binding: all XML elements in this package belong to the
// BLZ namespace and must be namespace-qualified.
@javax.xml.bind.annotation.XmlSchema(namespace = "http://thomas-bayer.com/blz/", elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED)
package com.thomas_bayer.blz;
|
#!/usr/bin/env bash
# Download pfSense+ firmware from the Netgate partner vault.
# Requires cookies to be valid first, so a browser login and fetch of the
# cookie is required. Setting version allows for downloading only that version.

USER_COOKIE=""
CURRENT_VERISON="21.05.1"
DL_CHECKSUM=1     # Set to 1 to download the .sha256 checksum next to each image.
VERIFY_CHECKSUM=0 # Set to 1 to verify checksums (not wired into main flow yet).
CURL_OPTS="--compressed --progress-bar -O"

function verify_sha256() {
    # Verify SHA256 signature. Filename should not have the .sha256 suffix.
    # (Fixed: previously referenced an unexpanded "$(unknown)" placeholder.)
    filename=$1
    checksum=$(sha256sum --check "${filename}.sha256")
    # TODO colorize response. Check if failed/success
    echo "$checksum"
}

function download() {
    # Build the list of firmware image URLs for the requested version.
    # Fixed: SG-1000 and SG-1100 both used index 6, so the SG-1000 URL was
    # silently overwritten and never downloaded.
    # SG-1000
    URL[7]="https://partnervault.netgate.com/files/firmware/sg-1000/pfSense-plus-SG-1000-recover-${DOWNLOAD_VERISON}-RELEASE-armv7.img.gz"
    # SG-1100
    URL[6]="https://partnervault.netgate.com/files/firmware/sg-1100/pfSense-plus-SG-1100-recover-${DOWNLOAD_VERISON}-RELEASE-armv7.img.gz"
    # SG-2100
    URL[5]="https://partnervault.netgate.com/files/firmware/sg-2100/pfSense-plus-SG-2100-recovery-${DOWNLOAD_VERISON}-RELEASE-aarch64.img.gz"
    # SG-3100
    URL[4]="https://partnervault.netgate.com/files/firmware/sg-3100/pfSense-plus-SG-3100-recover-${DOWNLOAD_VERISON}-RELEASE-armv7.img.gz"
    # memstick
    URL[0]="https://partnervault.netgate.com/files/firmware/memstick/pfSense-plus-memstick-${DOWNLOAD_VERISON}-RELEASE-amd64.img.gz"
    URL[1]="https://partnervault.netgate.com/files/firmware/memstick/pfSense-plus-memstick-ADI-${DOWNLOAD_VERISON}-RELEASE-amd64.img.gz"
    URL[2]="https://partnervault.netgate.com/files/firmware/memstick/pfSense-plus-memstick-XG-7100-${DOWNLOAD_VERISON}-RELEASE-amd64.img.gz"
    URL[3]="https://partnervault.netgate.com/files/firmware/memstick/pfSense-plus-memstick-serial-${DOWNLOAD_VERISON}-RELEASE-amd64.img.gz"

    # Download files.
    for url in "${URL[@]}"
    do
        filename=$(basename "${url}")
        echo "Downloading ${url} => ${filename}"
        curl "${url}" \
            -H 'Connection: keep-alive' \
            -H 'Upgrade-Insecure-Requests: 1' \
            -H "Cookie: ${COOKIE}" \
            ${CURL_OPTS}
        if [ "$DL_CHECKSUM" -eq "1" ]; then
            sha_url="${url}.sha256"
            filename=$(basename "${sha_url}")
            echo "Downloading checksum ${filename}"
            curl "${sha_url}" \
                -H 'Connection: keep-alive' \
                -H 'Upgrade-Insecure-Requests: 1' \
                -H "Cookie: ${COOKIE}" \
                ${CURL_OPTS}
        fi
    done
}

read -p "Enter the pfsense+ version to download [${CURRENT_VERISON}]: " DOWNLOAD_VERISON
DOWNLOAD_VERISON=${DOWNLOAD_VERISON:-${CURRENT_VERISON}}
read -p "Paste your user cookie, or enter to use default: " COOKIE
COOKIE=${COOKIE:-${USER_COOKIE}}
download
echo "Done. Have a nice day!"
|
#!/usr/bin/env bash
# Initialize submodules, install npm dependencies, then build ace.
# Each step aborts the script on failure.

git submodule init || { printf "Initialized submodule failure"; exit 1; }

git submodule update || { printf "submodule update failure"; exit 1; }

npm install || { printf "npm install failure"; exit 1; }

# Fixed: 'cd ace' was unchecked; on failure the following install/build
# would have run in the wrong directory.
cd ace || { printf "cd ace failure"; exit 1; }

npm install || { printf "npm install failure"; exit 1; }

node Makefile.dryice.js full --target ../ace-builds || { printf "ace build failure"; exit 1; }
|
<gh_stars>1-10
package desarrollomobile.tiendadeclases.tiendadeclases.Adapters;
import java.util.ArrayList;
import java.util.List;
import desarrollomobile.tiendadeclases.tiendadeclases.Service.SubCategorias;
/**
 * Listener invoked when a category item is clicked; receives the clicked
 * category's subcategories.
 */
public interface CategoriaListener {
    void onItemClick(ArrayList<SubCategorias> subcat);
}
const express = require('express');
const app = express();
const http = require('http').Server(app);
const io = require('socket.io')(http);

// Simple health-check route.
app.get('/', (req, res) => {
    res.send('Chat Server is running on port 3000');
});

io.on('connection', socket => {
    console.log('user connected');

    // Remember the chosen username on the socket for later broadcasts.
    socket.on('change_username', data => {
        socket.username = data.username
    });

    // Fan a new message out to every connected client (sender included).
    socket.on('new_message', data => {
        io.sockets.emit('new_message', {message : data.message, username : socket.username});
    });

    // Typing indicators go to everyone except the sender.
    socket.on('typing', data => {
        socket.broadcast.emit('typing', {username : socket.username})
    });

    socket.on('stop_typing', () => {
        socket.broadcast.emit('stop_typing', {username : socket.username});
    });

    socket.on('disconnect', () => {
        console.log('user disconnected');
    });
});
// Fixed: a stray '$' character after the statement above was a syntax error.

http.listen(3000, () => {
    console.log('listening on *:3000');
});
#include <iostream>
using namespace std;

// Reads up to 100 integers from stdin and echoes them back.
int main()
{
    int array[100], n, c;

    cout << "Enter the number of elements to be stored in the array: ";
    cin >> n;

    // Reject bad input and sizes the fixed 100-element buffer cannot hold;
    // previously n > 100 caused out-of-bounds writes on the stack.
    if (!cin || n < 0 || n > 100) {
        cout << "Invalid number of elements (must be 0..100)." << endl;
        return 1;
    }

    cout << "Enter " << n << " elements:" << endl;
    for (c = 0; c < n; c++)
        cin >> array[c];

    cout << "You entered:" << endl;
    for (c = 0; c < n; c++)
        cout << array[c] << endl;

    return 0;
}
#!/usr/bin/with-contenv ash
# Container init step: load the homecentr helper scripts, then invoke them.
# NOTE(review): presumably each sourced file defines a shell function of the
# same name that the two calls below execute -- confirm against the image.
source homecentr_print_banner
source homecentr_print_context

homecentr_print_banner
homecentr_print_context
package net.imglib2.type.label;
import org.junit.Assert;
public class VolatileLabelMultisetArrayTest {

    /**
     * Asserts that two VolatileLabelMultisetArray instances are equal:
     * same argMax copy, same used list-data size, same list-data longs and
     * same backing storage array.
     */
    public static void assertVolatileLabelMultisetArrayEquality(VolatileLabelMultisetArray first, VolatileLabelMultisetArray second) {
        Assert.assertArrayEquals(": ArgMax was not equal!", first.argMaxCopy(), second.argMaxCopy());
        /* This is necessary, since the size of the array may be larger than the actual size of the data, due to how the array is resized when more space is needed. */
        // Fixed: these checks previously used the bare `assert` keyword, which
        // is a no-op unless the JVM runs with -ea, so failures passed silently.
        Assert.assertEquals(first.getListDataUsedSizeInBytes(), second.getListDataUsedSizeInBytes());
        for (int i = 0; i < first.getListDataUsedSizeInBytes() / Long.BYTES; i++) {
            Assert.assertEquals(first.getListData().data[i], second.getListData().data[i]);
        }
        Assert.assertArrayEquals(": List Data was not equal!", first.getCurrentStorageArray(), second.getCurrentStorageArray());
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.repo.isa_dom.dataimport;
import ed.biodare2.backend.repo.isa_dom.dataimport.TimeColumnProperties;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import ed.biodare2.backend.repo.isa_dom.DomRepoTestBuilder;
import java.io.IOException;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Before;
/**
 * Round-trip serialization test for {@code TimeColumnProperties}.
 *
 * @author tzielins
 */
public class TimeColumnPropertiesTest {

    public TimeColumnPropertiesTest() {
    }

    // Fresh mapper per test; modules re-registered in setUp.
    ObjectMapper mapper;

    @Before
    public void setUp() {
        mapper = new ObjectMapper();
        mapper.findAndRegisterModules();
    }

    @After
    public void tearDown() {
    }

    /**
     * Serializes a builder-made instance to JSON and back, then checks the
     * individual fields (doubles with 1E-6 tolerance) and full equality.
     */
    @Test
    public void serializesToJSONAndBack() throws JsonProcessingException, IOException {
        TimeColumnProperties org = DomRepoTestBuilder.makeTimeColumnProperties();
        mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
        String json = mapper.writeValueAsString(org);
        assertNotNull(json);
        System.out.println("TimeColumnProperties JSON:\n\n"+json+"\n");
        TimeColumnProperties cpy = mapper.readValue(json, TimeColumnProperties.class);
        assertEquals(org.firstRow,cpy.firstRow);
        assertEquals(org.timeOffset,cpy.timeOffset,1E-6);
        assertEquals(org.imgInterval,cpy.imgInterval,1E-6);
        assertEquals(org.timeType,cpy.timeType);
        assertEquals(org,cpy);
    }
}
|
/// <reference types="@nervosnetwork/ckb-types" />
import { Injectable, HttpService } from '@nestjs/common';
import * as _ from 'lodash';
import * as Types from '../types';
import { BlockService } from '../block/block.service';
import { AddressService } from '../address/address.service';
import { bigintStrToNum } from '../util/number';
import * as ckbUtils from '@nervosnetwork/ckb-sdk-utils';
import { CKB_TOKEN_DECIMALS, EMPTY_TX_HASH } from '../util/constant';
import { CellRepository } from './cell.repository';
import { ApiException } from '../exception/api.exception';
import { ApiCode } from '../util/apiCode.enums';
import * as utils from '@nervosnetwork/ckb-sdk-utils';
import { Not } from 'typeorm';
import { CkbService } from '../ckb/ckb.service';
import {
ReadableCell,
NewTx,
TxHistory,
BlockNumberAndTxHash,
} from './cell.interface';
import { Cell } from 'src/model/cell.entity';
import { ReturnCapacity } from './interfaces/cell.interface';
@Injectable()
export class CellService {
constructor(
    private cellRepository: CellRepository,
    private readonly httpService: HttpService,
    private readonly blockService: BlockService,
    private readonly addressService: AddressService,
    private readonly ckbService: CkbService,
) { }

// CKB RPC client, resolved once at construction time.
private readonly ckb = this.ckbService.getCKB();

/** Persists a cell row and returns the saved entity. */
public async create(cell: Cell): Promise<Cell> {
    return await this.cellRepository.save(cell);
}
/**
 * Sums the capacity of all live cells owned by `address`; 0 if none.
 * NOTE(review): capacities are accumulated as JS numbers here, while other
 * methods in this service sum with BigInt -- totals above
 * Number.MAX_SAFE_INTEGER would lose precision. Confirm expected magnitudes.
 */
public async getBalanceByAddress(address: string): Promise<number> {
    const queryObj = {
        address,
        status: 'live',
    };
    const liveCells = await this.cellRepository.find(queryObj);
    if (liveCells.length === 0) return 0;
    const result = liveCells.reduce((pre, cur, index, arr) => {
        return pre + Number(cur.capacity);
    }, 0);
    return result;
}
/**
 * Maps raw cell DB rows into the SDK-style cell shape returned to callers:
 * nested lock/type scripts, outPoint, and hex-encoded capacity/data length.
 */
public getReturnCells(unspentCells) {
    const newUnspentCells = [];
    for (const cell of unspentCells) {
        // Byte length of the cell's output data, reported as hex.
        const dataLength = ckbUtils.hexToBytes(cell.outputData).length;
        const newCell = {
            blockHash: cell.blockHash,
            lock: {
                codeHash: cell.lockCodeHash,
                hashType: cell.lockHashType,
                args: cell.lockArgs,
            },
            lockHash: cell.lockHash,
            outPoint: {
                txHash: cell.txHash,
                index: cell.index,
            },
            outputData: cell.outputData,
            outputDataLen: '0x' + dataLength.toString(16),
            capacity: '0x' + bigintStrToNum(cell.capacity.toString()).toString(16),
            type: {
                codeHash: cell.typeCodeHash,
                hashType: cell.typeHashType,
                args: cell.typeArgs,
            },
            typeHash: cell.typeHash,
            dataHash: cell.outputDataHash,
            status: cell.status,
        };
        newUnspentCells.push(newCell);
    }
    return newUnspentCells;
}
/**
 * Returns live cells for a lock hash in SDK cell format, in one of two modes:
 *  1. `limit` given  -- return at most `limit` cells (first page only);
 *  2. `capacity` given -- accumulate cells until the transfer amount plus a
 *     61-CKB change cell plus a placeholder 1-CKB fee is covered.
 * Throws ApiException when the address's total balance is insufficient.
 */
public async getUnspentCells(params: Types.UnspentCellsParams) {
    console.time('getUnspentCells');
    const { lockHash, typeHash, capacity, hasData, limit } = params;
    const queryObj: Types.UnspentCellsQuery = {
        lockHash,
        status: 'live',
    };
    // 'isNull' is a sentinel meaning "only cells WITHOUT a type script".
    let typeHashisNullFlg = false;
    if (!_.isEmpty(typeHash)) {
        if (typeHash === 'isNull') {
            typeHashisNullFlg = true;
        } else {
            queryObj.typeHash = typeHash;
        }
    }
    // '0x' output data marks an "empty" cell; filter on data presence.
    if (hasData === 'true') {
        queryObj.outputData = Not('0x'); // TODO
    } else if (hasData === 'false') {
        queryObj.outputData = '0x';
    }
    // 1 - limit mode.
    if (limit !== undefined) {
        const cells = await this.cellRepository.queryByQueryObjAndStepPage(
            queryObj,
            parseInt(limit, 10),
            0,
            typeHashisNullFlg,
        );
        const unspentCells = await cells.getMany();
        return this.getReturnCells(unspentCells) || [];
    }
    // 2 - capacity mode.
    if (capacity === undefined) {
        return [];
    }
    // Placeholder fee reserved while selecting inputs.
    const fakeFee = 1 * CKB_TOKEN_DECIMALS;
    const ckbcapacity = parseInt(capacity);
    const _totalcapacity = await this.addressService.getAddressInfo(lockHash);
    const totalcapacity = BigInt(_totalcapacity.capacity);
    if (totalcapacity < ckbcapacity) {
        // Insufficient balance.
        // throw new ServiceError('lack of capacity', '1001')
        throw new ApiException('lack of capacity', ApiCode.LACK_OF_CAPACITY, 200);
    }
    // Target = transfer amount + 61 CKB minimum change cell + fee.
    const sendCapactity = ckbcapacity + 61 * CKB_TOKEN_DECIMALS + fakeFee;
    // Fast path: a single cell covering the whole amount.
    const cell = await this.cellRepository.queryByQueryObjAndCapacity(
        queryObj,
        sendCapactity,
    );
    let unspentCells = [];
    if (cell) {
        unspentCells.push(cell);
    } else {
        // Slow path: page through cells 50 at a time, accumulating until the
        // target capacity is reached or no cells remain.
        let sumCapacity = 0;
        let page = 0;
        const step = 50;
        let newcells = [];
        do {
            const cells = await this.cellRepository.queryByQueryObjAndStepPage(
                queryObj,
                step,
                page,
                typeHashisNullFlg,
            );
            newcells = await cells.getMany();
            unspentCells = _.concat(unspentCells, newcells);
            sumCapacity = unspentCells.reduce((pre, cur) => {
                return pre + BigInt(cur.capacity);
            }, BigInt(0));
            page += 1;
        } while (sumCapacity < sendCapactity && newcells.length > 0);
    }
    if (unspentCells.length === 0) {
        return [];
    }
    console.timeEnd('getUnspentCells');
    return this.getReturnCells(unspentCells);
}
/**
 * Collects UDT-bearing cells under `lockHash` plus the total "free"
 * (data-less) capacity. With no type-script filter, every live cell with
 * output data is returned; otherwise only cells matching the given scripts.
 * NOTE(review): inside the typeScripts loop, `result['udts']` is reassigned
 * on every iteration, so only the LAST script's cells survive, and an empty
 * match returns [] for the whole call -- confirm this is intended.
 */
public async getCellsByLockHashAndTypeScripts(
    lockHash,
    typeScripts: CKBComponents.Script[],
) {
    const result = {};
    if (
        typeScripts === undefined ||
        _.isEmpty(typeScripts) ||
        typeScripts.length === 0
    ) {
        const findObj = { lockHash, status: 'live' };
        const cells = await this.cellRepository.find(findObj);
        if (_.isEmpty(cells)) {
            return [];
        }
        const udts = [];
        for (const cell of cells) {
            // Only data-bearing cells can hold a UDT amount.
            if (cell.outputData !== '0x') {
                if (cell.typeCodeHash === undefined || cell.typeCodeHash === null) {
                    // Data without a type script: report with null type info.
                    const udt = {
                        typeHash: null,
                        capacity: cell.capacity,
                        outputdata: cell.outputData,
                        type: null,
                    };
                    udts.push(udt);
                } else {
                    // Rebuild the type script and derive its hash.
                    const typeScript: CKBComponents.Script = {
                        args: cell.typeArgs,
                        codeHash: cell.typeCodeHash,
                        hashType: cell.typeHashType as CKBComponents.ScriptHashType,
                    };
                    const typeScriptHash = utils.scriptToHash(typeScript);
                    const udt = {
                        typeHash: typeScriptHash,
                        capacity: cell.capacity,
                        outputdata: cell.outputData,
                        type: typeScript,
                    };
                    udts.push(udt);
                }
            }
        }
        result['udts'] = udts;
    } else {
        for (const typeScript of typeScripts) {
            const cells = await this.cellRepository.queryCellsByLockHashAndTypeScript(
                lockHash,
                typeScript.hashType,
                typeScript.codeHash,
                typeScript.args,
            );
            if (_.isEmpty(cells)) {
                return [];
            }
            const udts = [];
            for (const cell of cells) {
                const typeScript: CKBComponents.Script = {
                    args: cell.typeArgs,
                    codeHash: cell.typeCodeHash,
                    hashType: cell.typeHashType as CKBComponents.ScriptHashType,
                };
                const typeScriptHash = utils.scriptToHash(typeScript);
                const udt = {
                    typeHash: typeScriptHash,
                    capacity: cell.capacity,
                    outputdata: cell.outputData,
                    type: typeScript,
                    txHash: cell.txHash,
                    index: cell.index,
                };
                udts.push(udt);
            }
            result['udts'] = udts;
        }
    }
    // Free capacity = sum over cells with no type script and no data.
    const freeCells = await this.cellRepository.queryFreeCellsByLockHash(
        lockHash,
    );
    function getTotalCapity(total, cell) {
        return BigInt(total) + BigInt(cell.capacity);
    }
    const totalFreeCapity = freeCells.reduce(getTotalCapity, 0);
    result['capacity'] = totalFreeCapity.toString();
    return result;
}
/**
 * Like getCellsByLockHashAndTypeScripts, but filters by precomputed type
 * hashes. Returns { udts, capacity } where capacity is the total free
 * (data-less) capacity as a string.
 */
public async getCellsByLockHashAndTypeHashes(lockHash, typeHashes: string[]) {
    const result = {};
    if (typeHashes === undefined || typeHashes.length === 0) {
        const findObj = { lockHash, status: 'live' };
        const cells = await this.cellRepository.find(findObj);
        if (_.isEmpty(cells)) {
            return [];
        }
        const udts = [];
        for (const cell of cells) {
            // Only data-bearing cells can hold a UDT amount.
            if (cell.outputData !== '0x') {
                if (cell.typeCodeHash === undefined || cell.typeCodeHash === null) {
                    const udt = {
                        typeHash: null,
                        capacity: cell.capacity,
                        outputdata: cell.outputData,
                        type: null,
                    };
                    udts.push(udt);
                } else {
                    const typeScript: CKBComponents.Script = {
                        args: cell.typeArgs,
                        codeHash: cell.typeCodeHash,
                        hashType: cell.typeHashType as CKBComponents.ScriptHashType,
                    };
                    const typeScriptHash = utils.scriptToHash(typeScript);
                    const udt = {
                        typeHash: typeScriptHash,
                        capacity: cell.capacity,
                        outputdata: cell.outputData,
                        type: typeScript,
                        txHash: cell.txHash,
                        index: cell.index,
                    };
                    udts.push(udt);
                }
            }
        }
        result['udts'] = udts;
    } else {
        const cells = await this.cellRepository.queryCellsByLockHashAndTypeHashes(
            lockHash,
            typeHashes,
        );
        if (_.isEmpty(cells)) {
            return [];
        }
        const udts = [];
        for (const cell of cells) {
            const typeScript: CKBComponents.Script = {
                args: cell.typeArgs,
                codeHash: cell.typeCodeHash,
                hashType: cell.typeHashType as CKBComponents.ScriptHashType,
            };
            const typeScriptHash = utils.scriptToHash(typeScript);
            const udt = {
                typeHash: typeScriptHash,
                capacity: cell.capacity,
                outputdata: cell.outputData,
                type: typeScript,
            };
            udts.push(udt);
        }
        result['udts'] = udts;
    }
    // Free capacity = sum over cells with no type script and no data.
    const freeCells = await this.cellRepository.queryFreeCellsByLockHash(
        lockHash,
    );
    function getTotalCapity(total, cell) {
        return BigInt(total) + BigInt(cell.capacity);
    }
    const totalFreeCapity = freeCells.reduce(getTotalCapity, 0);
    result['capacity'] = totalFreeCapity.toString();
    return result;
}
/**
 * Returns the address's total capacity together with the capacity held in
 * "empty" live cells (output data '0x').
 */
public async getCapacity(lockHash): Promise<ReturnCapacity> {
    const addressInfo = await this.addressService.getAddressInfo(lockHash);
    const emptyCellsQuery = { lockHash, status: 'live', outputData: '0x' };
    const emptyCapacityResult = await this.cellRepository.queryEmptyCapacity(emptyCellsQuery);
    return {
        capacity: addressInfo.capacity,
        emptyCapacity: emptyCapacityResult.totalCapacity,
    };
}
/**
 * Expands tx references into readable transactions: block number and header
 * timestamp, plus readable input and output cells. Cellbase inputs (all-zero
 * previous tx hash) are skipped. Sets `typeHash` from any typed cell seen.
 */
async parseBlockTxs(txs: TxHistory[]) {
    const newTxs = [];
    for (const tx of txs) {
        const newTx: Partial<NewTx> = {};
        newTx.hash = tx.txHash;
        if (tx.blockNumber) {
            newTx.blockNum = Number(tx.blockNumber);
            const header = await this.ckb.rpc.getHeaderByNumber(
                BigInt(tx.blockNumber),
            );
            // Skip the whole tx when its header cannot be fetched.
            if (!header) continue;
            newTx.timestamp = parseInt(header.timestamp, 16);
        }
        const txObj = await this.ckb.rpc.getTransaction(tx.txHash);
        const { outputs, inputs, outputsData } = txObj.transaction;
        const newInputs = [];
        for (const input of inputs) {
            const befTxHash = input.previousOutput.txHash;
            if (befTxHash !== EMPTY_TX_HASH) {
                // 0x000...000 marks a cellbase (block reward) tx: inputs are empty.
                const befIndex = input.previousOutput.index;
                // Resolve the spent output from its originating transaction.
                const inputTxObj = await this.ckb.rpc.getTransaction(befTxHash);
                const inputTx = inputTxObj.transaction;
                const output = inputTx.outputs[parseInt(befIndex, 16)];
                const outputData = inputTx.outputsData[parseInt(befIndex, 16)];
                const newInput = this.getReadableCell(output, outputData);
                if (newInput.typeHash !== null) {
                    newTx.typeHash = newInput.typeHash;
                }
                newInputs.push(newInput);
            }
        }
        newTx.inputs = newInputs;
        const newOutputs = [];
        for (let i = 0; i < outputs.length; i++) {
            const output = outputs[i];
            const outputData = outputsData[i];
            const newOutput = this.getReadableCell(output, outputData);
            if (newOutput.typeHash !== null) {
                newTx.typeHash = newOutput.typeHash;
            }
            newOutputs.push(newOutput);
        }
        newTx.outputs = newOutputs;
        newTxs.push(newTx);
    }
    return newTxs;
}
/**
 * Annotates each readable tx with fee, direction (income) and net CKB/SUDT
 * amounts, from the perspective of `lockHash`.
 * (Original mojibake comments translated to English.)
 */
async getTxDetails(blockTxs, lockHash) {
    for (const tx of blockTxs) {
        const inSum = tx.inputs.reduce((prev, next) => prev + next.capacity, 0);
        const outSum = tx.outputs.reduce((prev, next) => prev + next.capacity, 0);
        const fee = inSum - outSum;
        tx.fee = fee < 0 ? 0 : fee; // handle cellBase condition
        tx.amount = 0;
        // Keep only cells belonging to this lock hash on each side.
        const inputCells = _.filter(tx.inputs, function (input) {
            return input.lockHash === lockHash;
        });
        const outputCells = _.filter(tx.outputs, function (output) {
            return output.lockHash === lockHash;
        });
        // 1 - only inputs belong to us: everything spent is outgoing.
        if (!_.isEmpty(inputCells) && _.isEmpty(outputCells)) {
            tx.income = false; // outgoing
            tx.amount = inputCells.reduce((prev, next) => prev + next.capacity, 0);
            tx.sudt = inputCells.reduce((prev, next) => prev + next.sudt, 0);
        }
        // 2 - only outputs belong to us: everything received is incoming.
        if (_.isEmpty(inputCells) && !_.isEmpty(outputCells)) {
            tx.income = true; // incoming
            tx.amount = outputCells.reduce((prev, next) => prev + next.capacity, 0);
            tx.sudt = outputCells.reduce((prev, next) => prev + next.sudt, 0);
        }
        let inputAmount = 0;
        let outputAmount = 0;
        let inputSudt = 0;
        let outputSudt = 0;
        // 3 - both sides involved: direction follows the net difference.
        if (!_.isEmpty(inputCells) && !_.isEmpty(outputCells)) {
            inputAmount = inputCells.reduce(
                (prev, next) => prev + next.capacity,
                0,
            );
            outputAmount = outputCells.reduce(
                (prev, next) => prev + next.capacity,
                0,
            );
            if (inputAmount > outputAmount) {
                tx.income = false; // outgoing
                tx.amount = inputAmount - outputAmount;
            } else {
                tx.income = true; // incoming
                tx.amount = outputAmount - inputAmount;
            }
            inputSudt = inputCells.reduce((prev, next) => prev + next.sudt, 0);
            outputSudt = outputCells.reduce((prev, next) => prev + next.sudt, 0);
            if (inputSudt > outputSudt) {
                tx.sudt = inputSudt - outputSudt;
            } else {
                tx.sudt = outputSudt - inputSudt;
            }
        }
    }
    return blockTxs;
}
/**
 * Decodes an SUDT amount from a cell's output data by reversing the byte
 * order and parsing the remaining hex.
 * NOTE(review): despite its name, `first128Bit` is substr(16) -- only the
 * LAST 16 hex digits (64 bits) of the reversed string -- and parseInt loses
 * precision above 2^53. Confirm amounts cannot exceed these limits.
 */
parseSUDT = (bigEndianHexStr: string) => {
    const littleEndianStr = bigEndianHexStr
        .replace('0x', '')
        .match(/../g)
        .reverse()
        .join('');
    const first128Bit = littleEndianStr.substr(16);
    return parseInt(`0x${first128Bit}`, 16);
};
/**
 * Converts a raw tx output + its data into a flat readable cell: numeric
 * capacity, lock script fields/hash, and (for typed cells) the type hash
 * and decoded SUDT amount.
 */
getReadableCell(output, outputData) {
    let typeHash = null;
    let sudt = 0;
    // Only typed cells carry an SUDT amount in their data.
    if (output.type !== null) {
        typeHash = ckbUtils.scriptToHash(output.type);
        sudt = this.parseSUDT(outputData);
    }
    const result: ReadableCell = {
        capacity: parseInt(output.capacity, 16),
        lockHash: ckbUtils.scriptToHash(output.lock),
        lockCodeHash: output.lock.codeHash,
        lockArgs: output.lock.args,
        lockHashType: output.lock.hashType,
        typeHash: typeHash,
        sudt: sudt,
    };
    return result;
}
/**
 * Lists distinct transactions (block number + tx hash) touching the given
 * lock hash, paged through the cell repository.
 */
async getTransactionsByLockHash(
    lockHash: string,
    page = 0,
    step = 20,
): Promise<BlockNumberAndTxHash[]> {
    const cells: Cell[] = await this.cellRepository.queryCellsByLockHashPage(
        lockHash,
        page,
        step,
    );
    const transactionList: BlockNumberAndTxHash[] = cells.map(cell => ({
        blockNumber: cell.blockNumber.toString(),
        txHash: cell.txHash,
    }));
    // De-duplicate once after collecting; the original ran _.uniqBy inside
    // the loop on every iteration (O(n^2)) with the same final result.
    return _.uniqBy(transactionList, 'txHash');
}
/**
 * Paged, human-readable transaction history for a lock hash: fetch tx
 * references, expand them into readable txs, then derive per-tx
 * direction/amount/fee details.
 */
public async getTxHistoriesByLockHash(lockHash, page = 0, step = 20) {
    const txRefs = await this.getTransactionsByLockHash(lockHash, page, step);
    const readableTxs = await this.parseBlockTxs(txRefs);
    return this.getTxDetails(readableTxs, lockHash);
}
}
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/icomoon/yahoo.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.yahoo = void 0;
var yahoo = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M8.878 9.203v0c1.759-3.088 4.666-8.125 5.463-9.203-0.35 0.234-0.887 0.353-1.381 0.466l-0.747-0.466c-0.6 1.119-2.813 4.734-4.222 7.050-1.428-2.366-3.119-5.097-4.222-7.050-0.875 0.188-1.237 0.197-2.109 0v0 0c0 0 0 0 0 0v0c1.731 2.606 4.503 7.572 5.447 9.203v0l-0.128 6.797 1.013-0.466v-0.012l1.012 0.478-0.125-6.797z"
}
}]
};
exports.yahoo = yahoo; |
<gh_stars>0
"""
Program to explore stop diagrams on the back of the retina in the Eye class
"""
from poptics.lens import Eye
from poptics.ray import RayPencil,RayPath
from poptics.tio import getFloat,tprint,getUnit3d
from poptics.psf import Psf,SpotDiagram
import matplotlib.pyplot as plt
def main():
    """Interactive demo: build an Eye, set its iris and a beam direction,
    then draw the ray paths through the lens and the spot diagram on the
    retina side by side.
    """
    lens = Eye()
    iris = getFloat("Iris", 1.0)
    lens.setIris(iris)
    u = getUnit3d("Direction", 0.0)

    # Two pencils through the eye: one for drawing paths, one as a spot array.
    vpencil = RayPencil().addBeam(lens, u, key="vl").addMonitor(RayPath())
    spencil = RayPencil().addBeam(lens, u, key="array")
    vpencil *= lens
    spencil *= lens

    plane = lens.getRetina()
    ps = Psf().setWithRays(spencil, plane)
    tprint("PSF is", repr(ps))

    plt.subplot(2, 1, 1)
    lens.draw()
    vpencil.draw()
    plt.axis("equal")
    plt.subplot(2, 1, 2)
    spot = SpotDiagram(spencil)
    spot.draw(plane, True)
    plt.show()


# Fixed: guard the entry point so importing this module no longer runs the
# interactive demo as a side effect.
if __name__ == "__main__":
    main()
|
#!/bin/bash
# stars25 was not in service despite being in the database.
# The old hardware was fried so we reuse the name for a new photometer.

# Pause tessdb so the database is quiescent, then apply the update in a
# single transaction and resume the service.
service tessdb pause ; sleep 1
sqlite3 /var/dbase/tess.db <<EOF
.mode column
.headers on
BEGIN TRANSACTION;
UPDATE tess_t
SET filter = 'UVIR', mac_address = '1A:FE:34:D3:43:A8', zero_point = 20.41
WHERE name = 'stars25';
COMMIT;
EOF
service tessdb resume
|
var socket = io();
var localSearchTerm;
// Current result page; kept in sync with browser history state.
var page = 1;

// Persist a star rating per movie in localStorage (keyed by IMDB id).
var updateAndStoreRating = function(movieId, newRating) {
  localStorage.setItem(movieId, newRating);
}

var getRatingById = function(movieId) {
  return localStorage.getItem(movieId);
}

// Back/forward navigation: restore the page recorded in history state and
// ask the server for that page's results.
window.onpopstate = function(event) {
  if(event.state) {
    page = event.state.page;
    $("#loadingSpinner").css('display','block');
    socket.emit("updatePage", event.state.page);
  }
};
// Displays the current search term, updated live via the socket.
var SearchTerm = React.createClass({
  getInitialState: function() {
    return {searchTerm : 'Batman'};
  },
  componentDidMount: function() {
    socket.on('updateSearchTerm', this.updateSearchTerm);
  },
  updateSearchTerm: function(term) {
    this.setState({searchTerm : term});
  },
  render() {
    return (
      <div>
        {this.state.searchTerm}
      </div>
    );
  }
});

ReactDOM.render(
  <SearchTerm/>,
  document.getElementById('searchTerm')
);
var MovieStarRating = React.createClass({
ratingClickHandler: function(e) {
if($(e.target).hasClass('glyphicon')) {
var clickedRating = $(e.target).attr('id');
var movieId = $(e.target.parentElement).attr('id');
var stars = $(e.target.parentElement).find('.glyphicon');
for(var i=0;i<stars.length;i++) {
if(i < clickedRating) {
$(stars[i]).removeClass('glyphicon-star-empty').addClass('glyphicon-star');
} else {
$(stars[i]).addClass('glyphicon-star-empty').removeClass('glyphicon-star');
}
}
updateAndStoreRating(movieId, clickedRating);
}
},
render: function() {
var starNodes = [];
var numberOfEmptyStars = 5 - this.props.rating;
for(var i=0;i<5;i++) {
if(i < this.props.rating) {
starNodes.push(<span key={i} id={i+1} className="glyphicon glyphicon-star" aria-hidden="true"></span>);
} else {
starNodes.push(<span key={i} id={i+1} className="glyphicon glyphicon-star-empty" aria-hidden="true"></span>);
}
}
return (
<div className="row" id={this.props.movieId} onClick={this.ratingClickHandler}>
{starNodes}
</div>
);
}
});
var MovieResults = React.createClass({
getInitialState: function() {
return {movies : []};
},
componentDidUpdate() {
ReactDOM.findDOMNode(this).scrollTop = 0;
},
componentDidMount: function() {
socket.on('updateResults', this.updateResults);
},
updateResults: function(results) {
$("#loadingSpinner").css('display','none');
this.setState({movies : results});
},
render: function() {
var movieNodes = [];
for(var i=0;i<this.state.movies.length;i++) {
var movie = this.state.movies[i];
var rating = getRatingById(movie.imdbID);
movieNodes.push(
<div className='col-xs-12 col-md-4 movieContainer' key={movie.imdbID}>
<div className='row movieNode'>
<div className='col-xs-4 moviePoster'>
<img className='moviePosterImg' src={movie.Poster}/>
</div>
<div className='col-xs-8 movieInfo'>
<div className='col-xs-12 movieTitleYear'>
<div className='col-xs-12 movieTitle'>
<b>{movie.Title}</b>
</div>
<div className='col-xs-12 movieYear'>
Year: {movie.Year}
</div>
</div>
<div className='col-xs-12 movieRating'>
<MovieStarRating rating={rating} movieId={movie.imdbID}/>
</div>
</div>
</div>
</div>);
}
return (
<div className="row" onClick={this.ratingClickHandler}>
{movieNodes}
</div>
);
}
});
ReactDOM.render(
<MovieResults/>,
document.getElementById('results')
);
// Pager showing a sliding window of up to 10 page links with Prev/Next;
// clicks push a history entry and request the page over the socket.
var Pagination = React.createClass({
  getInitialState: function() {
    // Seed the history with page 1 so the first back-navigation works.
    var stateObj = { page : 1 };
    history.pushState(stateObj, "page1", "");
    return { numberOfPages : 0,
             currentPage : 0};
  },
  componentDidMount: function() {
    socket.on('updatePagination', this.updatePagination);
  },
  updatePagination: function(pageData) {
    this.setState({ numberOfPages : pageData.numberOfPages,
                    currentPage : pageData.currentPage});
  },
  pageClickHandler: function(e) {
    if($(e.target).hasClass('pageLink')) {
      $("#loadingSpinner").css('display','block');
      // Link ids are either a page number or the literals 'Prev'/'Next'.
      var clickedPage = $(e.target).attr('id');
      if(clickedPage == 'Prev') {
        page--;
      } else {
        if(clickedPage == 'Next') {
          page++;
        } else {
          page = clickedPage;
        }
      }
      var stateObj = { page : page };
      history.pushState(stateObj, "page" + page, "");
      socket.emit("updatePage", page);
    }
  },
  render: function() {
    var pageLinks = [];
    // Window starts 5 pages before the current one, clamped to [1, last].
    var start = this.state.currentPage - 5;
    if(start < 1) {
      start = 1;
    }
    var end = start + 9;
    if(end > this.state.numberOfPages) {
      end = this.state.numberOfPages;
    }
    // A 'Prev' link takes one slot from the numeric window.
    if(start > 1) {
      pageLinks.push(<a className='col-xs-1 pageLink' id='Prev' key='Prev'>Prev</a>);
      end--;
    }
    for (var i = start; i < end + 1; i++) {
      if(i == this.state.currentPage) {
        pageLinks.push(<a className='col-xs-1 pageLink disabled' id={i} key={i}>{i}</a>);
      } else {
        pageLinks.push(<a className='col-xs-1 pageLink' id={i} key={i}>{i}</a>);
      }
    }
    if(this.state.numberOfPages > end) {
      pageLinks.push(<a className='col-xs-1 pageLink' id='Next' key='Next'>Next</a>);
    }
    return (
      <div className='pagesLinks row' onClick={this.pageClickHandler}>
        {pageLinks}
      </div>
    );
  }
});

ReactDOM.render(
  <Pagination/>,
  document.getElementById('pagination')
);
|
<gh_stars>0
import React from "react"
import StringArgInput from "./components/StringArgInput"
import NumberArgInput from "./components/NumberArgInput"
import FilepathArgInput from "./components/FilepahArgInput"
import { ArgValue, ArgDefinition } from "../../../../types/scripts"
import ArgOptionSelector from "./components/ArgOptionSelector"
interface Props {
  // Declared metadata for the argument (type, optional fixed options).
  definition: ArgDefinition
  // Current value; undefined/blank when unset.
  value: ArgValue
  onChange: (value: ArgValue) => void
}

/**
 * Renders the appropriate input widget for a script argument: an option
 * selector when the definition provides fixed options, otherwise an input
 * matching the declared type (string/filepath/number). Unknown types render
 * nothing.
 */
export default function ScriptArgInput({ definition, value, onChange }: Props) {
  if (definition.options) {
    return (
      <ArgOptionSelector
        options={definition.options}
        value={value}
        onChange={onChange}
        allowBlank
      />
    )
  }
  switch (definition.type) {
    case "string":
      return (
        <StringArgInput
          value={value as string | undefined}
          onChange={onChange}
        />
      )
    case "filepath":
      return (
        <FilepathArgInput
          value={value as string | undefined}
          onChange={onChange}
        />
      )
    case "int":
    case "decimal":
      return (
        <NumberArgInput
          value={value as number | undefined}
          onChange={onChange}
        />
      )
    default:
      return <></>
  }
}
|
<filename>elements/tunnel/gre.h
/* -*- mode: c; c-basic-offset: 4 -*- */
#ifndef GRE_H
#define GRE_H
#include <click/cxxprotect.h>
CLICK_CXX_PROTECT

/*
 * our own definitions of GRE headers
 * based on a file from one of the BSDs
 *
 * Bit masks below apply to the 16-bit flags field of the GRE header.
 */
#define GRE_CP 0x8000 /* Checksum Present */
#define GRE_RP 0x4000 /* Routing Present */
#define GRE_KP 0x2000 /* Key Present */
#define GRE_SP 0x1000 /* Sequence Present */
#define GRE_SS 0x0800 /* Strict Source Route */
#define GRE_VERSION 0x0007 /* Version Number */

/* GRE header layout: fixed flags + protocol, then up to three optional
 * 32-bit words (checksum/offset, key, sequence) selected by the flag bits. */
struct click_gre {
    uint16_t flags; /* See above */
    uint16_t protocol; /* Ethernet protocol type */
    uint32_t options[3]; /* Optional fields (up to 12 bytes in GRE version 0) */
};

CLICK_CXX_UNPROTECT
#include <click/cxxunprotect.h>
#endif
|
<gh_stars>1-10
/**
*
*/
package net.community.apps.common;
import java.util.Map;
import javax.swing.JFrame;
import javax.swing.UIManager;
import net.community.chest.resources.PropertiesResolver;
import net.community.chest.swing.options.BaseOptionPane;
import net.community.chest.util.logging.LoggerWrapper;
import net.community.chest.util.logging.factory.WrapperFactoryManager;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* <P>A base "main" class for UI applications</P>
* @author <NAME>.
* @since Aug 12, 2008 10:34:42 AM
*/
public abstract class BaseMain implements Runnable {
/** Lazily-created logger shared by the class; null until first use. */
private static LoggerWrapper _logger /* =null */;
/**
 * Returns the class logger, creating it on first call.
 * Synchronized so concurrent first calls create the wrapper only once.
 */
private static final synchronized LoggerWrapper getLogger ()
{
    if (null == _logger)
        _logger = WrapperFactoryManager.getLogger(BaseMain.class);
    return _logger;
}
/**
 * Fetches the value of option {@code a} from {@code args[aIndex]},
 * validating that the value exists, is non-empty, and that the option has
 * not already been given a value.
 *
 * @param a option name (used in error messages)
 * @param args full argument array
 * @param numArgs number of valid entries in {@code args}
 * @param aIndex index of the expected option value
 * @param curVal previously resolved value, if any
 * @return the resolved option value
 * @throws IllegalArgumentException if the value is missing or empty
 * @throws IllegalStateException if the option already had a value
 */
public static final String resolveStringArg (
    final String a, final String[] args, final int numArgs, final int aIndex, final String curVal)
{
    if (aIndex >= numArgs)
        throw new IllegalArgumentException("Missing " + a + " option argument");

    final String v=args[aIndex];
    final boolean emptyValue=(null == v) || (v.length() <= 0);
    if (emptyValue)
        throw new IllegalArgumentException("Null/empty " + a + " option not allowed");

    final boolean alreadySet=(curVal != null) && (curVal.length() > 0);
    if (alreadySet)
        throw new IllegalStateException(a + " option argument re-specified (old=" + curVal + "/new=" + v + ")");

    return v;
}
/**
 * Extracts the <U>next</U> argument from the provided arguments
 * @param opt Current parsed option name
 * @param procArgs Collected {@link Map} of parsed options so far - key=
 * option name, value=option value. If successful, the map is updated to
 * contain the extracted value for the parsed option
 * @param oIndex Index of the option in the <code>args</code> array
 * @param args Available arguments
 * @return The index of the option <U>value</U> used
 * @throws IllegalArgumentException if no option value or option re-specified
 */
public static final int addExtraArgument (
    final String opt, final Map<String,String> procArgs, final int oIndex, final String ... args)
    throws IllegalArgumentException
{
    final int numArgs=(args == null) ? 0 : args.length;
    final int valueIndex=oIndex + 1;
    if (valueIndex >= numArgs)
        throw new IllegalArgumentException("Missing option " + opt + " argument");

    final String optionValue=args[valueIndex];
    if ((optionValue == null) || (optionValue.length() <= 0))
        throw new IllegalArgumentException("No value for option " + opt + " argument");

    // put() returning non-null means the option was already recorded.
    if (procArgs.put(opt, optionValue) != null)
        throw new IllegalArgumentException("Option " + opt + " re-specified");

    return valueIndex;
}
/**
* Collects all the values up to first non-value or no more arguments
* @param opt Current parsed option name
* @param procArgs Collected {@link Map} of parsed options so far - key=
* option name, value=option value. If successful, the map is updated to
* contain the extracted value for the parsed option
* @param oIndex Index of the option in the <code>args</code> array
* @param optSep The <U>1st</U> character used to detect if an option was encountered
* @param valSep The separator to use to append the collected values - if '\0'
* then values are appended with no separation
* @param args Available arguments
* @return The index of the <U>last</U> option <U>value</U> used
* @throws IllegalArgumentException if no option value or option re-specified
*/
public static final int collectExtraArguments (
final String opt, final Map<String,String> procArgs, final int oIndex,
final char optSep, final char valSep, final String ... args)
{
final int numArgs=(args == null) ? 0 : args.length;
int aIndex=oIndex + 1;
final StringBuilder sb=new StringBuilder(Math.max(numArgs - aIndex, 1) * 32);
for ( ; aIndex < numArgs; aIndex++)
{
final String argVal=args[aIndex];
if ((argVal == null) || (argVal.length() <= 0))
throw new IllegalArgumentException("No value for option " + opt + " argument");
if (argVal.charAt(0) == optSep)
break;
if ((valSep != '\0') && (sb.length() > 0))
sb.append(valSep);
sb.append(argVal);
}
if (sb.length() <= 0)
throw new IllegalArgumentException("Missing option " + opt + " argument(s)");
final String prev=procArgs.put(opt, sb.toString());
if (prev != null)
throw new IllegalArgumentException("Option " + opt + " re-specified");
return aIndex - 1; // index of last value
}
private final String[] _args;
public final String[] getMainArguments ()
{
return _args;
}
/**
* Default property name suffix for building the debug mode full name
*/
public static final String DEBUG_MODE_BASE_NAME="debug.mode";
public String getDebugModePropertyName ()
{
return PropertiesResolver.getClassPropertyName(getClass(), DEBUG_MODE_BASE_NAME);
}
private Boolean _debugMode /* null */;
public synchronized boolean isDebugMode ()
{
if (null == _debugMode)
{
final String propName=getDebugModePropertyName(),
propVal=System.getProperty(propName);
_debugMode =
((propVal != null) && (propVal.length() > 0)) ? Boolean.valueOf(propVal) : Boolean.FALSE;
}
return (null == _debugMode) ? false : _debugMode.booleanValue();
}
public void setDebugMode (boolean modeOn)
{
_debugMode = Boolean.valueOf(modeOn);
}
protected BaseMain (final String ... args)
{
_args = args;
}
/**
* Called by default {@link #run()} implementation
* @return The {@link JFrame} instance to be used as the main UI - may
* NOT be null
* @throws Exception if cannot create the frame
*/
protected abstract JFrame createMainFrameInstance () throws Exception;
private static JFrame _mainFrame;
public static final JFrame getMainFrameInstance ()
{
return _mainFrame;
}
/*
* @see java.lang.Runnable#run()
*/
@Override
public void run ()
{
/* To change dynamically the look and feel AFTER initialization:
*
* UIManager.setLookAndFeel(lnfName);
* SwingUtilities.updateComponentTreeUI(mainFrame);
* mainFrame.pack();
*/
try
{
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
}
catch(Throwable t)
{
getLogger().error(t.getClass().getName() + " on set look and feel(): " + t.getMessage(), t);
}
try
{
if (null == (_mainFrame=createMainFrameInstance()))
throw new IllegalStateException("No main frame instance created");
_mainFrame.pack();
_mainFrame.setVisible(true);
}
catch(Throwable t)
{
getLogger().error(t.getClass().getName() + " on run(): " + t.getMessage(), t);
BaseOptionPane.showMessageDialog(null, t);
t.printStackTrace(System.err);
System.exit(-2);
}
}
}
|
<filename>src/com/dailymotion/aa2bk/Util.java
package com.dailymotion.aa2bk;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ContentIterator;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import javax.tools.JavaFileObject;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* Created by martin on 2/24/17.
*/
public class Util {
    /**
     * Adds an import for {@code qualifiedName} to the file unless an import
     * with the same reference text already exists.
     * NOTE(review): findElement() may return null when the file has no import
     * list at all - confirm callers only pass files that contain one.
     */
    public static void addImportIfNeeded(Project project, PsiFile psiFile, String qualifiedName) {
        PsiClass layoutInflaterPsiClass = JavaPsiFacade.getInstance(project).findClass(qualifiedName, GlobalSearchScope.allScope(project));
        PsiImportList psiImportList = findElement(psiFile, PsiImportList.class);
        for (PsiElement child: psiImportList.getChildren()) {
            PsiJavaCodeReferenceElement e = findElement(child, PsiJavaCodeReferenceElement.class);
            if (e != null && e.getText().equals(qualifiedName)) {
                // we already have the reference, so do not add it again
                return;
            }
        }
        psiImportList.add(JavaPsiFacade.getElementFactory(project).createImportStatement(layoutInflaterPsiClass));
    }
    /**
     * Returns the simple name of the given annotation (the first identifier of
     * its code reference), or "?" when no identifier can be found.
     */
    public static String getAnnotationName(PsiAnnotation psiAnnotation) {
        for (PsiElement child : psiAnnotation.getChildren()) {
            if (child instanceof PsiJavaCodeReferenceElement) {
                for (PsiElement child2 : child.getChildren()) {
                    if (child2 instanceof PsiIdentifier) {
                        return child2.getText();
                    }
                }
            }
        }
        return "?";
    }
    /**
     * Looks for an annotation with the given simple name in the element's
     * modifier list; returns null when absent.
     */
    public static PsiAnnotation getAnnotation(PsiElement element, String name) {
        for (PsiElement child : element.getChildren()) {
            if (child instanceof PsiModifierList) {
                for (PsiElement child2 : child.getChildren()) {
                    if (child2 instanceof PsiAnnotation) {
                        if (name.equals(getAnnotationName((PsiAnnotation) child2))) {
                            return (PsiAnnotation) child2;
                        }
                    }
                }
            }
        }
        return null;
    }
    /**
     * Returns the text of the annotation's first name/value pair, or null when
     * the annotation has no parameters.
     */
    public static String getAnnotationParameter(PsiAnnotation annotation) {
        PsiElement psiNameValuePair = Util.findElement(annotation, PsiNameValuePair.class);
        if (psiNameValuePair == null) {
            return null;
        }
        return psiNameValuePair.getText();
    }
    /** Returns the first descendant of the given class, or null when none exists. */
    public static <T extends PsiElement> T findElement(PsiElement psiElement, Class<T> clazz) {
        List<T> list = findElements(psiElement, clazz);
        if (list.size() > 0) {
            return list.get(0);
        } else {
            return null;
        }
    }
    /** Recursively collects every descendant (including self) of the given class. */
    public static <T extends PsiElement> List<T> findElements(PsiElement psiElement, Class<T> clazz) {
        final List<T> list = new ArrayList<T>();
        psiElement.accept(new PsiRecursiveElementVisitor() {
            @Override
            public void visitElement(PsiElement element) {
                super.visitElement(element);
                if (clazz.isInstance(element)) {
                    list.add((T) element);
                }
            }
        });
        return list;
    }
    /** Callback used by {@link #traverseFiles}. */
    interface JavaFileIterator {
        void processFile(PsiFile psiFile);
    }
    /**
     * Iterates over the project content and invokes the callback for every file
     * whose extension is in the given set.
     * NOTE(review): the extracted extension keeps its leading dot (substring
     * from the '.' index), so the set must contain entries like ".java".
     */
    public static void traverseFiles(Project project, Set<String> extensions, JavaFileIterator iterator) {
        ProjectFileIndex.SERVICE.getInstance(project).iterateContent(fileOrDir -> {
            String n = fileOrDir.toString();
            int i = n.lastIndexOf('.');
            if (i < 0) {
                return true;
            }
            String ext = n.substring(i);
            if (extensions.contains(ext)) {
                PsiFile psiFile = PsiManager.getInstance(project).findFile(fileOrDir);
                if (psiFile != null) {
                    iterator.processFile(psiFile);
                } else {
                    System.err.println("could not find psiFile for: " + n);
                }
            }
            return true;
        });
    }
}
|
#!/bin/bash
# Start the xautolock daemon unless an instance is already active; when one
# is running, just notify the user through dunst.
if ! pgrep --exact xautolock > /dev/null; then
    # Lock after 3 min idle; if the locker is killed, force the display off.
    xautolock -locker i3lock-multimonitor-wrapper -time 3 -killtime 10 -killer "xset dpms force off" &
else
    dunstify "Locker already running"
fi
|
# bats test helper for erlenv: sets up an isolated environment and provides
# assertion helpers operating on bats' $status/$output/$lines variables.
unset ERLENV_RELEASE
unset ERLENV_DIR
ERLENV_TEST_DIR="${BATS_TMPDIR}/erlenv"
# guard against executing this block twice due to bats internals
if [ "$ERLENV_ROOT" != "${ERLENV_TEST_DIR}/root" ]; then
  export ERLENV_ROOT="${ERLENV_TEST_DIR}/root"
  export HOME="${ERLENV_TEST_DIR}/home"
  # Reset PATH to a minimal system PATH, then prepend the test stubs, the
  # project's libexec dirs and the shims so the code under test wins lookup.
  PATH=/usr/bin:/bin:/usr/sbin:/sbin
  PATH="${ERLENV_TEST_DIR}/bin:$PATH"
  PATH="${BATS_TEST_DIRNAME}/../libexec:$PATH"
  PATH="${BATS_TEST_DIRNAME}/libexec:$PATH"
  PATH="${ERLENV_ROOT}/shims:$PATH"
  export PATH
fi
# Remove the per-test sandbox after every test.
teardown() {
  rm -rf "$ERLENV_TEST_DIR"
}
# Fail the current test: prints the given message (or stdin when no args),
# masking the sandbox path for stable output, and returns non-zero.
flunk() {
  { if [ "$#" -eq 0 ]; then cat -
    else echo "$@"
    fi
  } | sed "s:${ERLENV_TEST_DIR}:TEST_DIR:g" >&2
  return 1
}
# Assert the last `run` command exited 0; optionally also check its output.
assert_success() {
  if [ "$status" -ne 0 ]; then
    flunk "command failed with exit status $status"
  elif [ "$#" -gt 0 ]; then
    assert_output "$1"
  fi
}
# Assert the last `run` command exited non-zero; optionally check its output.
assert_failure() {
  if [ "$status" -eq 0 ]; then
    flunk "expected failed exit status"
  elif [ "$#" -gt 0 ]; then
    assert_output "$1"
  fi
}
assert_equal() {
  if [ "$1" != "$2" ]; then
    { echo "expected: $1"
      echo "actual:   $2"
    } | flunk
  fi
}
# Compare $output against the first argument, or against stdin when no args.
assert_output() {
  local expected
  if [ $# -eq 0 ]; then expected="$(cat -)"
  else expected="$1"
  fi
  assert_equal "$expected" "$output"
}
# Two modes: assert_line N TEXT checks line N exactly; assert_line TEXT
# checks that some output line equals TEXT.
assert_line() {
  if [ "$1" -ge 0 ] 2>/dev/null; then
    assert_equal "$2" "${lines[$1]}"
  else
    local line
    for line in "${lines[@]}"; do
      if [ "$line" = "$1" ]; then return 0; fi
    done
    flunk "expected line \`$1'"
  fi
}
# Inverse of assert_line: numeric arg bounds the line count, string arg
# asserts no output line equals it.
refute_line() {
  if [ "$1" -ge 0 ] 2>/dev/null; then
    local num_lines="${#lines[@]}"
    if [ "$1" -lt "$num_lines" ]; then
      flunk "output has $num_lines lines"
    fi
  else
    local line
    for line in "${lines[@]}"; do
      if [ "$line" = "$1" ]; then
        flunk "expected to not find line \`$line'"
      fi
    done
  fi
}
# Run an arbitrary command and flunk when it fails.
assert() {
  if ! "$@"; then
    flunk "failed: $@"
  fi
}
|
<reponame>jonathanopie/TestPark
require("./chai.helper");
var domHelper = require("./dom.helper");

// Specs for the jQuery-like text() accessor/mutator.
describe("text", function () {
    var expected = "I am the text for no2";

    // Rebuild the DOM fixture before every spec.
    beforeEach(function () {
        domHelper(
            "<div id=\"no1\"></div>" +
            "<div id=\"no2\">" + expected + "</div>"
        );
    });

    it("should set text inside an element", function () {
        var sample = "this is a test";
        $("#no1").text(sample);
        document.getElementById("no1").textContent.should.equal(sample);
    });

    it("should get text from inside an element", function () {
        $("#no2").text().should.equal(expected);
    });
});
|
<gh_stars>10-100
/*******************************************************************************
* Copyright (c) 2016 comtel inc.
*
* Licensed under the Apache License, version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*******************************************************************************/
package org.jfxvnc.net.rfb.render;
import org.jfxvnc.net.rfb.codec.PixelFormat;
import org.jfxvnc.net.rfb.codec.ProtocolVersion;
import org.jfxvnc.net.rfb.codec.security.SecurityType;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
/** Default, property-backed implementation of {@link ProtocolConfiguration}. */
public class DefaultProtocolConfiguration implements ProtocolConfiguration {

  /** Negotiated RFB protocol version; defaults to 3.8. */
  private final ObjectProperty<ProtocolVersion> version = new SimpleObjectProperty<>(ProtocolVersion.RFB_3_8);
  /** Pixel format requested by the client; defaults to RGB 888. */
  private final ObjectProperty<PixelFormat> pixelFormat = new SimpleObjectProperty<>(PixelFormat.RGB_888);
  /** Security scheme; defaults to classic VNC authentication. */
  private final ObjectProperty<SecurityType> security = new SimpleObjectProperty<>(SecurityType.VNC_Auth);
  /** Server connection settings. */
  private final StringProperty serverHost = new SimpleStringProperty("127.0.0.1");
  private final IntegerProperty serverPort = new SimpleIntegerProperty(DEFAULT_PORT);
  private final IntegerProperty listenPort = new SimpleIntegerProperty(DEFAULT_LISTENING_PORT);
  private final StringProperty passwd = new SimpleStringProperty();
  private final BooleanProperty sharedFlag = new SimpleBooleanProperty(true);
  private final BooleanProperty sslFlag = new SimpleBooleanProperty(false);
  /** Encoding toggles: raw and copy-rect on by default, the rest off. */
  private final BooleanProperty rawEncoding = new SimpleBooleanProperty(true);
  private final BooleanProperty copyRectEncoding = new SimpleBooleanProperty(true);
  private final BooleanProperty hextileEncoding = new SimpleBooleanProperty(false);
  private final BooleanProperty zlibEncoding = new SimpleBooleanProperty(false);
  private final BooleanProperty cursorFlag = new SimpleBooleanProperty(false);
  private final BooleanProperty desktopSizeFlag = new SimpleBooleanProperty(true);

  @Override
  public StringProperty hostProperty() {
    return serverHost;
  }

  @Override
  public IntegerProperty portProperty() {
    return serverPort;
  }

  @Override
  public IntegerProperty listeningPortProperty() {
    return listenPort;
  }

  @Override
  public StringProperty passwordProperty() {
    return passwd;
  }

  @Override
  public BooleanProperty sslProperty() {
    return sslFlag;
  }

  @Override
  public ObjectProperty<SecurityType> securityProperty() {
    return security;
  }

  @Override
  public BooleanProperty sharedProperty() {
    return sharedFlag;
  }

  @Override
  public ObjectProperty<ProtocolVersion> versionProperty() {
    return version;
  }

  @Override
  public ObjectProperty<PixelFormat> clientPixelFormatProperty() {
    return pixelFormat;
  }

  @Override
  public BooleanProperty rawEncProperty() {
    return rawEncoding;
  }

  @Override
  public BooleanProperty copyRectEncProperty() {
    return copyRectEncoding;
  }

  @Override
  public BooleanProperty hextileEncProperty() {
    return hextileEncoding;
  }

  @Override
  public BooleanProperty clientCursorProperty() {
    return cursorFlag;
  }

  @Override
  public BooleanProperty desktopSizeProperty() {
    return desktopSizeFlag;
  }

  @Override
  public BooleanProperty zlibEncProperty() {
    return zlibEncoding;
  }
}
|
#!/bin/bash -eu
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Build the project, then archive its objects and link the string fuzzer.
./autogen.sh
./configure
make V=1 -j"$(nproc)"
cd libpromises
# Fix: quote $SRC/$OUT so paths containing spaces do not break the build.
mv "$SRC/string_fuzzer.c" .
find . -name "*.o" -exec ar rcs fuzz_lib.a {} \;
# $CC/$CFLAGS/$CXXFLAGS/$LIB_FUZZING_ENGINE are intentionally unquoted:
# the OSS-Fuzz environment supplies them as multi-word flag lists.
$CC $CFLAGS -I./ -c string_fuzzer.c -o string_fuzzer.o
$CC $CXXFLAGS $LIB_FUZZING_ENGINE string_fuzzer.o \
	-o "$OUT/string_fuzzer" fuzz_lib.a \
	../libntech/libutils/.libs/libutils.a
|
<filename>src/etl/ETLTransform.py<gh_stars>10-100
import functools
import pyspark.sql.functions as SparkSQLFunctions
from pyspark.sql import DataFrame, SparkSession
from etl import ETL
from etl.ITable import SourceTable, TargetTable, matchEqualityOperator
from etl.meta.MetaModel import MetaModel, MetaResult
# ToDo - Source & Target group aggregations
class Transform:
    """Builds the target DataFrame for ``targettable`` by reading every source
    table described in ``model``, applying per-table transforms, then joins,
    filters and group/aggregation.

    NOTE(review): ``transformquery``, ``targetcolumnslist``, ``joincolumns``
    and ``jointype`` are initialised but never read in this class - confirm
    whether they are used by callers before removing.
    """

    def __init__(self, targettable, model: MetaModel, sc: SparkSession):
        self.model = model
        self.spark = sc
        self.sourcetables: list[SourceTable] = []
        self.targettable = targettable
        self.transformquery = ""
        self.joindict = {}
        self.sourcetablesdf: list[DataFrame] = []
        self.targetdf: DataFrame = None
        self.targetcolumnslist = []
        self.joincolumns = None
        self.jointype = None
        # Bug fix: applyFilters() reads and appends to this attribute; it was
        # never initialised, which raised AttributeError on first use.
        self.filterclause = ""

    def genericDfOperation(self, operationFunc):
        # Apply an arbitrary caller-supplied operation to this DataFrame.
        return operationFunc(self)
    # Monkey-patch the helper onto pyspark's DataFrame (kept for compatibility
    # with existing callers that invoke df.genericDfOperation(...)).
    DataFrame.genericDfOperation = genericDfOperation

    def filterSourceTable(self, srctbl):
        """Return all SourceTable objects whose name matches ``srctbl``."""
        return [tbl for tbl in self.sourcetables if tbl.tablename == srctbl]

    def joinDataframes(self, dict1, dict2):
        """functools.reduce() step: join the accumulated frame with the next."""
        joined: DataFrame = dict1['df'].join(dict2['df'], on=dict2['condition'], how=dict2['jointype'])
        return {'df': joined}

    def mapAggregationFunction(self, fieldname, functionname):
        """Map a textual aggregator name onto a Spark SQL column expression.

        Returns None for unknown names (as the original if/elif chain did by
        falling through).
        """
        functions = {
            'min': SparkSQLFunctions.min,
            'max': SparkSQLFunctions.max,
            'count': SparkSQLFunctions.count,
            'sum': SparkSQLFunctions.sum,
            'avg': SparkSQLFunctions.avg,
        }
        func = functions.get(str(functionname))
        return func(col=SparkSQLFunctions.col(fieldname)) if func else None

    def applyJoin(self):
        """Join all transformed source DataFrames as described by the model."""
        # NOTE(review): the table names are hard-coded - TODO derive from model.
        self.query, self.joindict = self.model.joinSQL(self.model.datamodel, 'purchase', 'product', 'store')
        joinlist = []
        for k in self.joindict.keys():
            srctabledf: DataFrame = self.filterSourceTable(k)[0].targetdf
            self.joindict[k].update({'df': srctabledf})
            joinlist.append(self.joindict[k])
        self.targetdf = functools.reduce(self.joinDataframes, joinlist)['df']

    def applyFilters(self):
        """Collect per-column filter expressions and apply them to the frame."""
        # Bug fix: filterMetaResultBySourceTable() returns a list (see
        # transform()); take the first entry for the table-level metadata.
        tblinfo: MetaResult = self.model.filterMetaResultBySourceTable(self.sourcetables[0].tablename)[0]
        targettable: TargetTable = TargetTable(sourcesystem=tblinfo.src_system, tablename=tblinfo.target_table, pk=[],
                                               database=tblinfo.target_database,
                                               filetype=tblinfo.target_filetype, filepath=tblinfo.target_file_path,
                                               modeltableorder=tblinfo.src_table_order)
        for metares in self.model.metaresultlist:
            filterexpr = matchEqualityOperator(expression=metares.src_col_filter)
            if filterexpr is not None and filterexpr != "" and filterexpr.lower() != 'none':
                self.filterclause = f"{self.filterclause} {metares.target_col}{filterexpr}".strip()
        self.filterclause = (self.filterclause or "").strip()
        # Bug fix: DataFrame.filter("") raises a parse error - skip filtering
        # when no expressions were collected.
        # NOTE(review): 'targettable' is local and discarded after this method
        # returns - confirm whether the filtered frame should be kept on self.
        targettable.df = self.targetdf.filter(self.filterclause) if self.filterclause else self.targetdf

    def applyGroupAndAggregation(self):
        """Group by the non-aggregated target columns and aggregate the rest."""
        selectlist = []
        aggregations = {}
        for metares in self.model.filterMetaResultByTargetTable(self.targettable):
            if ETL.isNullOrEmpty(metares.target_col_aggregator) is not None:
                selectlist.append(metares.target_col)
            else:
                aggregations[metares.target_col] = {
                    'function': metares.target_col_aggregator,
                    'filter': metares.target_col_aggregator_filter,
                }
        # Bug fix: the original passed the bare function object
        # SparkSQLFunctions.min to agg() and ignored the collected metadata.
        # TODO: per-aggregator filters ('filter' key) are still not applied.
        aggexprs = []
        for colname, info in aggregations.items():
            expr = self.mapAggregationFunction(colname, info['function'])
            if expr is not None:
                aggexprs.append(expr.alias(colname))
        if aggexprs:
            self.targetdf = self.targetdf.groupby(*selectlist).agg(*aggexprs)

    def transform(self):
        """Read, transform, join, filter and aggregate all source tables."""
        # Get unique source table names for the transformation.
        srctables = set()
        for metares in self.model.metaresultlist:
            srctables.add(metares.src_table)
        # For each source table create a SourceTable object and assign transform columns.
        for srctable in srctables:
            tablemetaresult = self.model.filterMetaResultBySourceTable(srctbl=srctable)
            tblinfo: MetaResult = tablemetaresult[0]
            fklist = []
            for item in self.model.datamodel.keys():
                # Bug fix: the original tested "fk is {}", which is always
                # False (identity against a fresh literal); a truthiness test
                # covers both the None and the empty-dict cases.
                fk = self.model.datamodel[item]['fk']
                if fk and srctable in fk.keys():
                    fklist.extend(fk[srctable]['fk_pk'])
            sourcetable: SourceTable = SourceTable(sourcesystem=tblinfo.src_system, tablename=tblinfo.src_table,
                                                   pk=self.model.datamodel[tblinfo.src_table]['pk'],
                                                   fk=fklist,
                                                   database=tblinfo.src_database, filepath=tblinfo.src_file_path,
                                                   filetype=tblinfo.src_filetype,
                                                   modeltableorder=tblinfo.src_table_order)
            self.sourcetables.append(sourcetable)
            for tbl in tablemetaresult:
                # Bug fix: "(True, False)[cond]" selects True when cond is
                # False, inverting the pk flag; pass the comparison directly.
                sourcetable.addColumn(name=tbl.src_col, type=tbl.src_col_datatype,
                                      pk=(tbl.src_key_constraints == 'pk'),
                                      udf=tbl.udf, udfargs=tbl.udfarguments, casttype=tbl.target_col_datatype,
                                      aliasname=tbl.target_col, filterclause=tbl.src_col_filter, fk={})
            # Read the source file into a DataFrame.
            sourcetable.readFileFromSource(spark=self.spark)
        ETL.registerAllUDF(sc=self.spark)
        for sourcetable in self.sourcetables:
            sourcetable.applyTransform()
        self.applyJoin()
        self.applyFilters()
        # Bug fix: the original called self.applyGroupAggregation(), a method
        # that does not exist (AttributeError at runtime).
        self.applyGroupAndAggregation()
        self.targetdf.show()
|
#!/usr/bin/env bash
# Install Consul

# Terminal colours.
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
GRAY='\033[1;30m'
NC='\033[0m' # No Color

echo -e "\n${RED}Running installation of Consul...${NC}\n"

# Provides the get_latest_release() helper.
. ./get_latest_release.sh

# Save current directory so we can return to it afterwards.
CURRENT_DIR=$(pwd)

# Resolve the latest release tag (e.g. "v1.15.4") and strip the leading "v".
VERSION=$(get_latest_release "hashicorp/consul")
# Fix: use parameter expansion instead of an unquoted `echo | cut` pipeline;
# this also degrades gracefully when the tag has no "v" prefix.
VERSION_WITHOUT_V="${VERSION#v}"

cd ~ || exit
sudo rm -rf ~/*.zip*
wget -q "https://releases.hashicorp.com/consul/${VERSION_WITHOUT_V}/consul_${VERSION_WITHOUT_V}_linux_amd64.zip"
unzip "consul_${VERSION_WITHOUT_V}_linux_amd64.zip"
sudo chown root:root consul
sudo mv consul /usr/local/bin/
sudo rm -rf ~/*.zip*

# Set back to original current directory
cd "$CURRENT_DIR" || exit

echo -e "${GREEN}Consul installation complete.${NC}\n"
|
<reponame>IchordeDionysos/FirebaseUI-iOS<filename>FirebaseAuthUI/FUIStaticContentTableViewController.h
//
// Copyright (c) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <UIKit/UIKit.h>
#import "FUIAuthBaseViewController.h"
#import "FUIStaticContentTableViewManager.h"
NS_ASSUME_NONNULL_BEGIN
/** @class FUIStaticContentTableViewController
@brief The view controller which presents contents of @c FUIStaticContentTableViewContent.
controller has footer and header views.
*/
@interface FUIStaticContentTableViewController : FUIAuthBaseViewController

/** @fn initWithContents:nextTitle:nextAction:
    @brief Convenience initializer. View controller doesn't have header and footer sections.
    @param contents The contents of the table view presented in the controller.
    @param nextTitle Text displayed on the navigation bar title.
    @param nextAction Action triggered on the right bar item of @c UINavigationController.
 */
- (instancetype)initWithContents:(nullable FUIStaticContentTableViewContent *)contents
                       nextTitle:(nullable NSString *)nextTitle
                      nextAction:(nullable FUIStaticContentTableViewCellAction)nextAction;
// TODO: set nextAction param last arg

/** @fn initWithContents:nextTitle:nextAction:headerText:
    @brief Convenience initializer. View controller doesn't have footer section.
    @param contents The contents of the table view presented in the controller.
    @param nextTitle Text displayed on the navigation bar title.
    @param nextAction Action triggered on the right bar item of @c UINavigationController.
    @param headerText Text displayed at the header view controller.
 */
- (instancetype)initWithContents:(nullable FUIStaticContentTableViewContent *)contents
                       nextTitle:(nullable NSString *)nextTitle
                      nextAction:(nullable FUIStaticContentTableViewCellAction)nextAction
                      headerText:(nullable NSString *)headerText;

/** @fn initWithContents:nextTitle:nextAction:headerText:footerText:footerAction:
    @brief Designated initializer.
    @param contents The contents of the table view presented in the controller.
    @param actionTitle Text displayed on the navigation bar title.
    @param nextAction Action triggered on the right bar item of @c UINavigationController.
    @param headerText Text displayed at the header view controller.
    @param footerText Text displayed at the footer of view controller.
    @param footerAction Action triggered when user taps on the footer.
 */
- (instancetype)initWithContents:(nullable FUIStaticContentTableViewContent *)contents
                       nextTitle:(nullable NSString *)actionTitle
                      nextAction:(nullable FUIStaticContentTableViewCellAction)nextAction
                      headerText:(nullable NSString *)headerText
                      footerText:(nullable NSString *)footerText
                    footerAction:(nullable FUIStaticContentTableViewCellAction)footerAction
    NS_DESIGNATED_INITIALIZER;

/** @fn init
    @brief Please use @c initWithContents:nextTitle:nextAction:headerText:footerText:footerAction:.
 */
- (instancetype)init NS_UNAVAILABLE;

/** @fn initWithNibName:bundle:
    @brief Please use @c initWithContents:nextTitle:nextAction:headerText:footerText:footerAction:.
 */
- (instancetype)initWithNibName:(nullable NSString *)nibNameOrNil
                         bundle:(nullable NSBundle *)nibBundleOrNil NS_UNAVAILABLE;

/** @fn initWithCoder:
    @brief Please use @c initWithContents:nextTitle:nextAction:headerText:footerText:footerAction:.
 */
- (nullable instancetype)initWithCoder:(NSCoder *)aDecoder NS_UNAVAILABLE;

/** @fn initWithNibName:bundle:authUI:
    @brief Please use @c initWithContents:nextTitle:nextAction:headerText:footerText:footerAction:.
    @param nibNameOrNil The name of the nib file to associate with the view controller.
    @param nibBundleOrNil The bundle in which to search for the nib file.
    @param authUI The @c FUIAuth instance that manages this view controller.
 */
- (instancetype)initWithNibName:(nullable NSString *)nibNameOrNil
                         bundle:(nullable NSBundle *)nibBundleOrNil
                         authUI:(FUIAuth *)authUI NS_UNAVAILABLE;

@end
NS_ASSUME_NONNULL_END
|
#!/bin/bash
set -ex

# Configure rsyslog to ship logs to logz.io.
curl -sL https://qsdevops.s3-eu-west-1.amazonaws.com/rsyslog_install.sh | /bin/bash -s demoapp-server;

# Install Node.js 6.x only when no v6 runtime is already present.
if [ -z "$(nodejs --version | grep -E 'v6.*')" ] ; then
  echo 'Install NodeJs'
  curl -sL https://deb.nodesource.com/setup_6.x | /bin/bash -E;
  apt-get update && apt-get install --no-install-recommends --no-install-suggests -y nodejs;
fi

echo 'Unzip'
# Fix: quote the path so the script survives spaces in ARTIFACTS_PATH.
cd "$ARTIFACTS_PATH";
tar -xvf demoapp-server.tar.gz;

echo 'Run App'
cd "$ARTIFACTS_PATH";
set +ex
|
#!/usr/bin/env sh
######################################################################
# @author      : Bhishan (Bhishan@BpMacpro.local)
# @file        : run
# @created     : Tuesday Apr 07, 2020 14:41:06 EDT
#
# @description : convert pdf to png and upload to github
######################################################################
I="$1"

# Rasterise every page of the input PDF at 256 dpi into <name>_NNN.png files.
/usr/local/bin/gs -dNOPAUSE -q -sDEVICE=png16m -r256 -sOutputFile="${I%.*}_"%03d.png "$I" -c quit;

# upload to github
git pull
git add --all
git commit -m "added png files"
git push origin master

# Copy a ready-made markdown snippet for the README to the clipboard.
# Fix: the here-document delimiter must appear on its own line - the original
# "EOF)" never terminated the document, which is a shell syntax error.
var1=$(cat << EOF

\`\`\`sql
\`\`\`
EOF
)
echo -n "$var1" | pbcopy
|
#!/bin/sh
# Bootstrap the Vault auto-unseal client: extract TLS material and the wrapped
# token from credentials.json, unwrap the token and persist it for later runs.
# shellcheck disable=SC1091
. /root/.env.cook
set -e
# shellcheck disable=SC3040
set -o pipefail
export PATH=/usr/local/bin:$PATH
# Run once only: skip when the unwrapped token has already been materialised.
if [ ! -s /mnt/unsealcerts/unwrapped.token ]; then
  # Pull the wrapped token out of the credentials bundle.
  UNSEALTOKEN=$(< /mnt/unsealcerts/credentials.json \
    jq -re .wrapped_token)
  # Extract the client certificate and the CA chain (ca + ca_chain + ca_root).
  < /mnt/unsealcerts/credentials.json \
    jq -re .cert >/mnt/unsealcerts/client.crt
  < /mnt/unsealcerts/credentials.json \
    jq -re .ca >/mnt/unsealcerts/ca.crt
  < /mnt/unsealcerts/credentials.json \
    jq -re .ca_chain >/mnt/unsealcerts/ca_chain.crt
  < /mnt/unsealcerts/credentials.json \
    jq -re .ca_root >>/mnt/unsealcerts/ca_chain.crt
  < /mnt/unsealcerts/credentials.json \
    jq -re .ca_root >/mnt/unsealcerts/ca_root.crt
  # Restrict permissions before writing the private key (600 via umask 177).
  umask 177
  < /mnt/unsealcerts/credentials.json \
    jq -re .key >/mnt/unsealcerts/client.key
  # Unwrap the token against the unseal Vault and store the client token.
  # HOME=/var/empty keeps vault from reading/writing any user token helper.
  HOME=/var/empty \
  vault unwrap -address="https://$UNSEALIP:8200" \
    -tls-server-name=server.global.vaultunseal \
    -ca-cert=/mnt/unsealcerts/ca_chain.crt \
    -client-key=/mnt/unsealcerts/client.key \
    -client-cert=/mnt/unsealcerts/client.crt \
    -format=json "$UNSEALTOKEN" | \
    jq -r '.auth.client_token' > /mnt/unsealcerts/unwrapped.token
  chown vault /mnt/unsealcerts/*
fi
|
#!/bin/bash
# Spin up the reddit demo stack on a dedicated bridge network.
NET=reddit
docker network create "$NET"
# MongoDB backs both the post and the comment services.
docker run -d --network="$NET" --network-alias=post_db --network-alias=comment_db --name=post_db mongo:latest
docker run -d --network="$NET" --network-alias=post --name=post bessonovd/post:1.0
docker run -d --network="$NET" --network-alias=comment --name=comment bessonovd/comment:1.0
# The UI is the only container published to the host (port 9292).
docker run -d --network="$NET" -p 9292:9292 --name=ui bessonovd/ui:1.0
|
<filename>scripts/buildClient.js
// @flow
// Build the client bundle with webpack, then copy static assets into dist/.
// eslint-disable-next-line no-sync
const { execSync } = require('child_process');

execSync('webpack');
execSync('cp public/* dist/client/');
|
package com.hapramp.steem.models;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
* Created by Ankit on 3/27/2018.
*/
/**
 * Gson-mapped Steem user profile metadata (image, name, location, etc.).
 */
public class Profile {
    private static final String TAG = Profile.class.getSimpleName();

    @SerializedName("profile_image")
    @Expose
    public String profileImage = "";

    @SerializedName("name")
    @Expose
    public String name = "";

    // Fix: @Expose was missing here although every other field carries it;
    // with Gson's excludeFieldsWithoutExposeAnnotation() enabled this field
    // would silently be dropped from (de)serialization.
    @SerializedName("location")
    @Expose
    public String location = "";

    @Expose
    @SerializedName("cover_image")
    public String cover_image = "";

    @Expose
    @SerializedName("website")
    public String website = "";

    @SerializedName("about")
    @Expose
    public String about = "";

    public Profile(String profileImage, String name, String location, String website, String about, String cover_image) {
        this.profileImage = profileImage;
        this.name = name;
        this.location = location;
        this.website = website;
        this.cover_image = cover_image;
        this.about = about;
    }

    /** Returns a placeholder profile serialized as JSON, used as a fallback. */
    public static String getDefaultProfileAsJson() {
        String json = "{\"about\":\"about\",\"location\":\"location\",\"name\":\"name\",\"profile_image\":\"https://user-images.githubusercontent.com/10809719/38206885-b36c8a66-36c9-11e8-9c7a-3bba603b4994.png\",\"website\":\"website\",\"cover_image\":\"cover_image_url\"}";
        return json;
    }

    public String getCover_image() {
        return cover_image;
    }

    public String getProfileImage() {
        return profileImage;
    }

    public String getName() {
        return name;
    }

    public String getLocation() {
        return location;
    }

    public String getWebsite() {
        return website;
    }

    public String getAbout() {
        return about;
    }
}
|
#!/bin/sh
# Entrypoint: create a user/group matching the host IDs, set up the home
# directory and drop privileges before executing the requested process.
process=$1

# Check environment variables.
# Fix: exit with a non-zero status so callers can detect the failure
# (a bare "exit" reported success).
if [ -z "$USER_ID" ]; then
  echo "USER_ID not set. Exiting..."
  exit 1
fi
if [ -z "$GROUP_ID" ]; then
  echo "GROUP_ID not set. Exiting..."
  exit 1
fi
if [ -z "$USER_NAME" ]; then
  echo "USER_NAME not set. Exiting..."
  exit 1
fi
if [ -z "$GROUP_NAME" ]; then
  echo "GROUP_NAME not set. Exiting..."
  exit 1
fi

groupadd -f -g "$GROUP_ID" "$GROUP_NAME"
useradd -G sudo --shell /bin/zsh -u "$USER_ID" -g "$GROUP_ID" -o -c "" \
  -m "$USER_NAME" 2> /dev/null
echo "$USER_NAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers

export HOME=/home/$USER_NAME
export SHELL=/bin/zsh
# Fix: quote paths and ownership arguments so unusual user/group names
# cannot be word-split.
touch "/home/$USER_NAME/.zshrc"
chown "$USER_NAME:$GROUP_NAME" "/home/$USER_NAME/.zshrc"
mkdir -p "/home/$USER_NAME/.ssh"
# "$HOME/.ssh" is the same path as ~/.ssh since HOME was exported above.
chown -R "$USER_NAME:$GROUP_NAME" "$HOME/.ssh"

# $process is intentionally unquoted: it may carry a command plus arguments.
exec gosu "$USER_NAME" $process
<reponame>op-ct/pupmod-simp-sudosh
# Unit tests for the 'sudosh' Puppet class (rspec-puppet).
require 'spec_helper'

describe 'sudosh' do
  context 'supported operating systems' do
    # Iterate over every OS/fact combination declared in the module metadata.
    on_supported_os.each do |os, facts|
      let(:facts) do
        facts
      end
      context "on #{os}" do
        it { is_expected.to create_class('sudosh') }
        it { is_expected.to compile.with_all_deps }
        it { is_expected.to contain_package('sudosh2') }
        # rsyslog rule routing sudosh program output to its own log file.
        it do
          is_expected.to contain_rsyslog__rule__local('0sudosh').with({
            'rule'            => "if ($programname == \'sudosh\') then",
            'target_log_file' => '/var/log/sudosh.log',
            'stop_processing' => true
          })
        end
        # logrotate entry for the sudosh log; restarts rsyslog afterwards.
        it do
          is_expected.to contain_logrotate__add('sudosh').with({
            'log_files'  => ['/var/log/sudosh.log'],
            'missingok'  => true,
            'lastaction' => '/sbin/service rsyslog restart > /dev/null 2>&1 || true'
          })
        end
      end
    end
  end
end
|
#!/usr/bin/env bash
# Vagrant boot-time provisioning: set up swap, then reload nginx vhosts.

# Init swap
/vagrant/vagrant/scripts/swap.sh

# Enable nginx vhosts
# Reload is required because on system startup /vagrant directory may not be initialized yet,
# so nginx may be missing our vhosts configs.
sudo service nginx reload
|
<gh_stars>0
// Point webpack's runtime chunk loading at the module's deployed asset
// folder under the current Jahia context path.
__webpack_public_path__ = window.contextJsParameters.contextPath + '/modules/jahia-dashboard/javascript/apps/';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.